From 4297c9bacc10d24e2a56b04b3e7bda8414797a46 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 25 Nov 2022 08:47:59 +0100 Subject: [PATCH 001/501] RefCell::get_mut: fix typo and fix the same typo in a bunch of other places --- crates/flycheck/src/lib.rs | 2 +- crates/hir-expand/src/lib.rs | 4 ++-- crates/rust-analyzer/src/config.rs | 2 +- crates/syntax/src/tests/sourcegen_ast.rs | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 8a91d6066614f..ac086d4fe7433 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -362,7 +362,7 @@ impl FlycheckActor { /// A handle to a cargo process used for fly-checking. struct CargoHandle { /// The handle to the actual cargo process. As we cannot cancel directly from with - /// a read syscall dropping and therefor terminating the process is our best option. + /// a read syscall dropping and therefore terminating the process is our best option. child: JodChild, thread: jod_thread::JoinHandle>, receiver: Receiver, diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index a5b499fe8d9d4..7352b003a491c 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -814,7 +814,7 @@ impl<'a> InFile<&'a SyntaxNode> { pub fn original_syntax_node(self, db: &dyn db::AstDatabase) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, - // as we don't have node inputs otherwise and therefor can't find an `N` node in the input + // as we don't have node inputs otherwise and therefore can't find an `N` node in the input if !self.file_id.is_macro() { return Some(self.map(Clone::clone)); } else if !self.file_id.is_attr_macro(db) { @@ -926,7 +926,7 @@ impl InFile { pub fn original_ast_node(self, db: &dyn db::AstDatabase) -> Option> { // This kind of upmapping can only be achieved in attribute expanded files, - // as we don't have node inputs otherwise and therefor can't find an `N` node in the input + // as we don't have node inputs otherwise and therefore can't find an `N` node in the input if !self.file_id.is_macro() { return Some(self); } else if !self.file_id.is_attr_macro(db) { diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 4072ae585dbd9..c278ba2d7c5b3 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -157,7 +157,7 @@ config_data! { checkOnSave_noDefaultFeatures: Option = "null", /// Override the command rust-analyzer uses instead of `cargo check` for /// diagnostics on save. The command is required to output json and - /// should therefor include `--message-format=json` or a similar option. + /// should therefore include `--message-format=json` or a similar option. 
/// /// If you're changing this because you're using some tool wrapping /// Cargo, you might also want to change diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs index 70b54843dbaab..712ef5f63b651 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ -86,7 +86,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { .traits .iter() .filter(|trait_name| { - // Loops have two expressions so this might collide, therefor manual impl it + // Loops have two expressions so this might collide, therefore manual impl it node.name != "ForExpr" && node.name != "WhileExpr" || trait_name.as_str() != "HasLoopBody" }) From e97203c3f893893611818997bbeb0116ded2605f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Dr=C3=B6ge?= Date: Wed, 21 Dec 2022 16:01:18 +0200 Subject: [PATCH 002/501] Stop at the first `NULL` argument when iterating `argv` Some C commandline parsers (e.g. GLib and Qt) are replacing already handled arguments in `argv` with `NULL` and move them to the end. That means that `argc` might be bigger than the actual number of non-`NULL` pointers in `argv` at this point. To handle this we simply stop iterating at the first `NULL` argument. `argv` is also guaranteed to be `NULL`-terminated so any non-`NULL` arguments after the first `NULL` can safely be ignored. Fixes https://github.com/rust-lang/rust/issues/105999 --- library/std/src/sys/unix/args.rs | 28 ++++++++++++++++++++++------ 1 file changed, 22 insertions(+), 6 deletions(-) diff --git a/library/std/src/sys/unix/args.rs b/library/std/src/sys/unix/args.rs index a342f0f5e8597..a5ce6d5120dad 100644 --- a/library/std/src/sys/unix/args.rs +++ b/library/std/src/sys/unix/args.rs @@ -141,12 +141,28 @@ mod imp { // list. let argv = ARGV.load(Ordering::Relaxed); let argc = if argv.is_null() { 0 } else { ARGC.load(Ordering::Relaxed) }; - (0..argc) - .map(|i| { - let cstr = CStr::from_ptr(*argv.offset(i) as *const libc::c_char); - OsStringExt::from_vec(cstr.to_bytes().to_vec()) - }) - .collect() + let mut args = Vec::with_capacity(argc as usize); + for i in 0..argc { + let ptr = *argv.offset(i) as *const libc::c_char; + + // Some C commandline parsers (e.g. GLib and Qt) are replacing already + // handled arguments in `argv` with `NULL` and move them to the end. That + // means that `argc` might be bigger than the actual number of non-`NULL` + // pointers in `argv` at this point. + // + // To handle this we simply stop iterating at the first `NULL` argument. + // + // `argv` is also guaranteed to be `NULL`-terminated so any non-`NULL` arguments + // after the first `NULL` can safely be ignored. + if ptr.is_null() { + break; + } + + let cstr = CStr::from_ptr(ptr); + args.push(OsStringExt::from_vec(cstr.to_bytes().to_vec())); + } + + args } } } From ca5f4a1deea72a7a0c0654055195c6e98af9ef41 Mon Sep 17 00:00:00 2001 From: Trevor Gross Date: Sat, 24 Dec 2022 23:26:25 -0500 Subject: [PATCH 003/501] option_if_let_else: update known problems wording --- clippy_lints/src/option_if_let_else.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/clippy_lints/src/option_if_let_else.rs b/clippy_lints/src/option_if_let_else.rs index 472f52380bbf4..c5ea09590d3df 100644 --- a/clippy_lints/src/option_if_let_else.rs +++ b/clippy_lints/src/option_if_let_else.rs @@ -25,11 +25,11 @@ declare_clippy_lint! { /// Using the dedicated functions of the `Option` type is clearer and /// more concise than an `if let` expression. 
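For readers skimming the lint text above: a minimal illustrative sketch (not part of this patch; the function names are invented) of the pattern `option_if_let_else` rewrites, and of the loop-control case the reworded note describes, where `break` cannot be moved into the closure a combinator would need.

```rust
fn label(opt: Option<u32>) -> String {
    // The lint fires on this shape: both arms only produce a value, so the
    // dedicated `Option` combinator below is clearer and more concise.
    let if_let_version = if let Some(n) = opt { n.to_string() } else { "none".to_string() };

    // The suggested rewrite.
    let combinator_version = opt.map_or_else(|| "none".to_string(), |n| n.to_string());

    assert_eq!(if_let_version, combinator_version);
    combinator_version
}

fn first_present(slots: &[Option<u32>]) -> Option<u32> {
    for slot in slots {
        // No suggestion here: the `else` arm contains `break`, and loop control
        // expressions cannot be used from inside the closure that
        // `map_or_else` would require.
        if let Some(n) = slot {
            return Some(*n);
        } else {
            break;
        }
    }
    None
}
```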
/// - /// ### Known problems - /// This lint uses a deliberately conservative metric for checking - /// if the inside of either body contains breaks or continues which will - /// cause it to not suggest a fix if either block contains a loop with - /// continues or breaks contained within the loop. + /// ### Notes + /// This lint uses a deliberately conservative metric for checking if the + /// inside of either body contains loop control expressions `break` or + /// `continue` (which cannot be used within closures). If these are found, + /// this lint will not be raised. /// /// ### Example /// ```rust From 4f369427b5bd5fe3fed208c43aadf07d0c821d7e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Donny/=EA=B0=95=EB=8F=99=EC=9C=A4?= Date: Sat, 10 Dec 2022 13:09:18 +0900 Subject: [PATCH 004/501] increase limit --- crates/hir-expand/src/db.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index b28e60187deff..294046296acec 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -25,7 +25,7 @@ use crate::{ /// an error will be emitted. /// /// Actual max for `analysis-stats .` at some point: 30672. -static TOKEN_LIMIT: Limit = Limit::new(524_288); +static TOKEN_LIMIT: Limit = Limit::new(1_048_576); #[derive(Debug, Clone, Eq, PartialEq)] pub enum TokenExpander { From 68723043db2e7ee6e780c7f42cd9e57df72a1fd0 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 9 Jan 2023 19:29:28 +0100 Subject: [PATCH 005/501] Split out hir-def attribute handling parts into hir-expand --- Cargo.lock | 13 + crates/hir-def/Cargo.toml | 1 + crates/hir-def/src/adt.rs | 2 +- crates/hir-def/src/attr.rs | 332 +----------------- crates/hir-def/src/body.rs | 8 +- crates/hir-def/src/body/lower.rs | 2 +- crates/hir-def/src/data.rs | 2 +- crates/hir-def/src/db.rs | 2 +- crates/hir-def/src/expr.rs | 2 +- crates/hir-def/src/generics.rs | 2 +- crates/hir-def/src/item_scope.rs | 6 +- crates/hir-def/src/item_tree.rs | 15 +- crates/hir-def/src/item_tree/lower.rs | 24 +- crates/hir-def/src/item_tree/pretty.rs | 1 - crates/hir-def/src/keys.rs | 3 +- crates/hir-def/src/lib.rs | 13 +- crates/hir-def/src/nameres/attr_resolution.rs | 3 +- crates/hir-def/src/nameres/collector.rs | 3 +- crates/hir-def/src/nameres/diagnostics.rs | 3 +- crates/hir-def/src/path.rs | 2 +- crates/hir-def/src/path/lower.rs | 3 +- crates/hir-def/src/pretty.rs | 2 +- crates/hir-def/src/resolver.rs | 2 +- crates/hir-def/src/type_ref.rs | 2 +- crates/hir-expand/Cargo.toml | 1 + crates/hir-expand/src/attrs.rs | 313 +++++++++++++++++ crates/hir-expand/src/lib.rs | 3 + crates/hir-ty/Cargo.toml | 1 + crates/hir-ty/src/display.rs | 2 +- crates/hir-ty/src/interner.rs | 7 +- crates/hir-ty/src/lower.rs | 2 +- crates/hir-ty/src/utils.rs | 2 +- crates/hir/src/lib.rs | 3 +- crates/hir/src/semantics/source_to_def.rs | 3 +- crates/intern/Cargo.toml | 13 + .../src/intern.rs => intern/src/lib.rs} | 19 +- 36 files changed, 435 insertions(+), 382 deletions(-) create mode 100644 crates/hir-expand/src/attrs.rs create mode 100644 crates/intern/Cargo.toml rename crates/{hir-def/src/intern.rs => intern/src/lib.rs} (92%) diff --git a/Cargo.lock b/Cargo.lock index d27ae416f04b9..5f426d8856263 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -519,6 +519,7 @@ dependencies = [ "hkalbasi-rustc-ap-rustc_abi", "hkalbasi-rustc-ap-rustc_index", "indexmap", + "intern", "itertools", "la-arena", "limit", @@ -544,6 +545,7 @@ dependencies = [ "either", "expect-test", "hashbrown", + "intern", "itertools", "la-arena", 
"limit", @@ -574,6 +576,7 @@ dependencies = [ "hir-def", "hir-expand", "hkalbasi-rustc-ap-rustc_index", + "intern", "itertools", "la-arena", "limit", @@ -803,6 +806,16 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "intern" +version = "0.0.0" +dependencies = [ + "dashmap", + "hashbrown", + "once_cell", + "rustc-hash", +] + [[package]] name = "itertools" version = "0.10.5" diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 698be76656cce..a1078427707b9 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -29,6 +29,7 @@ smallvec = "1.10.0" tracing = "0.1.35" stdx = { path = "../stdx", version = "0.0.0" } +intern = { path = "../intern", version = "0.0.0" } base-db = { path = "../base-db", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } profile = { path = "../profile", version = "0.0.0" } diff --git a/crates/hir-def/src/adt.rs b/crates/hir-def/src/adt.rs index db3b419488147..cd35ba00f6f52 100644 --- a/crates/hir-def/src/adt.rs +++ b/crates/hir-def/src/adt.rs @@ -8,6 +8,7 @@ use hir_expand::{ name::{AsName, Name}, HirFileId, InFile, }; +use intern::Interned; use la_arena::{Arena, ArenaMap}; use rustc_abi::{Integer, IntegerType}; use syntax::ast::{self, HasName, HasVisibility}; @@ -17,7 +18,6 @@ use crate::{ body::{CfgExpander, LowerCtx}, builtin_type::{BuiltinInt, BuiltinUint}, db::DefDatabase, - intern::Interned, item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId}, layout::{Align, ReprFlags, ReprOptions}, nameres::diagnostics::DefDiagnostic, diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index ab5d180e1bb9e..eb88a74e448de 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -1,27 +1,27 @@ //! A higher level attributes based on TokenTree, with also some shortcuts. -use std::{fmt, hash::Hash, ops, sync::Arc}; +use std::{hash::Hash, ops, sync::Arc}; use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use either::Either; -use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile}; +use hir_expand::{ + attrs::{collect_attrs, Attr, AttrId, RawAttrs}, + HirFileId, InFile, +}; use itertools::Itertools; use la_arena::{ArenaMap, Idx, RawIdx}; -use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; -use smallvec::{smallvec, SmallVec}; +use mbe::DelimiterKind; use syntax::{ - ast::{self, AstNode, HasAttrs, IsString}, - match_ast, AstPtr, AstToken, SmolStr, SyntaxNode, TextRange, TextSize, + ast::{self, HasAttrs, IsString}, + AstPtr, AstToken, SmolStr, TextRange, TextSize, }; use tt::Subtree; use crate::{ db::DefDatabase, - intern::Interned, item_tree::{AttrOwner, Fields, ItemTreeId, ItemTreeNode}, nameres::{ModuleOrigin, ModuleSource}, - path::{ModPath, PathKind}, src::{HasChildSource, HasSource}, AdtId, AttrDefId, EnumId, GenericParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroId, VariantId, @@ -47,12 +47,6 @@ impl From for String { } } -/// Syntactical attributes, without filtering of `cfg_attr`s. 
-#[derive(Default, Debug, Clone, PartialEq, Eq)] -pub(crate) struct RawAttrs { - entries: Option>, -} - #[derive(Default, Debug, Clone, PartialEq, Eq)] pub struct Attrs(RawAttrs); @@ -62,30 +56,21 @@ pub struct AttrsWithOwner { owner: AttrDefId, } -impl ops::Deref for RawAttrs { - type Target = [Attr]; - - fn deref(&self) -> &[Attr] { - match &self.entries { - Some(it) => &*it, - None => &[], - } - } -} impl Attrs { pub fn get(&self, id: AttrId) -> Option<&Attr> { (**self).iter().find(|attr| attr.id == id) } + + pub(crate) fn filter(db: &dyn DefDatabase, krate: CrateId, raw_attrs: RawAttrs) -> Attrs { + Attrs(raw_attrs.filter(db.upcast(), krate)) + } } impl ops::Deref for Attrs { type Target = [Attr]; fn deref(&self) -> &[Attr] { - match &self.0.entries { - Some(it) => &*it, - None => &[], - } + &self.0 } } @@ -97,114 +82,6 @@ impl ops::Deref for AttrsWithOwner { } } -impl RawAttrs { - pub(crate) const EMPTY: Self = Self { entries: None }; - - pub(crate) fn new(db: &dyn DefDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self { - let entries = collect_attrs(owner) - .filter_map(|(id, attr)| match attr { - Either::Left(attr) => { - attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id)) - } - Either::Right(comment) => comment.doc_comment().map(|doc| Attr { - id, - input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), - path: Interned::new(ModPath::from(hir_expand::name!(doc))), - }), - }) - .collect::>(); - - Self { entries: if entries.is_empty() { None } else { Some(entries) } } - } - - fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self { - let hygiene = Hygiene::new(db.upcast(), owner.file_id); - Self::new(db, owner.value, &hygiene) - } - - pub(crate) fn merge(&self, other: Self) -> Self { - // FIXME: This needs to fixup `AttrId`s - match (&self.entries, other.entries) { - (None, None) => Self::EMPTY, - (None, entries @ Some(_)) => Self { entries }, - (Some(entries), None) => Self { entries: Some(entries.clone()) }, - (Some(a), Some(b)) => { - let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1); - Self { - entries: Some( - a.iter() - .cloned() - .chain(b.iter().map(|it| { - let mut it = it.clone(); - it.id.ast_index += last_ast_index; - it - })) - .collect(), - ), - } - } - } - } - - /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`. - pub(crate) fn filter(self, db: &dyn DefDatabase, krate: CrateId) -> Attrs { - let has_cfg_attrs = self.iter().any(|attr| { - attr.path.as_ident().map_or(false, |name| *name == hir_expand::name![cfg_attr]) - }); - if !has_cfg_attrs { - return Attrs(self); - } - - let crate_graph = db.crate_graph(); - let new_attrs = self - .iter() - .flat_map(|attr| -> SmallVec<[_; 1]> { - let is_cfg_attr = - attr.path.as_ident().map_or(false, |name| *name == hir_expand::name![cfg_attr]); - if !is_cfg_attr { - return smallvec![attr.clone()]; - } - - let subtree = match attr.token_tree_value() { - Some(it) => it, - _ => return smallvec![attr.clone()], - }; - - // Input subtree is: `(cfg, $(attr),+)` - // Split it up into a `cfg` subtree and the `attr` subtrees. - // FIXME: There should be a common API for this. - let mut parts = subtree.token_trees.split(|tt| { - matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. 
}))) - }); - let cfg = match parts.next() { - Some(it) => it, - None => return smallvec![], - }; - let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; - let cfg = CfgExpr::parse(&cfg); - let index = attr.id; - let attrs = parts.filter(|a| !a.is_empty()).filter_map(|attr| { - let tree = Subtree { delimiter: None, token_trees: attr.to_vec() }; - // FIXME hygiene - let hygiene = Hygiene::new_unhygienic(); - Attr::from_tt(db, &tree, &hygiene, index) - }); - - let cfg_options = &crate_graph[krate].cfg_options; - if cfg_options.check(&cfg) == Some(false) { - smallvec![] - } else { - cov_mark::hit!(cfg_attr_active); - - attrs.collect() - } - }) - .collect(); - - Attrs(RawAttrs { entries: Some(new_attrs) }) - } -} - impl Attrs { pub const EMPTY: Self = Self(RawAttrs::EMPTY); @@ -403,7 +280,7 @@ impl AttrsWithOwner { .raw_attrs(AttrOwner::ModItem(definition_tree_id.value.into())) .clone(), ModuleOrigin::BlockExpr { block } => RawAttrs::from_attrs_owner( - db, + db.upcast(), InFile::new(block.file_id, block.to_node(db.upcast())) .as_ref() .map(|it| it as &dyn ast::HasAttrs), @@ -439,7 +316,7 @@ impl AttrsWithOwner { GenericParamId::ConstParamId(it) => { let src = it.parent().child_source(db); RawAttrs::from_attrs_owner( - db, + db.upcast(), src.with_value(src.value[it.local_id()].as_ref().either( |it| match it { ast::TypeOrConstParam::Type(it) => it as _, @@ -452,7 +329,7 @@ impl AttrsWithOwner { GenericParamId::TypeParamId(it) => { let src = it.parent().child_source(db); RawAttrs::from_attrs_owner( - db, + db.upcast(), src.with_value(src.value[it.local_id()].as_ref().either( |it| match it { ast::TypeOrConstParam::Type(it) => it as _, @@ -464,14 +341,14 @@ impl AttrsWithOwner { } GenericParamId::LifetimeParamId(it) => { let src = it.parent.child_source(db); - RawAttrs::from_attrs_owner(db, src.with_value(&src.value[it.local_id])) + RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id])) } }, AttrDefId::ExternBlockId(it) => attrs_from_item_tree(it.lookup(db).id, db), }; - let attrs = raw_attrs.filter(db, def.krate(db)); - Self { attrs, owner: def } + let attrs = raw_attrs.filter(db.upcast(), def.krate(db)); + Self { attrs: Attrs(attrs), owner: def } } pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap { @@ -627,40 +504,6 @@ fn doc_indent(attrs: &Attrs) -> usize { .unwrap_or(0) } -fn inner_attributes( - syntax: &SyntaxNode, -) -> Option>> { - let node = match_ast! { - match syntax { - ast::SourceFile(_) => syntax.clone(), - ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(), - ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(), - ast::Impl(it) => it.assoc_item_list()?.syntax().clone(), - ast::Module(it) => it.item_list()?.syntax().clone(), - ast::BlockExpr(it) => { - use syntax::SyntaxKind::{BLOCK_EXPR , EXPR_STMT}; - // Block expressions accept outer and inner attributes, but only when they are the outer - // expression of an expression statement or the final expression of another block expression. 
- let may_carry_attributes = matches!( - it.syntax().parent().map(|it| it.kind()), - Some(BLOCK_EXPR | EXPR_STMT) - ); - if !may_carry_attributes { - return None - } - syntax.clone() - }, - _ => return None, - } - }; - - let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el { - Either::Left(attr) => attr.kind().is_inner(), - Either::Right(comment) => comment.is_inner(), - }); - Some(attrs) -} - #[derive(Debug)] pub struct AttrSourceMap { source: Vec>, @@ -779,128 +622,6 @@ fn get_doc_string_in_attr(it: &ast::Attr) -> Option { } } -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct AttrId { - pub(crate) ast_index: u32, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Attr { - pub(crate) id: AttrId, - pub(crate) path: Interned, - pub(crate) input: Option>, -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum AttrInput { - /// `#[attr = "string"]` - Literal(SmolStr), - /// `#[attr(subtree)]` - TokenTree(tt::Subtree, mbe::TokenMap), -} - -impl fmt::Display for AttrInput { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()), - AttrInput::TokenTree(subtree, _) => subtree.fmt(f), - } - } -} - -impl Attr { - fn from_src( - db: &dyn DefDatabase, - ast: ast::Meta, - hygiene: &Hygiene, - id: AttrId, - ) -> Option { - let path = Interned::new(ModPath::from_src(db.upcast(), ast.path()?, hygiene)?); - let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { - let value = match lit.kind() { - ast::LiteralKind::String(string) => string.value()?.into(), - _ => lit.syntax().first_token()?.text().trim_matches('"').into(), - }; - Some(Interned::new(AttrInput::Literal(value))) - } else if let Some(tt) = ast.token_tree() { - let (tree, map) = syntax_node_to_token_tree(tt.syntax()); - Some(Interned::new(AttrInput::TokenTree(tree, map))) - } else { - None - }; - Some(Attr { id, path, input }) - } - - fn from_tt( - db: &dyn DefDatabase, - tt: &tt::Subtree, - hygiene: &Hygiene, - id: AttrId, - ) -> Option { - let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); - let ast = ast::Meta::cast(parse.syntax_node())?; - - Self::from_src(db, ast, hygiene, id) - } - - pub fn path(&self) -> &ModPath { - &self.path - } -} - -impl Attr { - /// #[path = "string"] - pub fn string_value(&self) -> Option<&SmolStr> { - match self.input.as_deref()? { - AttrInput::Literal(it) => Some(it), - _ => None, - } - } - - /// #[path(ident)] - pub fn single_ident_value(&self) -> Option<&tt::Ident> { - match self.input.as_deref()? { - AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees { - [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident), - _ => None, - }, - _ => None, - } - } - - /// #[path TokenTree] - pub fn token_tree_value(&self) -> Option<&Subtree> { - match self.input.as_deref()? { - AttrInput::TokenTree(subtree, _) => Some(subtree), - _ => None, - } - } - - /// Parses this attribute as a token tree consisting of comma separated paths. - pub fn parse_path_comma_token_tree(&self) -> Option + '_> { - let args = self.token_tree_value()?; - - if args.delimiter_kind() != Some(DelimiterKind::Parenthesis) { - return None; - } - let paths = args - .token_trees - .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. 
})))) - .filter_map(|tts| { - if tts.is_empty() { - return None; - } - let segments = tts.iter().filter_map(|tt| match tt { - tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => Some(id.as_name()), - _ => None, - }); - Some(ModPath::from_segments(PathKind::Plain, segments)) - }); - - Some(paths) - } -} - #[derive(Debug, Clone, Copy)] pub struct AttrQuery<'attr> { attrs: &'attr Attrs, @@ -953,21 +674,6 @@ fn attrs_from_item_tree(id: ItemTreeId, db: &dyn DefDatabase tree.raw_attrs(mod_item.into()).clone() } -fn collect_attrs( - owner: &dyn ast::HasAttrs, -) -> impl Iterator)> { - let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten(); - let outer_attrs = - ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el { - Either::Left(attr) => attr.kind().is_outer(), - Either::Right(comment) => comment.is_outer(), - }); - outer_attrs - .chain(inner_attrs) - .enumerate() - .map(|(id, attr)| (AttrId { ast_index: id as u32 }, attr)) -} - pub(crate) fn variants_attrs_source_map( db: &dyn DefDatabase, def: EnumId, diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 78fbaa9d7d353..9713256813eb6 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -12,7 +12,9 @@ use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use drop_bomb::DropBomb; use either::Either; -use hir_expand::{hygiene::Hygiene, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId}; +use hir_expand::{ + attrs::RawAttrs, hygiene::Hygiene, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId, +}; use la_arena::{Arena, ArenaMap}; use limit::Limit; use profile::Count; @@ -20,7 +22,7 @@ use rustc_hash::FxHashMap; use syntax::{ast, AstPtr, SyntaxNodePtr}; use crate::{ - attr::{Attrs, RawAttrs}, + attr::Attrs, db::DefDatabase, expr::{dummy_expr_id, Expr, ExprId, Label, LabelId, Pat, PatId}, item_scope::BuiltinShadowMode, @@ -64,7 +66,7 @@ impl CfgExpander { } pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs { - RawAttrs::new(db, owner, &self.hygiene).filter(db, self.krate) + Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene)) } pub(crate) fn is_cfg_enabled(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> bool { diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index e8da24e3addaa..4c29e16b2b813 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -10,6 +10,7 @@ use hir_expand::{ name::{name, AsName, Name}, AstId, ExpandError, HirFileId, InFile, }; +use intern::Interned; use la_arena::Arena; use once_cell::unsync::OnceCell; use profile::Count; @@ -33,7 +34,6 @@ use crate::{ Label, LabelId, Literal, MatchArm, Movability, Pat, PatId, RecordFieldPat, RecordLitField, Statement, }, - intern::Interned, item_scope::BuiltinShadowMode, path::{GenericArgs, Path}, type_ref::{Mutability, Rawness, TypeRef}, diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index e6b05f27a5447..f461e85b0182c 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroDefKind}; +use intern::Interned; use smallvec::SmallVec; use syntax::ast; @@ -10,7 +11,6 @@ use crate::{ attr::Attrs, body::{Expander, Mark}, db::DefDatabase, - intern::Interned, item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId}, nameres::{ attr_resolution::ResolvedAttr, diff --git 
a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs index 431c8255497b6..65cdd1b69b17d 100644 --- a/crates/hir-def/src/db.rs +++ b/crates/hir-def/src/db.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use base_db::{salsa, CrateId, SourceDatabase, Upcast}; use either::Either; use hir_expand::{db::AstDatabase, HirFileId}; +use intern::Interned; use la_arena::ArenaMap; use syntax::{ast, AstPtr, SmolStr}; @@ -17,7 +18,6 @@ use crate::{ }, generics::GenericParams, import_map::ImportMap, - intern::Interned, item_tree::{AttrOwner, ItemTree}, lang_item::{LangItemTarget, LangItems}, nameres::{diagnostics::DefDiagnostic, DefMap}, diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index 7b6569421195d..48028b7c6a82a 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -15,11 +15,11 @@ use std::fmt; use hir_expand::name::Name; +use intern::Interned; use la_arena::{Idx, RawIdx}; use crate::{ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint}, - intern::Interned, path::{GenericArgs, Path}, type_ref::{Mutability, Rawness, TypeRef}, BlockId, diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index f74559f5d6634..b2ab0c30e0370 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -9,6 +9,7 @@ use hir_expand::{ name::{AsName, Name}, ExpandResult, HirFileId, InFile, }; +use intern::Interned; use la_arena::{Arena, ArenaMap, Idx}; use once_cell::unsync::Lazy; use std::ops::DerefMut; @@ -20,7 +21,6 @@ use crate::{ child_by_source::ChildBySource, db::DefDatabase, dyn_map::DynMap, - intern::Interned, keys, src::{HasChildSource, HasSource}, type_ref::{LifetimeRef, TypeBound, TypeRef}, diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs index c7b213b7e9814..53a4173ff4230 100644 --- a/crates/hir-def/src/item_scope.rs +++ b/crates/hir-def/src/item_scope.rs @@ -4,7 +4,7 @@ use std::collections::hash_map::Entry; use base_db::CrateId; -use hir_expand::{name::Name, AstId, MacroCallId}; +use hir_expand::{attrs::AttrId, name::Name, AstId, MacroCallId}; use itertools::Itertools; use once_cell::sync::Lazy; use profile::Count; @@ -14,8 +14,8 @@ use stdx::format_to; use syntax::ast; use crate::{ - attr::AttrId, db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, - ConstId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId, + db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId, HasModule, + ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId, }; #[derive(Copy, Clone, Debug)] diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 80297f8adf16e..3e1f7d44460f1 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -48,10 +48,12 @@ use base_db::CrateId; use either::Either; use hir_expand::{ ast_id_map::FileAstId, + attrs::RawAttrs, hygiene::Hygiene, name::{name, AsName, Name}, ExpandTo, HirFileId, InFile, }; +use intern::Interned; use la_arena::{Arena, Idx, IdxRange, RawIdx}; use profile::Count; use rustc_hash::FxHashMap; @@ -60,10 +62,9 @@ use stdx::never; use syntax::{ast, match_ast, SyntaxKind}; use crate::{ - attr::{Attrs, RawAttrs}, + attr::Attrs, db::DefDatabase, generics::GenericParams, - intern::Interned, path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind}, type_ref::{Mutability, TraitRef, TypeBound, TypeRef}, visibility::RawVisibility, @@ -120,7 +121,7 @@ impl ItemTree { let mut item_tree = match_ast! 
{ match syntax { ast::SourceFile(file) => { - top_attrs = Some(RawAttrs::new(db, &file, ctx.hygiene())); + top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene())); ctx.lower_module_items(&file) }, ast::MacroItems(items) => { @@ -152,7 +153,11 @@ impl ItemTree { /// Returns the inner attributes of the source file. pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: CrateId) -> Attrs { - self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone().filter(db, krate) + Attrs::filter( + db, + krate, + self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone(), + ) } pub(crate) fn raw_attrs(&self, of: AttrOwner) -> &RawAttrs { @@ -160,7 +165,7 @@ impl ItemTree { } pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: CrateId, of: AttrOwner) -> Attrs { - self.raw_attrs(of).clone().filter(db, krate) + Attrs::filter(db, krate, self.raw_attrs(of).clone()) } pub fn pretty_print(&self) -> String { diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index b25274bccc9a4..27705cbbbdc58 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -99,7 +99,7 @@ impl<'a> Ctx<'a> { } fn lower_mod_item(&mut self, item: &ast::Item) -> Option { - let attrs = RawAttrs::new(self.db, item, self.hygiene()); + let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene()); let item: ModItem = match item { ast::Item::Struct(ast) => self.lower_struct(ast)?.into(), ast::Item::Union(ast) => self.lower_union(ast)?.into(), @@ -173,7 +173,7 @@ impl<'a> Ctx<'a> { for field in fields.fields() { if let Some(data) = self.lower_record_field(&field) { let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene())); + self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); } } let end = self.next_field_idx(); @@ -194,7 +194,7 @@ impl<'a> Ctx<'a> { for (i, field) in fields.fields().enumerate() { let data = self.lower_tuple_field(i, &field); let idx = self.data().fields.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene())); + self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); } let end = self.next_field_idx(); IdxRange::new(start..end) @@ -239,7 +239,10 @@ impl<'a> Ctx<'a> { for variant in variants.variants() { if let Some(data) = self.lower_variant(&variant) { let idx = self.data().variants.alloc(data); - self.add_attrs(idx.into(), RawAttrs::new(self.db, &variant, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new(self.db.upcast(), &variant, self.hygiene()), + ); } } let end = self.next_variant_idx(); @@ -283,7 +286,10 @@ impl<'a> Ctx<'a> { }; let ty = Interned::new(self_type); let idx = self.data().params.alloc(Param::Normal(None, ty)); - self.add_attrs(idx.into(), RawAttrs::new(self.db, &self_param, self.hygiene())); + self.add_attrs( + idx.into(), + RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()), + ); has_self_param = true; } for param in param_list.params() { @@ -307,7 +313,7 @@ impl<'a> Ctx<'a> { self.data().params.alloc(Param::Normal(name, ty)) } }; - self.add_attrs(idx.into(), RawAttrs::new(self.db, ¶m, self.hygiene())); + self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), ¶m, self.hygiene())); } } let end_param = self.next_param_idx(); @@ -442,7 +448,7 @@ impl<'a> Ctx<'a> { let items = trait_def.assoc_item_list().map(|list| { list.assoc_items() .filter_map(|item| { - let attrs = RawAttrs::new(self.db, &item, 
self.hygiene()); + let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); self.lower_assoc_item(&item).map(|item| { self.add_attrs(ModItem::from(item).into(), attrs); item @@ -471,7 +477,7 @@ impl<'a> Ctx<'a> { .flat_map(|it| it.assoc_items()) .filter_map(|item| { let assoc = self.lower_assoc_item(&item)?; - let attrs = RawAttrs::new(self.db, &item, self.hygiene()); + let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); self.add_attrs(ModItem::from(assoc).into(), attrs); Some(assoc) }) @@ -541,7 +547,7 @@ impl<'a> Ctx<'a> { // (in other words, the knowledge that they're in an extern block must not be used). // This is because an extern block can contain macros whose ItemTree's top-level items // should be considered to be in an extern block too. - let attrs = RawAttrs::new(self.db, &item, self.hygiene()); + let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); let id: ModItem = match item { ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(), ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(), diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 48c40df22ff5f..8f230b87d0101 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -3,7 +3,6 @@ use std::fmt::{self, Write}; use crate::{ - attr::RawAttrs, generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, pretty::{print_path, print_type_bounds, print_type_ref}, visibility::RawVisibility, diff --git a/crates/hir-def/src/keys.rs b/crates/hir-def/src/keys.rs index c5cb9a2af5374..72beec8186c1f 100644 --- a/crates/hir-def/src/keys.rs +++ b/crates/hir-def/src/keys.rs @@ -2,12 +2,11 @@ use std::marker::PhantomData; -use hir_expand::MacroCallId; +use hir_expand::{attrs::AttrId, MacroCallId}; use rustc_hash::FxHashMap; use syntax::{ast, AstNode, AstPtr}; use crate::{ - attr::AttrId, dyn_map::{DynMap, Policy}, ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId, diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 8267ef09cb0a2..cc0ea14d0156a 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -28,7 +28,6 @@ pub mod dyn_map; pub mod keys; pub mod item_tree; -pub mod intern; pub mod adt; pub mod data; @@ -61,10 +60,10 @@ use std::{ sync::Arc, }; -use attr::Attr; use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind}; use hir_expand::{ ast_id_map::FileAstId, + attrs::{Attr, AttrId, AttrInput}, builtin_attr_macro::BuiltinAttrExpander, builtin_derive_macro::BuiltinDeriveExpander, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, @@ -82,7 +81,6 @@ use syntax::ast; use crate::{ adt::VariantData, - attr::AttrId, builtin_type::BuiltinType, item_tree::{ Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, ModItem, @@ -971,7 +969,7 @@ fn attr_macro_as_call_id( is_derive: bool, ) -> MacroCallId { let mut arg = match macro_attr.input.as_deref() { - Some(attr::AttrInput::TokenTree(tt, map)) => (tt.clone(), map.clone()), + Some(AttrInput::TokenTree(tt, map)) => (tt.clone(), map.clone()), _ => Default::default(), }; @@ -990,3 +988,10 @@ fn attr_macro_as_call_id( ); res } +intern::impl_internable!( + crate::type_ref::TypeRef, + crate::type_ref::TraitRef, + crate::type_ref::TypeBound, + crate::path::GenericArgs, + generics::GenericParams, +); diff --git 
a/crates/hir-def/src/nameres/attr_resolution.rs b/crates/hir-def/src/nameres/attr_resolution.rs index 3650204ee9d73..79cabeb0fb7a4 100644 --- a/crates/hir-def/src/nameres/attr_resolution.rs +++ b/crates/hir-def/src/nameres/attr_resolution.rs @@ -1,10 +1,9 @@ //! Post-nameres attribute resolution. -use hir_expand::MacroCallId; +use hir_expand::{attrs::Attr, MacroCallId}; use syntax::{ast, SmolStr}; use crate::{ - attr::Attr, attr_macro_as_call_id, builtin_attr, db::DefDatabase, item_scope::BuiltinShadowMode, diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 160203b778344..ad31e9aac2b5b 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -10,6 +10,7 @@ use cfg::{CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ ast_id_map::FileAstId, + attrs::{Attr, AttrId}, builtin_attr_macro::find_builtin_attr, builtin_derive_macro::find_builtin_derive, builtin_fn_macro::find_builtin_macro, @@ -26,7 +27,7 @@ use stdx::always; use syntax::{ast, SmolStr}; use crate::{ - attr::{Attr, AttrId, Attrs}, + attr::Attrs, attr_macro_as_call_id, db::DefDatabase, derive_macro_as_call_id, diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index 066142291981d..74b25f4cc9828 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -2,12 +2,11 @@ use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; -use hir_expand::MacroCallKind; +use hir_expand::{attrs::AttrId, MacroCallKind}; use la_arena::Idx; use syntax::ast::{self, AnyHasAttrs}; use crate::{ - attr::AttrId, item_tree::{self, ItemTreeId}, nameres::LocalModuleId, path::ModPath, diff --git a/crates/hir-def/src/path.rs b/crates/hir-def/src/path.rs index 592223f7d85fe..25a23fcd61a51 100644 --- a/crates/hir-def/src/path.rs +++ b/crates/hir-def/src/path.rs @@ -8,10 +8,10 @@ use std::{ use crate::{ body::LowerCtx, - intern::Interned, type_ref::{ConstScalarOrPath, LifetimeRef}, }; use hir_expand::name::Name; +use intern::Interned; use syntax::ast; use crate::type_ref::{TypeBound, TypeRef}; diff --git a/crates/hir-def/src/path/lower.rs b/crates/hir-def/src/path/lower.rs index cfa3a6baaf8b4..d570191595b68 100644 --- a/crates/hir-def/src/path/lower.rs +++ b/crates/hir-def/src/path/lower.rs @@ -1,9 +1,10 @@ //! 
Transforms syntax into `Path` objects, ideally with accounting for hygiene -use crate::{intern::Interned, type_ref::ConstScalarOrPath}; +use crate::type_ref::ConstScalarOrPath; use either::Either; use hir_expand::name::{name, AsName}; +use intern::Interned; use syntax::ast::{self, AstNode, HasTypeBounds}; use super::AssociatedTypeBinding; diff --git a/crates/hir-def/src/pretty.rs b/crates/hir-def/src/pretty.rs index befd0c5ffa055..1c0bd204d309b 100644 --- a/crates/hir-def/src/pretty.rs +++ b/crates/hir-def/src/pretty.rs @@ -3,10 +3,10 @@ use std::fmt::{self, Write}; use hir_expand::mod_path::PathKind; +use intern::Interned; use itertools::Itertools; use crate::{ - intern::Interned, path::{GenericArg, GenericArgs, Path}, type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef}, }; diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 1ef7f9577fe8b..86958e3daea4d 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -4,6 +4,7 @@ use std::{hash::BuildHasherDefault, sync::Arc}; use base_db::CrateId; use hir_expand::name::{name, Name}; use indexmap::IndexMap; +use intern::Interned; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; @@ -13,7 +14,6 @@ use crate::{ db::DefDatabase, expr::{ExprId, LabelId, PatId}, generics::{GenericParams, TypeOrConstParamData}, - intern::Interned, item_scope::{BuiltinShadowMode, BUILTIN_SCOPE}, nameres::DefMap, path::{ModPath, PathKind}, diff --git a/crates/hir-def/src/type_ref.rs b/crates/hir-def/src/type_ref.rs index f8bb78ddcfe02..0149fdaa43cf3 100644 --- a/crates/hir-def/src/type_ref.rs +++ b/crates/hir-def/src/type_ref.rs @@ -7,13 +7,13 @@ use hir_expand::{ name::{AsName, Name}, AstId, }; +use intern::Interned; use syntax::ast::{self, HasName}; use crate::{ body::LowerCtx, builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, expr::Literal, - intern::Interned, path::Path, }; diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml index 77eb1fd450433..a73e690a7e055 100644 --- a/crates/hir-expand/Cargo.toml +++ b/crates/hir-expand/Cargo.toml @@ -22,6 +22,7 @@ hashbrown = { version = "0.12.1", features = [ smallvec = { version = "1.10.0", features = ["const_new"] } stdx = { path = "../stdx", version = "0.0.0" } +intern = { path = "../intern", version = "0.0.0" } base-db = { path = "../base-db", version = "0.0.0" } cfg = { path = "../cfg", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs new file mode 100644 index 0000000000000..6967d153271f1 --- /dev/null +++ b/crates/hir-expand/src/attrs.rs @@ -0,0 +1,313 @@ +use std::{fmt, ops, sync::Arc}; + +use base_db::CrateId; +use cfg::CfgExpr; +use either::Either; +use intern::Interned; +use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; +use smallvec::{smallvec, SmallVec}; +use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode}; +use tt::Subtree; + +use crate::{ + db::AstDatabase, + hygiene::Hygiene, + mod_path::{ModPath, PathKind}, + name::AsName, + InFile, +}; + +/// Syntactical attributes, without filtering of `cfg_attr`s. 
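For orientation, since the `filter` method added further down in this new file handles it: `cfg_attr` processing turns a conditional attribute into its payload when the predicate holds and into nothing otherwise. A minimal sketch (illustrative only, not part of the patch):

```rust
// What `RawAttrs` sees syntactically:
#[cfg_attr(test, derive(Debug, PartialEq))]
#[allow(dead_code)]
struct Config {
    verbose: bool,
}

// After `cfg_attr` filtering with the `test` cfg active, this is equivalent to
// writing `#[derive(Debug, PartialEq)]` directly; with `test` inactive, the
// attribute disappears entirely.
```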
+#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub struct RawAttrs { + entries: Option>, +} + +impl ops::Deref for RawAttrs { + type Target = [Attr]; + + fn deref(&self) -> &[Attr] { + match &self.entries { + Some(it) => &*it, + None => &[], + } + } +} + +impl RawAttrs { + pub const EMPTY: Self = Self { entries: None }; + + pub fn new(db: &dyn AstDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self { + let entries = collect_attrs(owner) + .filter_map(|(id, attr)| match attr { + Either::Left(attr) => { + attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id)) + } + Either::Right(comment) => comment.doc_comment().map(|doc| Attr { + id, + input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), + path: Interned::new(ModPath::from(crate::name!(doc))), + }), + }) + .collect::>(); + + Self { entries: if entries.is_empty() { None } else { Some(entries) } } + } + + pub fn from_attrs_owner(db: &dyn AstDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self { + let hygiene = Hygiene::new(db, owner.file_id); + Self::new(db, owner.value, &hygiene) + } + + pub fn merge(&self, other: Self) -> Self { + match (&self.entries, other.entries) { + (None, None) => Self::EMPTY, + (None, entries @ Some(_)) => Self { entries }, + (Some(entries), None) => Self { entries: Some(entries.clone()) }, + (Some(a), Some(b)) => { + let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1); + Self { + entries: Some( + a.iter() + .cloned() + .chain(b.iter().map(|it| { + let mut it = it.clone(); + it.id.ast_index += last_ast_index; + it + })) + .collect(), + ), + } + } + } + } + + /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`. + pub fn filter(self, db: &dyn AstDatabase, krate: CrateId) -> RawAttrs { + let has_cfg_attrs = self + .iter() + .any(|attr| attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr])); + if !has_cfg_attrs { + return self; + } + + let crate_graph = db.crate_graph(); + let new_attrs = self + .iter() + .flat_map(|attr| -> SmallVec<[_; 1]> { + let is_cfg_attr = + attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]); + if !is_cfg_attr { + return smallvec![attr.clone()]; + } + + let subtree = match attr.token_tree_value() { + Some(it) => it, + _ => return smallvec![attr.clone()], + }; + + // Input subtree is: `(cfg, $(attr),+)` + // Split it up into a `cfg` subtree and the `attr` subtrees. + // FIXME: There should be a common API for this. + let mut parts = subtree.token_trees.split(|tt| { + matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. 
}))) + }); + let cfg = match parts.next() { + Some(it) => it, + None => return smallvec![], + }; + let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; + let cfg = CfgExpr::parse(&cfg); + let index = attr.id; + let attrs = parts.filter(|a| !a.is_empty()).filter_map(|attr| { + let tree = Subtree { delimiter: None, token_trees: attr.to_vec() }; + // FIXME hygiene + let hygiene = Hygiene::new_unhygienic(); + Attr::from_tt(db, &tree, &hygiene, index) + }); + + let cfg_options = &crate_graph[krate].cfg_options; + if cfg_options.check(&cfg) == Some(false) { + smallvec![] + } else { + cov_mark::hit!(cfg_attr_active); + + attrs.collect() + } + }) + .collect(); + + RawAttrs { entries: Some(new_attrs) } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct AttrId { + pub ast_index: u32, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Attr { + pub id: AttrId, + pub path: Interned, + pub input: Option>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum AttrInput { + /// `#[attr = "string"]` + Literal(SmolStr), + /// `#[attr(subtree)]` + TokenTree(tt::Subtree, mbe::TokenMap), +} + +impl fmt::Display for AttrInput { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()), + AttrInput::TokenTree(subtree, _) => subtree.fmt(f), + } + } +} + +impl Attr { + fn from_src( + db: &dyn AstDatabase, + ast: ast::Meta, + hygiene: &Hygiene, + id: AttrId, + ) -> Option { + let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?); + let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { + let value = match lit.kind() { + ast::LiteralKind::String(string) => string.value()?.into(), + _ => lit.syntax().first_token()?.text().trim_matches('"').into(), + }; + Some(Interned::new(AttrInput::Literal(value))) + } else if let Some(tt) = ast.token_tree() { + let (tree, map) = syntax_node_to_token_tree(tt.syntax()); + Some(Interned::new(AttrInput::TokenTree(tree, map))) + } else { + None + }; + Some(Attr { id, path, input }) + } + + fn from_tt( + db: &dyn AstDatabase, + tt: &tt::Subtree, + hygiene: &Hygiene, + id: AttrId, + ) -> Option { + let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem); + let ast = ast::Meta::cast(parse.syntax_node())?; + + Self::from_src(db, ast, hygiene, id) + } + + pub fn path(&self) -> &ModPath { + &self.path + } +} + +impl Attr { + /// #[path = "string"] + pub fn string_value(&self) -> Option<&SmolStr> { + match self.input.as_deref()? { + AttrInput::Literal(it) => Some(it), + _ => None, + } + } + + /// #[path(ident)] + pub fn single_ident_value(&self) -> Option<&tt::Ident> { + match self.input.as_deref()? { + AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees { + [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident), + _ => None, + }, + _ => None, + } + } + + /// #[path TokenTree] + pub fn token_tree_value(&self) -> Option<&Subtree> { + match self.input.as_deref()? { + AttrInput::TokenTree(subtree, _) => Some(subtree), + _ => None, + } + } + + /// Parses this attribute as a token tree consisting of comma separated paths. + pub fn parse_path_comma_token_tree(&self) -> Option + '_> { + let args = self.token_tree_value()?; + + if args.delimiter_kind() != Some(DelimiterKind::Parenthesis) { + return None; + } + let paths = args + .token_trees + .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. 
})))) + .filter_map(|tts| { + if tts.is_empty() { + return None; + } + let segments = tts.iter().filter_map(|tt| match tt { + tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => Some(id.as_name()), + _ => None, + }); + Some(ModPath::from_segments(PathKind::Plain, segments)) + }); + + Some(paths) + } +} + +pub fn collect_attrs( + owner: &dyn ast::HasAttrs, +) -> impl Iterator)> { + let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten(); + let outer_attrs = + ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el { + Either::Left(attr) => attr.kind().is_outer(), + Either::Right(comment) => comment.is_outer(), + }); + outer_attrs + .chain(inner_attrs) + .enumerate() + .map(|(id, attr)| (AttrId { ast_index: id as u32 }, attr)) +} + +fn inner_attributes( + syntax: &SyntaxNode, +) -> Option>> { + let node = match_ast! { + match syntax { + ast::SourceFile(_) => syntax.clone(), + ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(), + ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(), + ast::Impl(it) => it.assoc_item_list()?.syntax().clone(), + ast::Module(it) => it.item_list()?.syntax().clone(), + ast::BlockExpr(it) => { + use syntax::SyntaxKind::{BLOCK_EXPR , EXPR_STMT}; + // Block expressions accept outer and inner attributes, but only when they are the outer + // expression of an expression statement or the final expression of another block expression. + let may_carry_attributes = matches!( + it.syntax().parent().map(|it| it.kind()), + Some(BLOCK_EXPR | EXPR_STMT) + ); + if !may_carry_attributes { + return None + } + syntax.clone() + }, + _ => return None, + } + }; + + let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el { + Either::Left(attr) => attr.kind().is_inner(), + Either::Right(comment) => comment.is_inner(), + }); + Some(attrs) +} diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index bc5f9f3b8afd4..9d61588a8b511 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -17,6 +17,7 @@ pub mod proc_macro; pub mod quote; pub mod eager; pub mod mod_path; +pub mod attrs; mod fixup; pub use mbe::{Origin, ValueResult}; @@ -1031,3 +1032,5 @@ impl ExpandTo { pub struct UnresolvedMacro { pub path: ModPath, } + +intern::impl_internable!(ModPath, attrs::AttrInput); diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index ae837ac6dce88..75b33da1f230b 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -29,6 +29,7 @@ typed-arena = "2.0.1" rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false } stdx = { path = "../stdx", version = "0.0.0" } +intern = { path = "../intern", version = "0.0.0" } hir-def = { path = "../hir-def", version = "0.0.0" } hir-expand = { path = "../hir-expand", version = "0.0.0" } base-db = { path = "../base-db", version = "0.0.0" } diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 66e813eed8b4a..f9642aa7475ac 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -11,7 +11,6 @@ use hir_def::{ db::DefDatabase, find_path, generics::{TypeOrConstParamData, TypeParamProvenance}, - intern::{Internable, Interned}, item_scope::ItemInNs, path::{Path, PathKind}, type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef}, @@ -19,6 +18,7 @@ use hir_def::{ HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, TraitId, }; use hir_expand::{hygiene::Hygiene, name::Name}; +use intern::{Internable, Interned}; use 
itertools::Itertools; use smallvec::SmallVec; use syntax::SmolStr; diff --git a/crates/hir-ty/src/interner.rs b/crates/hir-ty/src/interner.rs index 441503a300e5c..7bf73560cbe6f 100644 --- a/crates/hir-ty/src/interner.rs +++ b/crates/hir-ty/src/interner.rs @@ -4,11 +4,8 @@ use crate::{chalk_db, tls, GenericArg}; use base_db::salsa::InternId; use chalk_ir::{Goal, GoalData}; -use hir_def::{ - intern::{impl_internable, InternStorage, Internable, Interned}, - type_ref::ConstScalar, - TypeAliasId, -}; +use hir_def::{type_ref::ConstScalar, TypeAliasId}; +use intern::{impl_internable, Interned}; use smallvec::SmallVec; use std::{fmt, sync::Arc}; diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 592410008a679..4b1f40f91d6e0 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -23,7 +23,6 @@ use hir_def::{ generics::{ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, }, - intern::Interned, lang_item::lang_attr, path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments}, resolver::{HasResolver, Resolver, TypeNs}, @@ -35,6 +34,7 @@ use hir_def::{ TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId, }; use hir_expand::{name::Name, ExpandResult}; +use intern::Interned; use itertools::Either; use la_arena::ArenaMap; use rustc_hash::FxHashSet; diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index 9893566bd549c..4f516e18be6e0 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -11,13 +11,13 @@ use hir_def::{ GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, }, - intern::Interned, resolver::{HasResolver, TypeNs}, type_ref::{TraitBoundModifier, TypeRef}, ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, }; use hir_expand::name::Name; +use intern::Interned; use itertools::Either; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 08fd4453dfca5..ad44e240423e0 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -107,7 +107,7 @@ pub use { cfg::{CfgAtom, CfgExpr, CfgOptions}, hir_def::{ adt::StructKind, - attr::{Attr, Attrs, AttrsWithOwner, Documentation}, + attr::{Attrs, AttrsWithOwner, Documentation}, builtin_attr::AttributeTemplate, find_path::PrefixKind, import_map, @@ -122,6 +122,7 @@ pub use { ModuleDefId, }, hir_expand::{ + attrs::Attr, name::{known, Name}, ExpandResult, HirFileId, InFile, MacroFile, Origin, }, diff --git a/crates/hir/src/semantics/source_to_def.rs b/crates/hir/src/semantics/source_to_def.rs index fa45e3c12eb00..2b5bfda1d4347 100644 --- a/crates/hir/src/semantics/source_to_def.rs +++ b/crates/hir/src/semantics/source_to_def.rs @@ -87,7 +87,6 @@ use base_db::FileId; use hir_def::{ - attr::AttrId, child_by_source::ChildBySource, dyn_map::DynMap, expr::{LabelId, PatId}, @@ -96,7 +95,7 @@ use hir_def::{ GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, VariantId, }; -use hir_expand::{name::AsName, HirFileId, MacroCallId}; +use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId}; use rustc_hash::FxHashMap; use smallvec::SmallVec; use stdx::impl_from; diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml new file mode 100644 index 0000000000000..dd5110255c50b --- /dev/null +++ b/crates/intern/Cargo.toml @@ -0,0 +1,13 @@ 
+[package] +name = "intern" +version = "0.0.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +# We need to freeze the version of the crate, as the raw-api feature is considered unstable +dashmap = { version = "=5.4.0", features = ["raw-api"] } +hashbrown = { version = "0.12.1", default-features = false } +once_cell = "1.15.0" +rustc-hash = "1.1.0" diff --git a/crates/hir-def/src/intern.rs b/crates/intern/src/lib.rs similarity index 92% rename from crates/hir-def/src/intern.rs rename to crates/intern/src/lib.rs index f08521a34032b..fb2903696b373 100644 --- a/crates/hir-def/src/intern.rs +++ b/crates/intern/src/lib.rs @@ -14,8 +14,6 @@ use hashbrown::HashMap; use once_cell::sync::OnceCell; use rustc_hash::FxHasher; -use crate::generics::GenericParams; - type InternMap = DashMap, (), BuildHasherDefault>; type Guard = dashmap::RwLockWriteGuard< 'static, @@ -204,9 +202,9 @@ pub trait Internable: Hash + Eq + 'static { #[doc(hidden)] macro_rules! _impl_internable { ( $($t:path),+ $(,)? ) => { $( - impl Internable for $t { - fn storage() -> &'static InternStorage { - static STORAGE: InternStorage<$t> = InternStorage::new(); + impl $crate::Internable for $t { + fn storage() -> &'static $crate::InternStorage { + static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new(); &STORAGE } } @@ -215,13 +213,4 @@ macro_rules! _impl_internable { pub use crate::_impl_internable as impl_internable; -impl_internable!( - crate::type_ref::TypeRef, - crate::type_ref::TraitRef, - crate::type_ref::TypeBound, - crate::path::ModPath, - crate::path::GenericArgs, - crate::attr::AttrInput, - GenericParams, - str, -); +impl_internable!(str,); From 25242fe93f6ae330abbaba96d5a91fd55ed1395a Mon Sep 17 00:00:00 2001 From: arcnmx Date: Mon, 9 Jan 2023 10:36:22 -0800 Subject: [PATCH 006/501] :arrow_up: rust-analyzer Merge commit '368e0bb32f1178cf162c2ce5f7e10b7ae211eb26' --- Cargo.lock | 89 +- crates/base-db/src/fixture.rs | 14 +- crates/base-db/src/input.rs | 21 +- crates/base-db/src/lib.rs | 4 +- crates/cfg/src/cfg_expr.rs | 2 +- crates/cfg/src/lib.rs | 6 +- crates/flycheck/src/lib.rs | 38 +- crates/hir-def/Cargo.toml | 2 + crates/hir-def/src/adt.rs | 113 +- crates/hir-def/src/attr.rs | 2 +- crates/hir-def/src/body.rs | 2 +- crates/hir-def/src/body/lower.rs | 4 + crates/hir-def/src/body/pretty.rs | 13 +- crates/hir-def/src/body/scope.rs | 2 +- crates/hir-def/src/builtin_attr.rs | 1 + crates/hir-def/src/data.rs | 49 +- crates/hir-def/src/expr.rs | 15 +- crates/hir-def/src/find_path.rs | 93 +- crates/hir-def/src/generics.rs | 4 +- crates/hir-def/src/import_map.rs | 12 +- crates/hir-def/src/item_scope.rs | 15 +- crates/hir-def/src/item_tree.rs | 4 +- crates/hir-def/src/layout.rs | 96 + crates/hir-def/src/lib.rs | 1 + crates/hir-def/src/macro_expansion_tests.rs | 4 +- .../macro_expansion_tests/builtin_fn_macro.rs | 6 +- .../hir-def/src/macro_expansion_tests/mbe.rs | 45 + .../src/macro_expansion_tests/mbe/matching.rs | 49 + crates/hir-def/src/nameres.rs | 2 +- crates/hir-def/src/nameres/collector.rs | 40 +- crates/hir-def/src/nameres/mod_resolution.rs | 14 +- crates/hir-def/src/nameres/path_resolution.rs | 6 +- crates/hir-def/src/nameres/proc_macro.rs | 78 +- .../hir-def/src/nameres/tests/incremental.rs | 8 +- crates/hir-def/src/nameres/tests/macros.rs | 22 + .../src/nameres/tests/mod_resolution.rs | 37 + crates/hir-def/src/pretty.rs | 8 +- crates/hir-def/src/resolver.rs | 7 +- crates/hir-expand/src/builtin_attr_macro.rs | 3 +- 
crates/hir-expand/src/builtin_fn_macro.rs | 17 +- crates/hir-expand/src/db.rs | 4 +- crates/hir-expand/src/eager.rs | 4 +- crates/hir-expand/src/fixup.rs | 2 +- crates/hir-expand/src/hygiene.rs | 8 +- crates/hir-expand/src/lib.rs | 227 +- crates/hir-expand/src/name.rs | 44 +- crates/hir-expand/src/quote.rs | 2 +- crates/hir-ty/Cargo.toml | 10 +- crates/hir-ty/src/autoderef.rs | 6 +- crates/hir-ty/src/builder.rs | 2 +- crates/hir-ty/src/chalk_db.rs | 2 + crates/hir-ty/src/consteval.rs | 18 +- crates/hir-ty/src/consteval/tests.rs | 5 +- crates/hir-ty/src/db.rs | 31 +- crates/hir-ty/src/diagnostics/match_check.rs | 15 +- .../match_check/deconstruct_pat.rs | 2 +- crates/hir-ty/src/display.rs | 87 +- crates/hir-ty/src/infer.rs | 116 +- crates/hir-ty/src/infer/expr.rs | 97 +- crates/hir-ty/src/infer/pat.rs | 4 +- crates/hir-ty/src/infer/path.rs | 40 +- crates/hir-ty/src/infer/unify.rs | 85 +- crates/hir-ty/src/interner.rs | 67 +- crates/hir-ty/src/lang_items.rs | 20 + crates/hir-ty/src/layout.rs | 279 ++ crates/hir-ty/src/layout/adt.rs | 134 + crates/hir-ty/src/layout/target.rs | 36 + crates/hir-ty/src/layout/tests.rs | 208 ++ crates/hir-ty/src/lib.rs | 73 +- crates/hir-ty/src/lower.rs | 54 +- crates/hir-ty/src/method_resolution.rs | 248 +- crates/hir-ty/src/tests.rs | 29 +- crates/hir-ty/src/tests/coercion.rs | 34 + crates/hir-ty/src/tests/incremental.rs | 4 +- crates/hir-ty/src/tests/macros.rs | 2 +- crates/hir-ty/src/tests/method_resolution.rs | 50 + crates/hir-ty/src/tests/patterns.rs | 12 + crates/hir-ty/src/tests/regression.rs | 21 + crates/hir-ty/src/tests/simple.rs | 28 +- crates/hir-ty/src/tests/traits.rs | 81 + crates/hir-ty/src/tls.rs | 10 +- crates/hir-ty/src/traits.rs | 11 +- crates/hir-ty/src/utils.rs | 70 +- crates/hir/src/attrs.rs | 2 +- crates/hir/src/diagnostics.rs | 17 +- crates/hir/src/display.rs | 16 +- crates/hir/src/lib.rs | 123 +- crates/hir/src/semantics.rs | 134 +- crates/hir/src/source_analyzer.rs | 52 +- crates/ide-assists/Cargo.toml | 1 + .../src/handlers/add_explicit_type.rs | 5 +- .../src/handlers/add_missing_impl_members.rs | 99 + .../src/handlers/add_missing_match_arms.rs | 4 +- .../src/handlers/add_return_type.rs | 6 +- .../ide-assists/src/handlers/auto_import.rs | 2 +- .../handlers/convert_iter_for_each_to_for.rs | 2 +- .../convert_tuple_struct_to_named_struct.rs | 43 +- ...extract_expressions_from_format_string.rs} | 58 +- .../src/handlers/extract_function.rs | 284 +- .../src/handlers/extract_module.rs | 90 +- .../extract_struct_from_enum_variant.rs | 2 +- .../src/handlers/extract_type_alias.rs | 120 +- .../src/handlers/generate_default_from_new.rs | 2 +- .../src/handlers/generate_delegate_methods.rs | 8 +- .../src/handlers/generate_deref.rs | 3 +- .../generate_enum_projection_method.rs | 2 +- .../src/handlers/generate_enum_variant.rs | 2 +- .../handlers/generate_from_impl_for_enum.rs | 6 +- .../src/handlers/generate_function.rs | 2 +- .../src/handlers/generate_getter.rs | 6 +- .../ide-assists/src/handlers/generate_impl.rs | 411 ++- .../ide-assists/src/handlers/generate_new.rs | 2 +- .../ide-assists/src/handlers/inline_call.rs | 2 +- .../ide-assists/src/handlers/inline_macro.rs | 233 ++ .../src/handlers/inline_type_alias.rs | 6 +- .../src/handlers/move_const_to_impl.rs | 481 +++ .../src/handlers/qualify_method_call.rs | 2 +- crates/ide-assists/src/handlers/remove_dbg.rs | 2 +- .../src/handlers/remove_parentheses.rs | 221 ++ .../src/handlers/replace_arith_op.rs | 226 ++ .../replace_derive_with_manual_impl.rs | 124 +- .../src/handlers/replace_or_with_or_else.rs 
| 4 +- .../replace_turbofish_with_explicit_type.rs | 2 +- .../src/handlers/unnecessary_async.rs | 2 +- .../src/handlers/unqualify_method_call.rs | 211 ++ .../ide-assists/src/handlers/unwrap_block.rs | 47 +- crates/ide-assists/src/lib.rs | 17 +- crates/ide-assists/src/tests.rs | 2 +- crates/ide-assists/src/tests/generated.rs | 210 +- crates/ide-assists/src/tests/sourcegen.rs | 9 +- crates/ide-assists/src/utils.rs | 79 +- .../src/utils/gen_trait_fn_body.rs | 16 +- crates/ide-completion/src/completions.rs | 4 +- .../src/completions/attribute.rs | 6 +- .../src/completions/attribute/cfg.rs | 4 +- .../src/completions/attribute/lint.rs | 2 +- crates/ide-completion/src/completions/dot.rs | 4 +- .../src/completions/env_vars.rs | 20 +- crates/ide-completion/src/completions/expr.rs | 2 +- .../src/completions/fn_param.rs | 2 +- .../src/completions/format_string.rs | 2 +- .../src/completions/item_list/trait_impl.rs | 63 +- .../ide-completion/src/completions/postfix.rs | 154 +- .../src/completions/postfix/format_like.rs | 27 +- .../ide-completion/src/completions/record.rs | 67 +- .../ide-completion/src/completions/snippet.rs | 2 +- crates/ide-completion/src/completions/type.rs | 2 +- crates/ide-completion/src/context.rs | 28 +- crates/ide-completion/src/context/analysis.rs | 20 +- crates/ide-completion/src/context/tests.rs | 2 +- crates/ide-completion/src/item.rs | 4 +- crates/ide-completion/src/lib.rs | 1 - crates/ide-completion/src/render.rs | 13 +- crates/ide-completion/src/render/const_.rs | 2 +- crates/ide-completion/src/render/function.rs | 12 +- crates/ide-completion/src/render/literal.rs | 4 +- crates/ide-completion/src/render/macro_.rs | 2 +- crates/ide-completion/src/render/pattern.rs | 4 +- .../ide-completion/src/render/type_alias.rs | 2 +- .../src/render/union_literal.rs | 4 +- crates/ide-completion/src/render/variant.rs | 23 +- crates/ide-completion/src/snippet.rs | 2 +- crates/ide-completion/src/tests.rs | 8 +- crates/ide-completion/src/tests/attribute.rs | 24 + crates/ide-completion/src/tests/expression.rs | 2 +- crates/ide-completion/src/tests/item.rs | 2 +- crates/ide-completion/src/tests/item_list.rs | 92 +- crates/ide-completion/src/tests/pattern.rs | 2 +- crates/ide-completion/src/tests/predicate.rs | 2 +- crates/ide-completion/src/tests/special.rs | 90 +- crates/ide-completion/src/tests/type_pos.rs | 2 +- crates/ide-db/src/assists.rs | 2 +- crates/ide-db/src/imports/import_assets.rs | 2 +- crates/ide-db/src/imports/insert_use/tests.rs | 2 +- crates/ide-db/src/imports/merge_imports.rs | 2 +- crates/ide-db/src/lib.rs | 2 +- crates/ide-db/src/rename.rs | 12 +- crates/ide-db/src/search.rs | 4 +- crates/ide-db/src/symbol_index.rs | 2 +- .../src/syntax_helpers/format_string_exprs.rs | 20 +- crates/ide-db/src/syntax_helpers/node_ext.rs | 29 +- .../test_symbol_index_collection.txt | 134 +- crates/ide-db/src/tests/sourcegen_lints.rs | 6 +- .../src/handlers/break_outside_of_loop.rs | 40 +- .../src/handlers/json_is_not_rust.rs | 2 +- .../src/handlers/mismatched_arg_count.rs | 2 +- .../src/handlers/missing_fields.rs | 4 +- .../src/handlers/missing_unsafe.rs | 1 + .../src/handlers/no_such_field.rs | 8 +- .../src/handlers/private_assoc_item.rs | 124 + .../src/handlers/private_field.rs | 68 + .../src/handlers/type_mismatch.rs | 4 +- .../src/handlers/unlinked_file.rs | 22 +- .../src/handlers/unresolved_macro_call.rs | 2 +- .../src/handlers/unresolved_module.rs | 2 +- .../src/handlers/unresolved_proc_macro.rs | 2 +- crates/ide-diagnostics/src/lib.rs | 8 +- crates/ide-diagnostics/src/tests.rs | 4 +- 
crates/ide-diagnostics/src/tests/sourcegen.rs | 4 +- crates/ide-ssr/src/parsing.rs | 2 +- crates/ide-ssr/src/tests.rs | 4 +- crates/ide/src/call_hierarchy.rs | 27 +- crates/ide/src/doc_links.rs | 8 +- crates/ide/src/doc_links/intra_doc_links.rs | 4 +- crates/ide/src/doc_links/tests.rs | 2 +- crates/ide/src/expand_macro.rs | 2 +- crates/ide/src/extend_selection.rs | 2 +- crates/ide/src/goto_declaration.rs | 123 +- crates/ide/src/goto_definition.rs | 2 +- crates/ide/src/goto_implementation.rs | 2 +- crates/ide/src/highlight_related.rs | 21 +- crates/ide/src/hover.rs | 101 +- crates/ide/src/hover/render.rs | 109 +- crates/ide/src/hover/tests.rs | 470 ++- crates/ide/src/inlay_hints.rs | 2930 +---------------- crates/ide/src/inlay_hints/adjustment.rs | 630 ++++ crates/ide/src/inlay_hints/bind_pat.rs | 978 ++++++ crates/ide/src/inlay_hints/binding_mode.rs | 142 + crates/ide/src/inlay_hints/chaining.rs | 665 ++++ crates/ide/src/inlay_hints/closing_brace.rs | 196 ++ crates/ide/src/inlay_hints/closure_ret.rs | 49 + crates/ide/src/inlay_hints/discriminant.rs | 142 + crates/ide/src/inlay_hints/fn_lifetime_fn.rs | 325 ++ crates/ide/src/inlay_hints/implicit_static.rs | 75 + crates/ide/src/inlay_hints/param_name.rs | 546 +++ crates/ide/src/lib.rs | 5 +- crates/ide/src/markup.rs | 2 +- crates/ide/src/moniker.rs | 2 +- crates/ide/src/navigation_target.rs | 4 +- crates/ide/src/rename.rs | 15 +- crates/ide/src/runnables.rs | 12 +- crates/ide/src/shuffle_crate_graph.rs | 1 + crates/ide/src/signature_help.rs | 60 +- crates/ide/src/static_index.rs | 13 +- crates/ide/src/status.rs | 4 +- .../ide/src/syntax_highlighting/highlight.rs | 3 +- crates/ide/src/syntax_highlighting/html.rs | 2 +- crates/ide/src/syntax_highlighting/tests.rs | 20 + crates/ide/src/syntax_tree.rs | 2 +- crates/ide/src/typing.rs | 2 +- crates/ide/src/typing/on_enter.rs | 11 +- crates/limit/src/lib.rs | 2 +- crates/mbe/src/benchmark.rs | 12 +- crates/mbe/src/expander/matcher.rs | 227 +- crates/mbe/src/expander/transcriber.rs | 8 +- crates/mbe/src/lib.rs | 2 +- crates/mbe/src/parser.rs | 56 +- crates/mbe/src/syntax_bridge.rs | 4 +- crates/mbe/src/syntax_bridge/tests.rs | 6 +- crates/mbe/src/to_parser_input.rs | 2 +- crates/mbe/src/tt_iter.rs | 48 +- crates/parser/src/grammar.rs | 6 +- crates/parser/src/grammar/expressions.rs | 2 +- crates/parser/src/grammar/expressions/atom.rs | 22 + crates/parser/src/grammar/paths.rs | 9 +- crates/parser/src/grammar/patterns.rs | 87 +- crates/parser/src/lexed_str.rs | 2 +- crates/parser/src/output.rs | 2 +- crates/parser/src/parser.rs | 12 +- crates/parser/src/shortcuts.rs | 4 +- crates/parser/src/syntax_kind/generated.rs | 9 +- crates/parser/src/tests.rs | 24 +- .../src/tests/sourcegen_inline_tests.rs | 6 +- .../parser/inline/ok/0058_range_pat.rast | 106 + .../parser/inline/ok/0058_range_pat.rs | 9 +- .../inline/ok/0166_half_open_range_pat.rast | 43 + .../inline/ok/0166_half_open_range_pat.rs | 5 + ...03_closure_body_underscore_assignment.rast | 32 + ...0203_closure_body_underscore_assignment.rs | 1 + .../parser/inline/ok/0204_yeet_expr.rast | 31 + .../parser/inline/ok/0204_yeet_expr.rs | 4 + crates/proc-macro-api/src/lib.rs | 2 +- crates/proc-macro-api/src/msg/flat.rs | 8 +- crates/proc-macro-api/src/process.rs | 2 +- crates/proc-macro-api/src/version.rs | 2 +- .../abis/abi_1_58/proc_macro/bridge/client.rs | 2 +- .../src/abis/abi_1_58/proc_macro/mod.rs | 8 +- .../src/abis/abi_1_58/ra_server.rs | 18 +- .../abis/abi_1_63/proc_macro/bridge/client.rs | 2 +- .../src/abis/abi_1_63/ra_server.rs | 18 +- 
.../src/abis/abi_sysroot/ra_server.rs | 7 +- crates/proc-macro-srv/src/abis/mod.rs | 2 +- crates/proc-macro-srv/src/lib.rs | 18 +- crates/proc-macro-srv/src/tests/mod.rs | 6 +- crates/proc-macro-srv/src/tests/utils.rs | 4 +- crates/proc-macro-test/build.rs | 17 +- crates/profile/src/hprof.rs | 12 +- crates/profile/src/lib.rs | 4 +- crates/profile/src/memory_usage.rs | 2 +- crates/profile/src/stop_watch.rs | 10 +- crates/project-model/src/build_scripts.rs | 15 +- crates/project-model/src/cargo_workspace.rs | 6 +- crates/project-model/src/cfg_flag.rs | 2 +- crates/project-model/src/lib.rs | 3 +- crates/project-model/src/manifest_path.rs | 2 +- crates/project-model/src/project_json.rs | 2 +- crates/project-model/src/rustc_cfg.rs | 8 +- crates/project-model/src/sysroot.rs | 6 +- .../project-model/src/target_data_layout.rs | 40 + crates/project-model/src/tests.rs | 29 +- crates/project-model/src/workspace.rs | 118 +- crates/rust-analyzer/src/bin/logger.rs | 10 +- crates/rust-analyzer/src/bin/main.rs | 6 +- crates/rust-analyzer/src/caps.rs | 4 +- crates/rust-analyzer/src/cli.rs | 4 +- .../rust-analyzer/src/cli/analysis_stats.rs | 18 +- crates/rust-analyzer/src/cli/diagnostics.rs | 4 +- crates/rust-analyzer/src/cli/flags.rs | 2 +- crates/rust-analyzer/src/cli/highlight.rs | 2 +- crates/rust-analyzer/src/cli/lsif.rs | 4 +- .../rust-analyzer/src/cli/progress_report.rs | 4 +- crates/rust-analyzer/src/cli/scip.rs | 12 +- crates/rust-analyzer/src/cli/ssr.rs | 2 +- crates/rust-analyzer/src/cli/symbols.rs | 2 +- crates/rust-analyzer/src/config.rs | 248 +- .../src/config/patch_old_style.rs | 19 +- crates/rust-analyzer/src/diagnostics.rs | 3 +- .../rust-analyzer/src/diagnostics/to_proto.rs | 14 +- crates/rust-analyzer/src/dispatch.rs | 2 +- crates/rust-analyzer/src/from_proto.rs | 17 +- crates/rust-analyzer/src/global_state.rs | 4 +- crates/rust-analyzer/src/handlers.rs | 71 +- .../src/integrated_benchmarks.rs | 4 +- crates/rust-analyzer/src/lib.rs | 2 +- crates/rust-analyzer/src/lsp_ext.rs | 27 +- crates/rust-analyzer/src/lsp_utils.rs | 2 +- crates/rust-analyzer/src/main_loop.rs | 216 +- crates/rust-analyzer/src/reload.rs | 19 +- crates/rust-analyzer/src/semantic_tokens.rs | 10 +- crates/rust-analyzer/src/task_pool.rs | 3 +- crates/rust-analyzer/src/to_proto.rs | 54 +- crates/rust-analyzer/tests/slow-tests/main.rs | 22 +- .../tests/slow-tests/sourcegen.rs | 8 +- .../rust-analyzer/tests/slow-tests/support.rs | 8 +- .../rust-analyzer/tests/slow-tests/testdir.rs | 4 +- crates/rust-analyzer/tests/slow-tests/tidy.rs | 18 +- crates/sourcegen/src/lib.rs | 13 +- crates/stdx/src/hash.rs | 2 +- crates/stdx/src/panic_context.rs | 4 +- crates/syntax/rust.ungram | 4 + crates/syntax/src/algo.rs | 7 +- crates/syntax/src/ast.rs | 1 + crates/syntax/src/ast/edit.rs | 4 +- crates/syntax/src/ast/edit_in_place.rs | 16 +- crates/syntax/src/ast/generated/nodes.rs | 53 +- crates/syntax/src/ast/make.rs | 40 +- crates/syntax/src/ast/prec.rs | 328 ++ crates/syntax/src/ast/token_ext.rs | 8 +- crates/syntax/src/fuzz.rs | 6 +- crates/syntax/src/hacks.rs | 2 +- crates/syntax/src/ptr.rs | 2 +- crates/syntax/src/ted.rs | 6 +- crates/syntax/src/tests.rs | 6 +- crates/syntax/src/tests/ast_src.rs | 5 +- crates/syntax/src/tests/sourcegen_ast.rs | 10 +- crates/syntax/src/validation.rs | 2 +- crates/test-utils/src/assert_linear.rs | 4 +- crates/test-utils/src/bench_fixture.rs | 4 +- crates/test-utils/src/fixture.rs | 37 +- crates/test-utils/src/lib.rs | 17 +- crates/test-utils/src/minicore.rs | 55 +- crates/toolchain/src/lib.rs | 2 +- 
crates/tt/src/buffer.rs | 2 +- crates/tt/src/lib.rs | 26 +- crates/vfs/src/file_set.rs | 7 +- crates/vfs/src/lib.rs | 7 +- crates/vfs/src/path_interner.rs | 7 +- crates/vfs/src/vfs_path.rs | 2 +- docs/dev/README.md | 7 +- docs/dev/lsp-extensions.md | 67 +- docs/dev/style.md | 2 +- docs/user/generated_config.adoc | 57 +- editors/code/package-lock.json | 352 +- editors/code/package.json | 119 +- editors/code/src/client.ts | 48 +- editors/code/src/commands.ts | 21 +- editors/code/src/config.ts | 4 + editors/code/src/diagnostics.ts | 212 ++ editors/code/src/lsp_ext.ts | 6 +- editors/code/src/main.ts | 65 +- lib/la-arena/src/map.rs | 8 + lib/lsp-server/examples/goto_def.rs | 12 +- lib/lsp-server/src/lib.rs | 40 +- xtask/Cargo.toml | 1 + xtask/src/dist.rs | 52 +- xtask/src/flags.rs | 15 + xtask/src/install.rs | 2 +- xtask/src/main.rs | 2 + xtask/src/metrics.rs | 2 +- xtask/src/publish.rs | 109 + xtask/src/publish/notes.rs | 631 ++++ xtask/src/release.rs | 4 +- xtask/src/release/changelog.rs | 30 +- xtask/test_data/expected.md | 81 + xtask/test_data/input.adoc | 90 + 395 files changed, 14558 insertions(+), 5744 deletions(-) create mode 100644 crates/hir-def/src/layout.rs create mode 100644 crates/hir-ty/src/lang_items.rs create mode 100644 crates/hir-ty/src/layout.rs create mode 100644 crates/hir-ty/src/layout/adt.rs create mode 100644 crates/hir-ty/src/layout/target.rs create mode 100644 crates/hir-ty/src/layout/tests.rs rename crates/ide-assists/src/handlers/{move_format_string_arg.rs => extract_expressions_from_format_string.rs} (84%) create mode 100644 crates/ide-assists/src/handlers/inline_macro.rs create mode 100644 crates/ide-assists/src/handlers/move_const_to_impl.rs create mode 100644 crates/ide-assists/src/handlers/remove_parentheses.rs create mode 100644 crates/ide-assists/src/handlers/replace_arith_op.rs create mode 100644 crates/ide-assists/src/handlers/unqualify_method_call.rs create mode 100644 crates/ide-diagnostics/src/handlers/private_assoc_item.rs create mode 100644 crates/ide-diagnostics/src/handlers/private_field.rs create mode 100644 crates/ide/src/inlay_hints/adjustment.rs create mode 100644 crates/ide/src/inlay_hints/bind_pat.rs create mode 100644 crates/ide/src/inlay_hints/binding_mode.rs create mode 100644 crates/ide/src/inlay_hints/chaining.rs create mode 100644 crates/ide/src/inlay_hints/closing_brace.rs create mode 100644 crates/ide/src/inlay_hints/closure_ret.rs create mode 100644 crates/ide/src/inlay_hints/discriminant.rs create mode 100644 crates/ide/src/inlay_hints/fn_lifetime_fn.rs create mode 100644 crates/ide/src/inlay_hints/implicit_static.rs create mode 100644 crates/ide/src/inlay_hints/param_name.rs create mode 100644 crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rs create mode 100644 crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs create mode 100644 crates/project-model/src/target_data_layout.rs create mode 100644 crates/syntax/src/ast/prec.rs create mode 100644 editors/code/src/diagnostics.ts create mode 100644 xtask/src/publish.rs create mode 100644 xtask/src/publish/notes.rs create mode 100644 xtask/test_data/expected.md create mode 100644 xtask/test_data/input.adoc diff --git a/Cargo.lock b/Cargo.lock index 41c5d36671de0..d27ae416f04b9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -112,6 +112,12 @@ version = "1.3.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + [[package]] name = "camino" version = "1.1.1" @@ -171,9 +177,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chalk-derive" -version = "0.86.0" +version = "0.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5499d415d855b5094366a824815341893ad3de0ecb6048c430118bdae6d27402" +checksum = "4df80a3fbc1f0e59f560eeeebca94bf655566a8ad3023c210a109deb6056455a" dependencies = [ "proc-macro2", "quote", @@ -183,9 +189,9 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.86.0" +version = "0.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3800118c76a48507b0eece3a01f3a429b5c478d203c493096e6040c67ab960e1" +checksum = "f39e5272016916956298cceea5147006f897972c274a768ed4d6e074efe5d3fb" dependencies = [ "bitflags", "chalk-derive", @@ -194,9 +200,9 @@ dependencies = [ [[package]] name = "chalk-recursive" -version = "0.86.0" +version = "0.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1baf60628fd73104d1f8562586a52d48f37f1e84435aab2e62674b1fd935b8c8" +checksum = "d9d60b42ad7478d3e027e2f9ea4e99fbbb8fdee0c8c3cf068be269f57e603618" dependencies = [ "chalk-derive", "chalk-ir", @@ -207,9 +213,9 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.86.0" +version = "0.88.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e9c3c068f9358786348e58a1b94ef0a5cf90a9810fc1f10fda896f0b5d80185" +checksum = "ab30620ea5b36819525eaab2204f4b8e1842fc7ee36826424a28bef59ae7fecf" dependencies = [ "chalk-derive", "chalk-ir", @@ -510,6 +516,8 @@ dependencies = [ "fst", "hashbrown", "hir-expand", + "hkalbasi-rustc-ap-rustc_abi", + "hkalbasi-rustc-ap-rustc_index", "indexmap", "itertools", "la-arena", @@ -555,6 +563,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", + "bitflags", "chalk-derive", "chalk-ir", "chalk-recursive", @@ -564,6 +573,7 @@ dependencies = [ "expect-test", "hir-def", "hir-expand", + "hkalbasi-rustc-ap-rustc_index", "itertools", "la-arena", "limit", @@ -581,6 +591,27 @@ dependencies = [ "typed-arena", ] +[[package]] +name = "hkalbasi-rustc-ap-rustc_abi" +version = "0.0.20221221" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adabaadad9aa7576f97af02241cdf5554d62fb3d51a84cb05d77ba28edd3013f" +dependencies = [ + "bitflags", + "hkalbasi-rustc-ap-rustc_index", + "tracing", +] + +[[package]] +name = "hkalbasi-rustc-ap-rustc_index" +version = "0.0.20221221" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4d3c48474e09afb0f5efbd6f758e05411699301a113c47d454d28ec7059d00e" +dependencies = [ + "arrayvec", + "smallvec", +] + [[package]] name = "home" version = "0.5.4" @@ -631,6 +662,7 @@ dependencies = [ "ide-db", "itertools", "profile", + "smallvec", "sourcegen", "stdx", "syntax", @@ -1750,6 +1782,33 @@ dependencies = [ "tikv-jemalloc-sys", ] +[[package]] +name = "time" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +dependencies = [ + "itoa", + "serde", + "time-core", + "time-macros", +] + +[[package]] 
+name = "time-core" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" + +[[package]] +name = "time-macros" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +dependencies = [ + "time-core", +] + [[package]] name = "tinyvec" version = "1.6.0" @@ -2148,4 +2207,18 @@ dependencies = [ "write-json", "xflags", "xshell", + "zip", +] + +[[package]] +name = "zip" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "537ce7411d25e54e8ae21a7ce0b15840e7bfcff15b51d697ec3266cc76bdf080" +dependencies = [ + "byteorder", + "crc32fast", + "crossbeam-utils", + "flate2", + "time", ] diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 5b7828a26996e..6f83ea40e76f6 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -162,6 +162,7 @@ impl ChangeFixture { Ok(Vec::new()), false, origin, + meta.target_data_layout.as_deref().map(Arc::from), ); let prev = crates.insert(crate_name.clone(), crate_id); assert!(prev.is_none()); @@ -197,6 +198,7 @@ impl ChangeFixture { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); } else { for (from, to, prelude) in crate_deps { @@ -210,6 +212,8 @@ impl ChangeFixture { .unwrap(); } } + let target_layout = + crate_graph.iter().next().and_then(|it| crate_graph[it].target_layout.clone()); if let Some(mini_core) = mini_core { let core_file = file_id; @@ -234,6 +238,7 @@ impl ChangeFixture { Ok(Vec::new()), false, CrateOrigin::Lang(LangCrateOrigin::Core), + target_layout.clone(), ); for krate in all_crates { @@ -271,6 +276,7 @@ impl ChangeFixture { Ok(proc_macro), true, CrateOrigin::CratesIo { repo: None, name: None }, + target_layout, ); for krate in all_crates { @@ -391,6 +397,7 @@ struct FileMeta { edition: Edition, env: Env, introduce_new_source_root: Option, + target_data_layout: Option, } fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option) { @@ -400,9 +407,9 @@ fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option) { Some((version, url)) => { (version, CrateOrigin::CratesIo { repo: Some(url.to_owned()), name: None }) } - _ => panic!("Bad crates.io parameter: {}", data), + _ => panic!("Bad crates.io parameter: {data}"), }, - _ => panic!("Bad string for crate origin: {}", b), + _ => panic!("Bad string for crate origin: {b}"), }; (a.to_owned(), origin, Some(version.to_string())) } else { @@ -432,8 +439,9 @@ impl From for FileMeta { introduce_new_source_root: f.introduce_new_source_root.map(|kind| match &*kind { "local" => SourceRootKind::Local, "library" => SourceRootKind::Library, - invalid => panic!("invalid source root kind '{}'", invalid), + invalid => panic!("invalid source root kind '{invalid}'"), }), + target_data_layout: f.target_data_layout, } } } diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index e7f0c4ec29bf4..5fa4a80249509 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -128,7 +128,7 @@ impl fmt::Display for CrateName { impl ops::Deref for CrateName { type Target = str; fn deref(&self) -> &str { - &*self.0 + &self.0 } } @@ -211,7 +211,7 @@ impl fmt::Display for CrateDisplayName { impl ops::Deref for CrateDisplayName { type Target = str; fn deref(&self) -> &str { - &*self.crate_name + &self.crate_name } } @@ -270,6 +270,7 @@ pub struct 
CrateData { pub display_name: Option, pub cfg_options: CfgOptions, pub potential_cfg_options: CfgOptions, + pub target_layout: Option>, pub env: Env, pub dependencies: Vec, pub proc_macro: ProcMacroLoadResult, @@ -328,6 +329,7 @@ impl CrateGraph { proc_macro: ProcMacroLoadResult, is_proc_macro: bool, origin: CrateOrigin, + target_layout: Option>, ) -> CrateId { let data = CrateData { root_file_id, @@ -340,6 +342,7 @@ impl CrateGraph { proc_macro, dependencies: Vec::new(), origin, + target_layout, is_proc_macro, }; let crate_id = CrateId(self.arena.len() as u32); @@ -615,8 +618,8 @@ impl CyclicDependenciesError { impl fmt::Display for CyclicDependenciesError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let render = |(id, name): &(CrateId, Option)| match name { - Some(it) => format!("{}({:?})", it, id), - None => format!("{:?}", id), + Some(it) => format!("{it}({id:?})"), + None => format!("{id:?}"), }; let path = self.path.iter().rev().map(render).collect::>().join(" -> "); write!( @@ -649,6 +652,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -661,6 +665,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate3 = graph.add_crate_root( FileId(3u32), @@ -673,6 +678,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -699,6 +705,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -711,6 +718,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -734,6 +742,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -746,6 +755,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate3 = graph.add_crate_root( FileId(3u32), @@ -758,6 +768,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -781,6 +792,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -793,6 +805,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); assert!(graph .add_dep( diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index da11e4ae7bb96..55a51d3bbb2c7 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -75,9 +75,9 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { } fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse { - let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id)); + let _p = profile::span("parse_query").detail(|| format!("{file_id:?}")); let text = db.file_text(file_id); - SourceFile::parse(&*text) + SourceFile::parse(&text) } /// We don't want to give HIR knowledge of source roots, hence we extract these diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs index fd9e31ed3b4f5..5f4eefa836619 100644 --- a/crates/cfg/src/cfg_expr.rs +++ 
b/crates/cfg/src/cfg_expr.rs @@ -44,7 +44,7 @@ impl fmt::Display for CfgAtom { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { CfgAtom::Flag(name) => name.fmt(f), - CfgAtom::KeyValue { key, value } => write!(f, "{} = {:?}", key, value), + CfgAtom::KeyValue { key, value } => write!(f, "{key} = {value:?}"), } } } diff --git a/crates/cfg/src/lib.rs b/crates/cfg/src/lib.rs index d78ef4fb11e7f..30709c968dacf 100644 --- a/crates/cfg/src/lib.rs +++ b/crates/cfg/src/lib.rs @@ -37,7 +37,7 @@ impl fmt::Debug for CfgOptions { .iter() .map(|atom| match atom { CfgAtom::Flag(it) => it.to_string(), - CfgAtom::KeyValue { key, value } => format!("{}={}", key, value), + CfgAtom::KeyValue { key, value } => format!("{key}={value}"), }) .collect::>(); items.sort(); @@ -175,7 +175,7 @@ impl fmt::Display for InactiveReason { atom.fmt(f)?; } let is_are = if self.enabled.len() == 1 { "is" } else { "are" }; - write!(f, " {} enabled", is_are)?; + write!(f, " {is_are} enabled")?; if !self.disabled.is_empty() { f.write_str(" and ")?; @@ -194,7 +194,7 @@ impl fmt::Display for InactiveReason { atom.fmt(f)?; } let is_are = if self.disabled.len() == 1 { "is" } else { "are" }; - write!(f, " {} disabled", is_are)?; + write!(f, " {is_are} disabled")?; } Ok(()) diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index 8f93dad06e3f5..11f7b068ecb16 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -47,6 +47,7 @@ pub enum FlycheckConfig { features: Vec, extra_args: Vec, extra_env: FxHashMap, + ansi_color_output: bool, }, CustomCommand { command: String, @@ -60,9 +61,9 @@ pub enum FlycheckConfig { impl fmt::Display for FlycheckConfig { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {}", command), + FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {command}"), FlycheckConfig::CustomCommand { command, args, .. } => { - write!(f, "{} {}", command, args.join(" ")) + write!(f, "{command} {}", args.join(" ")) } } } @@ -293,15 +294,24 @@ impl FlycheckActor { extra_args, features, extra_env, + ansi_color_output, } => { let mut cmd = Command::new(toolchain::cargo()); cmd.arg(command); cmd.current_dir(&self.root); - cmd.args(&["--workspace", "--message-format=json", "--manifest-path"]) - .arg(self.root.join("Cargo.toml").as_os_str()); + cmd.arg("--workspace"); + + cmd.arg(if *ansi_color_output { + "--message-format=json-diagnostic-rendered-ansi" + } else { + "--message-format=json" + }); + + cmd.arg("--manifest-path"); + cmd.arg(self.root.join("Cargo.toml").as_os_str()); for target in target_triples { - cmd.args(&["--target", target.as_str()]); + cmd.args(["--target", target.as_str()]); } if *all_targets { cmd.arg("--all-targets"); @@ -360,13 +370,20 @@ impl FlycheckActor { } } -struct JodChild(GroupChild); +struct JodGroupChild(GroupChild); + +impl Drop for JodGroupChild { + fn drop(&mut self) { + _ = self.0.kill(); + _ = self.0.wait(); + } +} /// A handle to a cargo process used for fly-checking. struct CargoHandle { /// The handle to the actual cargo process. As we cannot cancel directly from with /// a read syscall dropping and therefore terminating the process is our best option. 
- child: JodChild, + child: JodGroupChild, thread: jod_thread::JoinHandle>, receiver: Receiver, } @@ -374,7 +391,7 @@ struct CargoHandle { impl CargoHandle { fn spawn(mut command: Command) -> std::io::Result { command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null()); - let mut child = command.group_spawn().map(JodChild)?; + let mut child = command.group_spawn().map(JodGroupChild)?; let stdout = child.0.inner().stdout.take().unwrap(); let stderr = child.0.inner().stderr.take().unwrap(); @@ -401,8 +418,7 @@ impl CargoHandle { Ok(()) } else { Err(io::Error::new(io::ErrorKind::Other, format!( - "Cargo watcher failed, the command produced no valid metadata (exit code: {:?}):\n{}", - exit_status, error + "Cargo watcher failed, the command produced no valid metadata (exit code: {exit_status:?}):\n{error}" ))) } } @@ -467,7 +483,7 @@ impl CargoActor { ); match output { Ok(_) => Ok((read_at_least_one_message, error)), - Err(e) => Err(io::Error::new(e.kind(), format!("{:?}: {}", e, error))), + Err(e) => Err(io::Error::new(e.kind(), format!("{e:?}: {error}"))), } } } diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 22f98ea7cd450..698be76656cce 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -33,6 +33,8 @@ base-db = { path = "../base-db", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } profile = { path = "../profile", version = "0.0.0" } hir-expand = { path = "../hir-expand", version = "0.0.0" } +rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false } +rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false } mbe = { path = "../mbe", version = "0.0.0" } cfg = { path = "../cfg", version = "0.0.0" } tt = { path = "../tt", version = "0.0.0" } diff --git a/crates/hir-def/src/adt.rs b/crates/hir-def/src/adt.rs index 938db032fbc8b..db3b419488147 100644 --- a/crates/hir-def/src/adt.rs +++ b/crates/hir-def/src/adt.rs @@ -1,6 +1,6 @@ //! 
Defines hir-level representation of structs, enums and unions -use std::{num::NonZeroU32, sync::Arc}; +use std::sync::Arc; use base_db::CrateId; use either::Either; @@ -9,6 +9,7 @@ use hir_expand::{ HirFileId, InFile, }; use la_arena::{Arena, ArenaMap}; +use rustc_abi::{Integer, IntegerType}; use syntax::ast::{self, HasName, HasVisibility}; use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}; @@ -18,6 +19,7 @@ use crate::{ db::DefDatabase, intern::Interned, item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId}, + layout::{Align, ReprFlags, ReprOptions}, nameres::diagnostics::DefDiagnostic, src::HasChildSource, src::HasSource, @@ -34,16 +36,18 @@ use cfg::CfgOptions; pub struct StructData { pub name: Name, pub variant_data: Arc, - pub repr: Option, + pub repr: Option, pub visibility: RawVisibility, + pub rustc_has_incoherent_inherent_impls: bool, } #[derive(Debug, Clone, PartialEq, Eq)] pub struct EnumData { pub name: Name, pub variants: Arena, - pub repr: Option, + pub repr: Option, pub visibility: RawVisibility, + pub rustc_has_incoherent_inherent_impls: bool, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -67,80 +71,91 @@ pub struct FieldData { pub visibility: RawVisibility, } -#[derive(Copy, Debug, Clone, PartialEq, Eq)] -pub enum ReprKind { - C, - BuiltinInt { builtin: Either, is_c: bool }, - Transparent, - Default, -} - -#[derive(Copy, Debug, Clone, PartialEq, Eq)] -pub struct ReprData { - pub kind: ReprKind, - pub packed: bool, - pub align: Option, -} - fn repr_from_value( db: &dyn DefDatabase, krate: CrateId, item_tree: &ItemTree, of: AttrOwner, -) -> Option { +) -> Option { item_tree.attrs(db, krate, of).by_key("repr").tt_values().find_map(parse_repr_tt) } -fn parse_repr_tt(tt: &Subtree) -> Option { +fn parse_repr_tt(tt: &Subtree) -> Option { match tt.delimiter { Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {} _ => return None, } - let mut data = ReprData { kind: ReprKind::Default, packed: false, align: None }; + let mut flags = ReprFlags::empty(); + let mut int = None; + let mut max_align: Option = None; + let mut min_pack: Option = None; let mut tts = tt.token_trees.iter().peekable(); while let Some(tt) = tts.next() { if let TokenTree::Leaf(Leaf::Ident(ident)) = tt { - match &*ident.text { + flags.insert(match &*ident.text { "packed" => { - data.packed = true; - if let Some(TokenTree::Subtree(_)) = tts.peek() { + let pack = if let Some(TokenTree::Subtree(tt)) = tts.peek() { tts.next(); - } + if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() { + lit.text.parse().unwrap_or_default() + } else { + 0 + } + } else { + 0 + }; + let pack = Align::from_bytes(pack).unwrap(); + min_pack = + Some(if let Some(min_pack) = min_pack { min_pack.min(pack) } else { pack }); + ReprFlags::empty() } "align" => { if let Some(TokenTree::Subtree(tt)) = tts.peek() { tts.next(); if let Some(TokenTree::Leaf(Leaf::Literal(lit))) = tt.token_trees.first() { if let Ok(align) = lit.text.parse() { - data.align = Some(align); + let align = Align::from_bytes(align).ok(); + max_align = max_align.max(align); } } } + ReprFlags::empty() } - "C" => { - if let ReprKind::BuiltinInt { is_c, .. 
} = &mut data.kind { - *is_c = true; - } else { - data.kind = ReprKind::C; - } - } - "transparent" => data.kind = ReprKind::Transparent, + "C" => ReprFlags::IS_C, + "transparent" => ReprFlags::IS_TRANSPARENT, repr => { - let is_c = matches!(data.kind, ReprKind::C); if let Some(builtin) = BuiltinInt::from_suffix(repr) .map(Either::Left) .or_else(|| BuiltinUint::from_suffix(repr).map(Either::Right)) { - data.kind = ReprKind::BuiltinInt { builtin, is_c }; + int = Some(match builtin { + Either::Left(bi) => match bi { + BuiltinInt::Isize => IntegerType::Pointer(true), + BuiltinInt::I8 => IntegerType::Fixed(Integer::I8, true), + BuiltinInt::I16 => IntegerType::Fixed(Integer::I16, true), + BuiltinInt::I32 => IntegerType::Fixed(Integer::I32, true), + BuiltinInt::I64 => IntegerType::Fixed(Integer::I64, true), + BuiltinInt::I128 => IntegerType::Fixed(Integer::I128, true), + }, + Either::Right(bu) => match bu { + BuiltinUint::Usize => IntegerType::Pointer(false), + BuiltinUint::U8 => IntegerType::Fixed(Integer::I8, false), + BuiltinUint::U16 => IntegerType::Fixed(Integer::I16, false), + BuiltinUint::U32 => IntegerType::Fixed(Integer::I32, false), + BuiltinUint::U64 => IntegerType::Fixed(Integer::I64, false), + BuiltinUint::U128 => IntegerType::Fixed(Integer::I128, false), + }, + }); } + ReprFlags::empty() } - } + }) } } - Some(data) + Some(ReprOptions { int, align: max_align, pack: min_pack, flags, field_shuffle_seed: 0 }) } impl StructData { @@ -157,6 +172,10 @@ impl StructData { let item_tree = loc.id.item_tree(db); let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone(); + let rustc_has_incoherent_inherent_impls = item_tree + .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) + .by_key("rustc_has_incoherent_inherent_impls") + .exists(); let strukt = &item_tree[loc.id.value]; let (variant_data, diagnostics) = lower_fields( @@ -175,6 +194,7 @@ impl StructData { variant_data: Arc::new(variant_data), repr, visibility: item_tree[strukt.visibility].clone(), + rustc_has_incoherent_inherent_impls, }), diagnostics.into(), ) @@ -194,6 +214,11 @@ impl StructData { let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); let cfg_options = db.crate_graph()[loc.container.krate].cfg_options.clone(); + let rustc_has_incoherent_inherent_impls = item_tree + .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) + .by_key("rustc_has_incoherent_inherent_impls") + .exists(); + let union = &item_tree[loc.id.value]; let (variant_data, diagnostics) = lower_fields( db, @@ -211,6 +236,7 @@ impl StructData { variant_data: Arc::new(variant_data), repr, visibility: item_tree[union.visibility].clone(), + rustc_has_incoherent_inherent_impls, }), diagnostics.into(), ) @@ -231,6 +257,10 @@ impl EnumData { let item_tree = loc.id.item_tree(db); let cfg_options = db.crate_graph()[krate].cfg_options.clone(); let repr = repr_from_value(db, krate, &item_tree, ModItem::from(loc.id.value).into()); + let rustc_has_incoherent_inherent_impls = item_tree + .attrs(db, loc.container.krate, ModItem::from(loc.id.value).into()) + .by_key("rustc_has_incoherent_inherent_impls") + .exists(); let enum_ = &item_tree[loc.id.value]; let mut variants = Arena::new(); @@ -271,6 +301,7 @@ impl EnumData { variants, repr, visibility: item_tree[enum_.visibility].clone(), + rustc_has_incoherent_inherent_impls, }), diagnostics.into(), ) @@ -281,10 +312,10 @@ impl EnumData { Some(id) } - pub fn 
variant_body_type(&self) -> Either { + pub fn variant_body_type(&self) -> IntegerType { match self.repr { - Some(ReprData { kind: ReprKind::BuiltinInt { builtin, .. }, .. }) => builtin, - _ => Either::Left(BuiltinInt::Isize), + Some(ReprOptions { int: Some(builtin), .. }) => builtin, + _ => IntegerType::Pointer(true), } } } diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index 2b39c6f8da86b..ab5d180e1bb9e 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -712,7 +712,7 @@ impl AttrSourceMap { self.source .get(ast_idx) .map(|it| InFile::new(file_id, it)) - .unwrap_or_else(|| panic!("cannot find attr at index {:?}", id)) + .unwrap_or_else(|| panic!("cannot find attr at index {id:?}")) } } diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 759f3b8c04b6c..78fbaa9d7d353 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -372,7 +372,7 @@ impl Body { /// Retrieves all ident patterns this pattern shares the ident with. pub fn ident_patterns_for<'slf>(&'slf self, pat: &'slf PatId) -> &'slf [PatId] { match self.or_pats.get(pat) { - Some(pats) => &**pats, + Some(pats) => pats, None => std::slice::from_ref(pat), } } diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index ccc01c3efca51..e8da24e3addaa 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -371,6 +371,10 @@ impl ExprCollector<'_> { let expr = e.expr().map(|e| self.collect_expr(e)); self.alloc_expr(Expr::Yield { expr }, syntax_ptr) } + ast::Expr::YeetExpr(e) => { + let expr = e.expr().map(|e| self.collect_expr(e)); + self.alloc_expr(Expr::Yeet { expr }, syntax_ptr) + } ast::Expr::RecordExpr(e) => { let path = e.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new); diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 162d173d52404..10b9b26bbeaa1 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -32,7 +32,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo Some(name) => name.to_string(), None => "_".to_string(), }; - format!("const {} = ", name) + format!("const {name} = ") } DefWithBodyId::VariantId(it) => { needs_semi = false; @@ -42,7 +42,7 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo Some(name) => name.to_string(), None => "_".to_string(), }; - format!("{}", name) + format!("{name}") } }; @@ -247,6 +247,15 @@ impl<'a> Printer<'a> { self.print_expr(*expr); } } + Expr::Yeet { expr } => { + w!(self, "do"); + self.whitespace(); + w!(self, "yeet"); + if let Some(expr) = expr { + self.whitespace(); + self.print_expr(*expr); + } + } Expr::RecordLit { path, fields, spread, ellipsis, is_assignee_expr: _ } => { match path { Some(path) => self.print_path(path), diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/body/scope.rs index 45f64ebb06007..2617d4288a3a1 100644 --- a/crates/hir-def/src/body/scope.rs +++ b/crates/hir-def/src/body/scope.rs @@ -47,7 +47,7 @@ pub struct ScopeData { impl ExprScopes { pub(crate) fn expr_scopes_query(db: &dyn DefDatabase, def: DefWithBodyId) -> Arc { let body = db.body(def); - let mut scopes = ExprScopes::new(&*body); + let mut scopes = ExprScopes::new(&body); scopes.shrink_to_fit(); Arc::new(scopes) } diff --git a/crates/hir-def/src/builtin_attr.rs b/crates/hir-def/src/builtin_attr.rs index 39581b33a8da2..f7c1e683d0d20 100644 --- a/crates/hir-def/src/builtin_attr.rs +++ 
b/crates/hir-def/src/builtin_attr.rs @@ -350,6 +350,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ ), ungated!(rustc_const_unstable, Normal, template!(List: r#"feature = "name""#), DuplicatesOk), ungated!(rustc_const_stable, Normal, template!(List: r#"feature = "name""#), DuplicatesOk), + ungated!(rustc_safe_intrinsic, Normal, template!(List: r#"feature = "name""#), DuplicatesOk), gated!( allow_internal_unstable, Normal, template!(Word, List: "feat1, feat2, ..."), DuplicatesOk, "allow_internal_unstable side-steps feature gating and stability checks", diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 9c76969086485..e6b05f27a5447 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -13,7 +13,9 @@ use crate::{ intern::Interned, item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId}, nameres::{ - attr_resolution::ResolvedAttr, diagnostics::DefDiagnostic, proc_macro::ProcMacroKind, + attr_resolution::ResolvedAttr, + diagnostics::DefDiagnostic, + proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroKind}, DefMap, }, type_ref::{TraitRef, TypeBound, TypeRef}, @@ -168,6 +170,7 @@ pub struct TypeAliasData { pub type_ref: Option>, pub visibility: RawVisibility, pub is_extern: bool, + pub rustc_has_incoherent_inherent_impls: bool, /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl). pub bounds: Vec>, } @@ -186,11 +189,17 @@ impl TypeAliasData { item_tree[typ.visibility].clone() }; + let rustc_has_incoherent_inherent_impls = item_tree + .attrs(db, loc.container.module(db).krate(), ModItem::from(loc.id.value).into()) + .by_key("rustc_has_incoherent_inherent_impls") + .exists(); + Arc::new(TypeAliasData { name: typ.name.clone(), type_ref: typ.type_ref.clone(), visibility, is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)), + rustc_has_incoherent_inherent_impls, bounds: typ.bounds.to_vec(), }) } @@ -202,6 +211,7 @@ pub struct TraitData { pub items: Vec<(Name, AssocItemId)>, pub is_auto: bool, pub is_unsafe: bool, + pub rustc_has_incoherent_inherent_impls: bool, pub visibility: RawVisibility, /// Whether the trait has `#[rust_skip_array_during_method_dispatch]`. 
`hir_ty` will ignore /// method calls to this trait's methods when the receiver is an array and the crate edition is @@ -224,18 +234,17 @@ impl TraitData { let item_tree = tree_id.item_tree(db); let tr_def = &item_tree[tree_id.value]; let _cx = stdx::panic_context::enter(format!( - "trait_data_query({:?} -> {:?} -> {:?})", - tr, tr_loc, tr_def + "trait_data_query({tr:?} -> {tr_loc:?} -> {tr_def:?})" )); let name = tr_def.name.clone(); let is_auto = tr_def.is_auto; let is_unsafe = tr_def.is_unsafe; let visibility = item_tree[tr_def.visibility].clone(); - let skip_array_during_method_dispatch = item_tree - .attrs(db, module_id.krate(), ModItem::from(tree_id.value).into()) - .by_key("rustc_skip_array_during_method_dispatch") - .exists(); - + let attrs = item_tree.attrs(db, module_id.krate(), ModItem::from(tree_id.value).into()); + let skip_array_during_method_dispatch = + attrs.by_key("rustc_skip_array_during_method_dispatch").exists(); + let rustc_has_incoherent_inherent_impls = + attrs.by_key("rustc_has_incoherent_inherent_impls").exists(); let (items, attribute_calls, diagnostics) = match &tr_def.items { Some(items) => { let mut collector = AssocItemCollector::new( @@ -258,6 +267,7 @@ impl TraitData { is_unsafe, visibility, skip_array_during_method_dispatch, + rustc_has_incoherent_inherent_impls, }), diagnostics.into(), ) @@ -339,6 +349,10 @@ impl ImplData { pub struct Macro2Data { pub name: Name, pub visibility: RawVisibility, + // It's a bit wasteful as currently this is only for builtin `Default` derive macro, but macro2 + // are rarely used in practice so I think it's okay for now. + /// Derive helpers, if this is a derive rustc_builtin_macro + pub helpers: Option>, } impl Macro2Data { @@ -347,9 +361,18 @@ impl Macro2Data { let item_tree = loc.id.item_tree(db); let makro = &item_tree[loc.id.value]; + let helpers = item_tree + .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into()) + .by_key("rustc_builtin_macro") + .tt_values() + .next() + .and_then(|attr| parse_macro_name_and_helper_attrs(&attr.token_trees)) + .map(|(_, helpers)| helpers); + Arc::new(Macro2Data { name: makro.name.clone(), visibility: item_tree[makro.visibility].clone(), + helpers, }) } } @@ -519,7 +542,7 @@ impl<'a> AssocItemCollector<'a> { if !attrs.is_cfg_enabled(self.expander.cfg_options()) { self.inactive_diagnostics.push(DefDiagnostic::unconfigured_code( self.module_id.local_id, - InFile::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast()), + InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()), attrs.cfg().unwrap(), self.expander.cfg_options().clone(), )); @@ -528,7 +551,7 @@ impl<'a> AssocItemCollector<'a> { 'attrs: for attr in &*attrs { let ast_id = - AstId::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast()); + AstId::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()); let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id }; if let Ok(ResolvedAttr::Macro(call_id)) = self.def_map.resolve_attr_macro( @@ -595,10 +618,8 @@ impl<'a> AssocItemCollector<'a> { let ast_id_map = self.db.ast_id_map(self.expander.current_file_id()); let call = ast_id_map.get(call.ast_id).to_node(&root); - let _cx = stdx::panic_context::enter(format!( - "collect_items MacroCall: {}", - call - )); + let _cx = + stdx::panic_context::enter(format!("collect_items MacroCall: {call}")); let res = self.expander.enter_expand::(self.db, call); if let Ok(ExpandResult { value: Some((mark, _)), .. 
}) = res { diff --git a/crates/hir-def/src/expr.rs b/crates/hir-def/src/expr.rs index 1626465502071..7b6569421195d 100644 --- a/crates/hir-def/src/expr.rs +++ b/crates/hir-def/src/expr.rs @@ -36,6 +36,13 @@ pub(crate) fn dummy_expr_id() -> ExprId { pub type PatId = Idx; +#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +pub enum ExprOrPatId { + ExprId(ExprId), + PatId(PatId), +} +stdx::impl_from!(ExprId, PatId for ExprOrPatId); + #[derive(Debug, Clone, Eq, PartialEq)] pub struct Label { pub name: Name, @@ -137,6 +144,9 @@ pub enum Expr { Yield { expr: Option, }, + Yeet { + expr: Option, + }, RecordLit { path: Option>, fields: Box<[RecordLitField]>, @@ -313,7 +323,10 @@ impl Expr { arms.iter().map(|arm| arm.expr).for_each(f); } Expr::Continue { .. } => {} - Expr::Break { expr, .. } | Expr::Return { expr } | Expr::Yield { expr } => { + Expr::Break { expr, .. } + | Expr::Return { expr } + | Expr::Yield { expr } + | Expr::Yeet { expr } => { if let &Some(expr) = expr { f(expr); } diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index c70e6fdccdcde..ddd7ad99e9ad3 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -107,7 +107,7 @@ fn find_path_inner( } // - if the item is in the prelude, return the name from there - if let Some(value) = find_in_prelude(db, &crate_root.def_map(db), item, from) { + if let value @ Some(_) = find_in_prelude(db, &crate_root.def_map(db), &def_map, item, from) { return value; } @@ -176,7 +176,7 @@ fn find_path_for_module( // - if relative paths are fine, check if we are searching for a parent if prefixed.filter(PrefixKind::is_absolute).is_none() { - if let modpath @ Some(_) = find_self_super(&def_map, module_id, from) { + if let modpath @ Some(_) = find_self_super(def_map, module_id, from) { return modpath; } } @@ -205,7 +205,8 @@ fn find_path_for_module( } } - if let Some(value) = find_in_prelude(db, &root_def_map, ItemInNs::Types(module_id.into()), from) + if let value @ Some(_) = + find_in_prelude(db, &root_def_map, &def_map, ItemInNs::Types(module_id.into()), from) { return value; } @@ -234,23 +235,41 @@ fn find_in_scope( }) } +/// Returns single-segment path (i.e. without any prefix) if `item` is found in prelude and its +/// name doesn't clash in current scope. fn find_in_prelude( db: &dyn DefDatabase, root_def_map: &DefMap, + local_def_map: &DefMap, item: ItemInNs, from: ModuleId, -) -> Option> { - if let Some(prelude_module) = root_def_map.prelude() { - // Preludes in block DefMaps are ignored, only the crate DefMap is searched - let prelude_def_map = prelude_module.def_map(db); - let prelude_scope = &prelude_def_map[prelude_module.local_id].scope; - if let Some((name, vis)) = prelude_scope.name_of(item) { - if vis.is_visible_from(db, from) { - return Some(Some(ModPath::from_segments(PathKind::Plain, Some(name.clone())))); - } - } +) -> Option { + let prelude_module = root_def_map.prelude()?; + // Preludes in block DefMaps are ignored, only the crate DefMap is searched + let prelude_def_map = prelude_module.def_map(db); + let prelude_scope = &prelude_def_map[prelude_module.local_id].scope; + let (name, vis) = prelude_scope.name_of(item)?; + if !vis.is_visible_from(db, from) { + return None; + } + + // Check if the name is in current scope and it points to the same def. 
+ let found_and_same_def = + local_def_map.with_ancestor_maps(db, from.local_id, &mut |def_map, local_id| { + let per_ns = def_map[local_id].scope.get(name); + let same_def = match item { + ItemInNs::Types(it) => per_ns.take_types()? == it, + ItemInNs::Values(it) => per_ns.take_values()? == it, + ItemInNs::Macros(it) => per_ns.take_macros()? == it, + }; + Some(same_def) + }); + + if found_and_same_def.unwrap_or(true) { + Some(ModPath::from_segments(PathKind::Plain, Some(name.clone()))) + } else { + None } - None } fn find_self_super(def_map: &DefMap, item: ModuleId, from: ModuleId) -> Option { @@ -512,7 +531,7 @@ mod tests { fn check_found_path_(ra_fixture: &str, path: &str, prefix_kind: Option) { let (db, pos) = TestDB::with_position(ra_fixture); let module = db.module_at_position(pos); - let parsed_path_file = syntax::SourceFile::parse(&format!("use {};", path)); + let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};")); let ast_path = parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap(); let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap(); @@ -531,7 +550,7 @@ mod tests { let found_path = find_path_inner(&db, ItemInNs::Types(resolved), module, prefix_kind, false); - assert_eq!(found_path, Some(mod_path), "{:?}", prefix_kind); + assert_eq!(found_path, Some(mod_path), "{prefix_kind:?}"); } fn check_found_path( @@ -808,6 +827,48 @@ pub mod prelude { ); } + #[test] + fn shadowed_prelude() { + check_found_path( + r#" +//- /main.rs crate:main deps:std +struct S; +$0 +//- /std.rs crate:std +pub mod prelude { + pub mod rust_2018 { + pub struct S; + } +} +"#, + "std::prelude::rust_2018::S", + "std::prelude::rust_2018::S", + "std::prelude::rust_2018::S", + "std::prelude::rust_2018::S", + ); + } + + #[test] + fn imported_prelude() { + check_found_path( + r#" +//- /main.rs crate:main deps:std +use S; +$0 +//- /std.rs crate:std +pub mod prelude { + pub mod rust_2018 { + pub struct S; + } +} +"#, + "S", + "S", + "S", + "S", + ); + } + #[test] fn enum_variant_from_prelude() { let code = r#" diff --git a/crates/hir-def/src/generics.rs b/crates/hir-def/src/generics.rs index 469b28c2d9ede..f74559f5d6634 100644 --- a/crates/hir-def/src/generics.rs +++ b/crates/hir-def/src/generics.rs @@ -142,8 +142,8 @@ pub enum WherePredicateTypeTarget { impl GenericParams { /// Iterator of type_or_consts field - pub fn iter<'a>( - &'a self, + pub fn iter( + &self, ) -> impl DoubleEndedIterator, &TypeOrConstParamData)> { self.type_or_consts.iter() } diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs index 688055e430bdf..1ce191942ec0d 100644 --- a/crates/hir-def/src/import_map.rs +++ b/crates/hir-def/src/import_map.rs @@ -239,7 +239,7 @@ impl fmt::Debug for ImportMap { ItemInNs::Values(_) => "v", ItemInNs::Macros(_) => "m", }; - format!("- {} ({})", info.path, ns) + format!("- {} ({ns})", info.path) }) .collect(); @@ -389,12 +389,12 @@ impl Query { /// Searches dependencies of `krate` for an importable path matching `query`. /// /// This returns a list of items that could be imported from dependencies of `krate`. 
-pub fn search_dependencies<'a>( - db: &'a dyn DefDatabase, +pub fn search_dependencies( + db: &dyn DefDatabase, krate: CrateId, query: Query, ) -> FxHashSet { - let _p = profile::span("search_dependencies").detail(|| format!("{:?}", query)); + let _p = profile::span("search_dependencies").detail(|| format!("{query:?}")); let graph = db.crate_graph(); let import_maps: Vec<_> = @@ -545,7 +545,7 @@ mod tests { None } })?; - return Some(format!("{}::{}", dependency_imports.path_of(trait_)?, assoc_item_name)); + return Some(format!("{}::{assoc_item_name}", dependency_imports.path_of(trait_)?)); } None } @@ -585,7 +585,7 @@ mod tests { let map = db.import_map(krate); - Some(format!("{}:\n{:?}\n", name, map)) + Some(format!("{name}:\n{map:?}\n")) }) .sorted() .collect::(); diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs index 7721221c44475..c7b213b7e9814 100644 --- a/crates/hir-def/src/item_scope.rs +++ b/crates/hir-def/src/item_scope.rs @@ -96,7 +96,7 @@ pub(crate) enum BuiltinShadowMode { /// Legacy macros can only be accessed through special methods like `get_legacy_macros`. /// Other methods will only resolve values, types and module scoped macros only. impl ItemScope { - pub fn entries<'a>(&'a self) -> impl Iterator + 'a { + pub fn entries(&self) -> impl Iterator + '_ { // FIXME: shadowing self.types .keys() @@ -159,18 +159,17 @@ impl ItemScope { pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> { let (def, mut iter) = match item { ItemInNs::Macros(def) => { - return self - .macros - .iter() - .find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis))); + return self.macros.iter().find_map(|(name, &(other_def, vis))| { + (other_def == def).then_some((name, vis)) + }); } ItemInNs::Types(def) => (def, self.types.iter()), ItemInNs::Values(def) => (def, self.values.iter()), }; - iter.find_map(|(name, &(other_def, vis))| (other_def == def).then(|| (name, vis))) + iter.find_map(|(name, &(other_def, vis))| (other_def == def).then_some((name, vis))) } - pub(crate) fn traits<'a>(&'a self) -> impl Iterator + 'a { + pub(crate) fn traits(&self) -> impl Iterator + '_ { self.types .values() .filter_map(|&(def, _)| match def { @@ -327,7 +326,7 @@ impl ItemScope { changed } - pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator, PerNs)> + 'a { + pub(crate) fn resolutions(&self) -> impl Iterator, PerNs)> + '_ { self.entries().map(|(name, res)| (Some(name.clone()), res)).chain( self.unnamed_trait_imports .iter() diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 0aa531eff71f6..80297f8adf16e 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -105,7 +105,7 @@ pub struct ItemTree { impl ItemTree { pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc { - let _p = profile::span("file_item_tree_query").detail(|| format!("{:?}", file_id)); + let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}")); let syntax = match db.parse_or_expand(file_id) { Some(node) => node, None => return Default::default(), @@ -132,7 +132,7 @@ impl ItemTree { ctx.lower_macro_stmts(stmts) }, _ => { - panic!("cannot create item tree from {:?} {}", syntax, syntax); + panic!("cannot create item tree from {syntax:?} {syntax}"); }, } }; diff --git a/crates/hir-def/src/layout.rs b/crates/hir-def/src/layout.rs new file mode 100644 index 0000000000000..6bb4cd94f8a07 --- /dev/null +++ b/crates/hir-def/src/layout.rs @@ -0,0 +1,96 @@ +//! 
Definitions needed for computing data layout of types. + +use std::cmp; + +use la_arena::{Idx, RawIdx}; +pub use rustc_abi::{ + Abi, AbiAndPrefAlign, AddressSpace, Align, Endian, FieldsShape, Integer, IntegerType, + LayoutCalculator, Niche, Primitive, ReprFlags, ReprOptions, Scalar, Size, StructKind, + TargetDataLayout, TargetDataLayoutErrors, WrappingRange, +}; + +use crate::LocalEnumVariantId; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct RustcEnumVariantIdx(pub LocalEnumVariantId); + +impl rustc_index::vec::Idx for RustcEnumVariantIdx { + fn new(idx: usize) -> Self { + RustcEnumVariantIdx(Idx::from_raw(RawIdx::from(idx as u32))) + } + + fn index(self) -> usize { + u32::from(self.0.into_raw()) as usize + } +} + +pub type Layout = rustc_abi::LayoutS; +pub type TagEncoding = rustc_abi::TagEncoding; +pub type Variants = rustc_abi::Variants; + +pub trait IntegerExt { + fn repr_discr( + dl: &TargetDataLayout, + repr: &ReprOptions, + min: i128, + max: i128, + ) -> Result<(Integer, bool), LayoutError>; +} + +impl IntegerExt for Integer { + /// Finds the appropriate Integer type and signedness for the given + /// signed discriminant range and `#[repr]` attribute. + /// N.B.: `u128` values above `i128::MAX` will be treated as signed, but + /// that shouldn't affect anything, other than maybe debuginfo. + fn repr_discr( + dl: &TargetDataLayout, + repr: &ReprOptions, + min: i128, + max: i128, + ) -> Result<(Integer, bool), LayoutError> { + // Theoretically, negative values could be larger in unsigned representation + // than the unsigned representation of the signed minimum. However, if there + // are any negative values, the only valid unsigned representation is u128 + // which can fit all i128 values, so the result remains unaffected. + let unsigned_fit = Integer::fit_unsigned(cmp::max(min as u128, max as u128)); + let signed_fit = cmp::max(Integer::fit_signed(min), Integer::fit_signed(max)); + + if let Some(ity) = repr.int { + let discr = Integer::from_attr(dl, ity); + let fit = if ity.is_signed() { signed_fit } else { unsigned_fit }; + if discr < fit { + return Err(LayoutError::UserError( + "Integer::repr_discr: `#[repr]` hint too small for \ + discriminant range of enum " + .to_string(), + )); + } + return Ok((discr, ity.is_signed())); + } + + let at_least = if repr.c() { + // This is usually I32, however it can be different on some platforms, + // notably hexagon and arm-none/thumb-none + dl.c_enum_min_size + } else { + // repr(Rust) enums try to be as small as possible + Integer::I8 + }; + + // If there are no negative values, we can use the unsigned fit. 
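        // A few hypothetical examples (not from the original source) of how the pieces
        // above combine, assuming `repr(Rust)` so that `at_least == Integer::I8`:
        //
        //   min = 0,  max = 255  -> unsigned_fit = I8,  min >= 0  -> (I8,  unsigned)
        //   min = 0,  max = 300  -> unsigned_fit = I16, min >= 0  -> (I16, unsigned)
        //   min = -1, max = 255  -> signed_fit   = I16, min <  0  -> (I16, signed)
        //
        // With `repr(C)` the result is additionally bumped up to at least
        // `dl.c_enum_min_size` (usually `I32`, but target-dependent).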
+ Ok(if min >= 0 { + (cmp::max(unsigned_fit, at_least), false) + } else { + (cmp::max(signed_fit, at_least), true) + }) + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub enum LayoutError { + UserError(String), + SizeOverflow, + HasPlaceholder, + NotImplemented, + Unknown, +} diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 5c7aa72349f6e..8267ef09cb0a2 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -34,6 +34,7 @@ pub mod adt; pub mod data; pub mod generics; pub mod lang_item; +pub mod layout; pub mod expr; pub mod body; diff --git a/crates/hir-def/src/macro_expansion_tests.rs b/crates/hir-def/src/macro_expansion_tests.rs index 81b9c5c4bfaf9..79c85d118316a 100644 --- a/crates/hir-def/src/macro_expansion_tests.rs +++ b/crates/hir-def/src/macro_expansion_tests.rs @@ -170,7 +170,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream } let pp = pretty_print_macro_expansion( parse.syntax_node(), - show_token_ids.then(|| &*token_map), + show_token_ids.then_some(&*token_map), ); let indent = IndentLevel::from_node(call.syntax()); let pp = reindent(indent, pp); @@ -179,7 +179,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream if tree { let tree = format!("{:#?}", parse.syntax_node()) .split_inclusive('\n') - .map(|line| format!("// {}", line)) + .map(|line| format!("// {line}")) .collect::(); format_to!(expn_text, "\n{}", tree) } diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index c04cd1651926d..bb45266725c6b 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -163,7 +163,8 @@ macro_rules! compile_error { } // This expands to nothing (since it's in item position), but emits an error. -compile_error!("error!"); +compile_error!("error, with an escaped quote: \""); +compile_error!(r"this is a raw string"); "#, expect![[r##" #[rustc_builtin_macro] @@ -172,7 +173,8 @@ macro_rules! compile_error { ($msg:expr,) => ({ /* compiler built-in */ }) } -/* error: error! */ +/* error: error, with an escaped quote: " */ +/* error: this is a raw string */ "##]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 457e43925c635..2d5f2a692e5da 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -1630,3 +1630,48 @@ const _: i32 = -0--1--2; "#]], ); } + +#[test] +fn test_punct_without_space() { + // Puncts are "glued" greedily. + check( + r#" +macro_rules! foo { + (: : :) => { "1 1 1" }; + (: ::) => { "1 2" }; + (:: :) => { "2 1" }; + + (: : : :) => { "1 1 1 1" }; + (:: : :) => { "2 1 1" }; + (: :: :) => { "1 2 1" }; + (: : ::) => { "1 1 2" }; + (:: ::) => { "2 2" }; +} + +fn test() { + foo!(:::); + foo!(: :::); + foo!(::::); +} +"#, + expect![[r#" +macro_rules! 
foo { + (: : :) => { "1 1 1" }; + (: ::) => { "1 2" }; + (:: :) => { "2 1" }; + + (: : : :) => { "1 1 1 1" }; + (:: : :) => { "2 1 1" }; + (: :: :) => { "1 2 1" }; + (: : ::) => { "1 1 2" }; + (:: ::) => { "2 2" }; +} + +fn test() { + "2 1"; + "1 2 1"; + "2 2"; +} +"#]], + ); +} diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs index fc90c6e9f370f..26f16542cbb26 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs @@ -136,3 +136,52 @@ macro_rules! m { ($($i:ident)? $vis:vis) => () } "#]], ) } + +// For this test and the one below, see rust-lang/rust#86730. +#[test] +fn expr_dont_match_let_expr() { + check( + r#" +macro_rules! foo { + ($e:expr) => { $e } +} + +fn test() { + foo!(let a = 3); +} +"#, + expect![[r#" +macro_rules! foo { + ($e:expr) => { $e } +} + +fn test() { + /* error: no rule matches input tokens */missing; +} +"#]], + ); +} + +#[test] +fn expr_dont_match_inline_const() { + check( + r#" +macro_rules! foo { + ($e:expr) => { $e } +} + +fn test() { + foo!(const { 3 }); +} +"#, + expect![[r#" +macro_rules! foo { + ($e:expr) => { $e } +} + +fn test() { + /* error: no rule matches input tokens */missing; +} +"#]], + ); +} diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 9b4ce9f97c86f..f42b0079d7697 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -457,7 +457,7 @@ impl DefMap { for (name, child) in map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0)) { - let path = format!("{}::{}", path, name); + let path = format!("{path}::{name}"); buf.push('\n'); go(buf, map, &path, *child); } diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index b0dd01f9dbea2..160203b778344 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -40,7 +40,7 @@ use crate::{ diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, - proc_macro::{ProcMacroDef, ProcMacroKind}, + proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroDef, ProcMacroKind}, BuiltinShadowMode, DefMap, ModuleData, ModuleOrigin, ResolveMode, }, path::{ImportAlias, ModPath, PathKind}, @@ -67,7 +67,7 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T let dep_def_map = db.crate_def_map(dep.crate_id); let dep_root = dep_def_map.module_id(dep_def_map.root); - deps.insert(dep.as_name(), dep_root.into()); + deps.insert(dep.as_name(), dep_root); if dep.is_prelude() && !tree_id.is_block() { def_map.extern_prelude.insert(dep.as_name(), dep_root); @@ -1017,7 +1017,7 @@ impl DefCollector<'_> { None => true, Some(old_vis) => { let max_vis = old_vis.max(vis, &self.def_map).unwrap_or_else(|| { - panic!("`Tr as _` imports with unrelated visibilities {:?} and {:?} (trait {:?})", old_vis, vis, tr); + panic!("`Tr as _` imports with unrelated visibilities {old_vis:?} and {vis:?} (trait {tr:?})"); }); if max_vis == old_vis { @@ -1094,7 +1094,7 @@ impl DefCollector<'_> { ast_id, *expand_to, self.def_map.krate, - &resolver_def_id, + resolver_def_id, &mut |_err| (), ); if let Ok(Ok(call_id)) = call_id { @@ -1110,7 +1110,7 @@ impl DefCollector<'_> { *derive_attr, *derive_pos as u32, self.def_map.krate, - &resolver, + resolver, ); if let Ok((macro_id, def_id, call_id)) = id { @@ -1345,7 +1345,7 @@ impl DefCollector<'_> { // Missing proc macros are non-fatal, so they 
are handled specially. DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate) } - _ => DefDiagnostic::macro_error(module_id, loc.kind.clone(), err.to_string()), + _ => DefDiagnostic::macro_error(module_id, loc.kind, err.to_string()), }; self.def_map.diagnostics.push(diag); @@ -2005,6 +2005,7 @@ impl ModCollector<'_, '_> { let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast()); // Case 1: builtin macros + let mut helpers_opt = None; let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into()); let expander = if attrs.by_key("rustc_builtin_macro").exists() { if let Some(expander) = find_builtin_macro(&mac.name) { @@ -2013,6 +2014,25 @@ impl ModCollector<'_, '_> { Either::Right(it) => MacroExpander::BuiltInEager(it), } } else if let Some(expander) = find_builtin_derive(&mac.name) { + if let Some(attr) = attrs.by_key("rustc_builtin_macro").tt_values().next() { + // NOTE: The item *may* have both `#[rustc_builtin_macro]` and `#[proc_macro_derive]`, + // in which case rustc ignores the helper attributes from the latter, but it + // "doesn't make sense in practice" (see rust-lang/rust#87027). + if let Some((name, helpers)) = + parse_macro_name_and_helper_attrs(&attr.token_trees) + { + // NOTE: rustc overrides the name if the macro name if it's different from the + // macro name, but we assume it isn't as there's no such case yet. FIXME if + // the following assertion fails. + stdx::always!( + name == mac.name, + "built-in macro {} has #[rustc_builtin_macro] which declares different name {}", + mac.name, + name + ); + helpers_opt = Some(helpers); + } + } MacroExpander::BuiltInDerive(expander) } else if let Some(expander) = find_builtin_attr(&mac.name) { MacroExpander::BuiltInAttr(expander) @@ -2037,6 +2057,12 @@ impl ModCollector<'_, '_> { macro_id, &self.item_tree[mac.visibility], ); + if let Some(helpers) = helpers_opt { + self.def_collector + .def_map + .exported_derives + .insert(macro_id_to_def_id(self.def_collector.db, macro_id.into()), helpers); + } } fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) { @@ -2059,7 +2085,7 @@ impl ModCollector<'_, '_> { .scope .get_legacy_macro(name) .and_then(|it| it.last()) - .map(|&it| macro_id_to_def_id(self.def_collector.db, it.into())) + .map(|&it| macro_id_to_def_id(self.def_collector.db, it)) }, ) }) diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index ca7bcc814e8f8..4c263846d27d2 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -34,7 +34,7 @@ impl ModDir { let path = match attr_path.map(SmolStr::as_str) { None => { let mut path = self.dir_path.clone(); - path.push(&name.to_smol_str()); + path.push(&name.unescaped().to_smol_str()); path } Some(attr_path) => { @@ -74,12 +74,12 @@ impl ModDir { candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner)) } None if file_id.is_include_macro(db.upcast()) => { - candidate_files.push(format!("{}.rs", name)); - candidate_files.push(format!("{}/mod.rs", name)); + candidate_files.push(format!("{name}.rs")); + candidate_files.push(format!("{name}/mod.rs")); } None => { - candidate_files.push(format!("{}{}.rs", self.dir_path.0, name)); - candidate_files.push(format!("{}{}/mod.rs", self.dir_path.0, name)); + candidate_files.push(format!("{}{name}.rs", self.dir_path.0)); + candidate_files.push(format!("{}{name}/mod.rs", self.dir_path.0)); } }; @@ -91,7 +91,7 @@ impl ModDir { let 
(dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() { (DirPath::empty(), false) } else { - (DirPath::new(format!("{}/", name)), true) + (DirPath::new(format!("{name}/")), true) }; if let Some(mod_dir) = self.child(dir_path, root_non_dir_owner) { return Ok((file_id, is_mod_rs, mod_dir)); @@ -156,7 +156,7 @@ impl DirPath { } else { attr }; - let res = format!("{}{}", base, attr); + let res = format!("{base}{attr}"); res } } diff --git a/crates/hir-def/src/nameres/path_resolution.rs b/crates/hir-def/src/nameres/path_resolution.rs index 20d39ec6cb92e..1d9d5cccded23 100644 --- a/crates/hir-def/src/nameres/path_resolution.rs +++ b/crates/hir-def/src/nameres/path_resolution.rs @@ -170,8 +170,8 @@ impl DefMap { ) -> ResolvePathResult { let graph = db.crate_graph(); let _cx = stdx::panic_context::enter(format!( - "DefMap {:?} crate_name={:?} block={:?} path={}", - self.krate, graph[self.krate].display_name, self.block, path + "DefMap {:?} crate_name={:?} block={:?} path={path}", + self.krate, graph[self.krate].display_name, self.block )); let mut segments = path.segments().iter().enumerate(); @@ -390,7 +390,7 @@ impl DefMap { .get_legacy_macro(name) // FIXME: shadowing .and_then(|it| it.last()) - .map_or_else(PerNs::none, |&m| PerNs::macros(m.into(), Visibility::Public)); + .map_or_else(PerNs::none, |&m| PerNs::macros(m, Visibility::Public)); let from_scope = self[module].scope.get(name); let from_builtin = match self.block { Some(_) => { diff --git a/crates/hir-def/src/nameres/proc_macro.rs b/crates/hir-def/src/nameres/proc_macro.rs index 52b79cd0fdda2..06b23392cfe46 100644 --- a/crates/hir-def/src/nameres/proc_macro.rs +++ b/crates/hir-def/src/nameres/proc_macro.rs @@ -37,45 +37,53 @@ impl Attrs { Some(ProcMacroDef { name: func_name.clone(), kind: ProcMacroKind::Attr }) } else if self.by_key("proc_macro_derive").exists() { let derive = self.by_key("proc_macro_derive").tt_values().next()?; + let def = parse_macro_name_and_helper_attrs(&derive.token_trees) + .map(|(name, helpers)| ProcMacroDef { name, kind: ProcMacroKind::CustomDerive { helpers } }); - match &*derive.token_trees { - // `#[proc_macro_derive(Trait)]` - [TokenTree::Leaf(Leaf::Ident(trait_name))] => Some(ProcMacroDef { - name: trait_name.as_name(), - kind: ProcMacroKind::CustomDerive { helpers: Box::new([]) }, - }), - - // `#[proc_macro_derive(Trait, attributes(helper1, helper2, ...))]` - [ - TokenTree::Leaf(Leaf::Ident(trait_name)), - TokenTree::Leaf(Leaf::Punct(comma)), - TokenTree::Leaf(Leaf::Ident(attributes)), - TokenTree::Subtree(helpers) - ] if comma.char == ',' && attributes.text == "attributes" => - { - let helpers = helpers.token_trees.iter() - .filter(|tt| !matches!(tt, TokenTree::Leaf(Leaf::Punct(comma)) if comma.char == ',')) - .map(|tt| { - match tt { - TokenTree::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()), - _ => None - } - }) - .collect::>>()?; - - Some(ProcMacroDef { - name: trait_name.as_name(), - kind: ProcMacroKind::CustomDerive { helpers }, - }) - } - - _ => { - tracing::trace!("malformed `#[proc_macro_derive]`: {}", derive); - None - } + if def.is_none() { + tracing::trace!("malformed `#[proc_macro_derive]`: {}", derive); } + + def } else { None } } } + +// This fn is intended for `#[proc_macro_derive(..)]` and `#[rustc_builtin_macro(..)]`, which have +// the same strucuture. 
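// As a concrete illustration (hypothetical derive and helper names): given the token
// trees of `#[proc_macro_derive(Serialize, attributes(serde))]`, this returns
// `Some(("Serialize", ["serde"]))` as `Name`s; any other shape yields `None`.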
+#[rustfmt::skip] +pub(crate) fn parse_macro_name_and_helper_attrs(tt: &[TokenTree]) -> Option<(Name, Box<[Name]>)> { + match tt { + // `#[proc_macro_derive(Trait)]` + // `#[rustc_builtin_macro(Trait)]` + [TokenTree::Leaf(Leaf::Ident(trait_name))] => Some((trait_name.as_name(), Box::new([]))), + + // `#[proc_macro_derive(Trait, attributes(helper1, helper2, ...))]` + // `#[rustc_builtin_macro(Trait, attributes(helper1, helper2, ...))]` + [ + TokenTree::Leaf(Leaf::Ident(trait_name)), + TokenTree::Leaf(Leaf::Punct(comma)), + TokenTree::Leaf(Leaf::Ident(attributes)), + TokenTree::Subtree(helpers) + ] if comma.char == ',' && attributes.text == "attributes" => + { + let helpers = helpers + .token_trees + .iter() + .filter( + |tt| !matches!(tt, TokenTree::Leaf(Leaf::Punct(comma)) if comma.char == ','), + ) + .map(|tt| match tt { + TokenTree::Leaf(Leaf::Ident(helper)) => Some(helper.as_name()), + _ => None, + }) + .collect::>>()?; + + Some((trait_name.as_name(), helpers)) + } + + _ => None, + } +} diff --git a/crates/hir-def/src/nameres/tests/incremental.rs b/crates/hir-def/src/nameres/tests/incremental.rs index 2e8cb3621fce6..f5190b76db058 100644 --- a/crates/hir-def/src/nameres/tests/incremental.rs +++ b/crates/hir-def/src/nameres/tests/incremental.rs @@ -13,7 +13,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: let events = db.log_executed(|| { db.crate_def_map(krate); }); - assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}") } db.set_file_text(pos.file_id, Arc::new(ra_fixture_change.to_string())); @@ -21,7 +21,7 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: let events = db.log_executed(|| { db.crate_def_map(krate); }); - assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}") } } @@ -94,7 +94,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() { let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); assert_eq!(module_data.scope.resolutions().count(), 1); }); - assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}") } db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string())); @@ -104,7 +104,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() { let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); assert_eq!(module_data.scope.resolutions().count(), 1); }); - assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) + assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}") } } diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs index 3ece1379ad774..fe0ad4f3863c4 100644 --- a/crates/hir-def/src/nameres/tests/macros.rs +++ b/crates/hir-def/src/nameres/tests/macros.rs @@ -822,6 +822,28 @@ fn derive() {} ); } +#[test] +fn resolves_derive_helper_rustc_builtin_macro() { + cov_mark::check!(resolved_derive_helper); + // This is NOT the correct usage of `default` helper attribute, but we don't resolve helper + // attributes on non mod items in hir nameres. 
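    // For contrast, the conventional use of the helper attaches `#[default]` to a
    // variant covered by the derive (ordinary Rust, not part of this fixture):
    //
    //   #[derive(Default)]
    //   enum E {
    //       #[default]
    //       A,
    //       B,
    //   }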
+ check( + r#" +//- minicore: derive, default +#[derive(Default)] +#[default] +enum E { + A, + B, +} +"#, + expect![[r#" + crate + E: t + "#]], + ); +} + #[test] fn unresolved_attr_with_cfg_attr_hang() { // Another regression test for https://github.com/rust-lang/rust-analyzer/issues/8905 diff --git a/crates/hir-def/src/nameres/tests/mod_resolution.rs b/crates/hir-def/src/nameres/tests/mod_resolution.rs index c575bf7cac255..a019312884787 100644 --- a/crates/hir-def/src/nameres/tests/mod_resolution.rs +++ b/crates/hir-def/src/nameres/tests/mod_resolution.rs @@ -156,6 +156,43 @@ pub struct Baz; ); } +#[test] +fn module_resolution_works_for_inline_raw_modules() { + check( + r#" +//- /lib.rs +mod r#async { + pub mod a; + pub mod r#async; +} +use self::r#async::a::Foo; +use self::r#async::r#async::Bar; + +//- /async/a.rs +pub struct Foo; + +//- /async/async.rs +pub struct Bar; +"#, + expect![[r#" + crate + Bar: t v + Foo: t v + r#async: t + + crate::r#async + a: t + r#async: t + + crate::r#async::a + Foo: t v + + crate::r#async::r#async + Bar: t v + "#]], + ); +} + #[test] fn module_resolution_decl_path() { check( diff --git a/crates/hir-def/src/pretty.rs b/crates/hir-def/src/pretty.rs index 933970d10e472..befd0c5ffa055 100644 --- a/crates/hir-def/src/pretty.rs +++ b/crates/hir-def/src/pretty.rs @@ -92,7 +92,7 @@ pub(crate) fn print_generic_args(generics: &GenericArgs, buf: &mut dyn Write) -> pub(crate) fn print_generic_arg(arg: &GenericArg, buf: &mut dyn Write) -> fmt::Result { match arg { GenericArg::Type(ty) => print_type_ref(ty, buf), - GenericArg::Const(c) => write!(buf, "{}", c), + GenericArg::Const(c) => write!(buf, "{c}"), GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name), } } @@ -118,7 +118,7 @@ pub(crate) fn print_type_ref(type_ref: &TypeRef, buf: &mut dyn Write) -> fmt::Re Mutability::Shared => "*const", Mutability::Mut => "*mut", }; - write!(buf, "{} ", mtbl)?; + write!(buf, "{mtbl} ")?; print_type_ref(pointee, buf)?; } TypeRef::Reference(pointee, lt, mtbl) => { @@ -130,13 +130,13 @@ pub(crate) fn print_type_ref(type_ref: &TypeRef, buf: &mut dyn Write) -> fmt::Re if let Some(lt) = lt { write!(buf, "{} ", lt.name)?; } - write!(buf, "{}", mtbl)?; + write!(buf, "{mtbl}")?; print_type_ref(pointee, buf)?; } TypeRef::Array(elem, len) => { write!(buf, "[")?; print_type_ref(elem, buf)?; - write!(buf, "; {}]", len)?; + write!(buf, "; {len}]")?; } TypeRef::Slice(elem) => { write!(buf, "[")?; diff --git a/crates/hir-def/src/resolver.rs b/crates/hir-def/src/resolver.rs index 070f6837133a3..1ef7f9577fe8b 100644 --- a/crates/hir-def/src/resolver.rs +++ b/crates/hir-def/src/resolver.rs @@ -381,7 +381,7 @@ impl Resolver { }); def_map[module_id].scope.legacy_macros().for_each(|(name, macs)| { macs.iter().for_each(|&mac| { - res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac)))); + res.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac))); }) }); def_map.extern_prelude().for_each(|(name, &def)| { @@ -517,10 +517,7 @@ impl Scope { }); m.def_map[m.module_id].scope.legacy_macros().for_each(|(name, macs)| { macs.iter().for_each(|&mac| { - acc.add( - name, - ScopeDef::ModuleDef(ModuleDefId::MacroId(MacroId::from(mac))), - ); + acc.add(name, ScopeDef::ModuleDef(ModuleDefId::MacroId(mac))); }) }); } diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index 0c886ac4da9db..58d192f9fe008 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -115,7 +115,8 @@ pub fn 
pseudo_derive_attr_expansion( }; let mut token_trees = Vec::new(); - for tt in (&args.token_trees) + for tt in args + .token_trees .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. })))) { token_trees.push(mk_leaf('#')); diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 7b19518e25a84..5522bdf3b3fe2 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -379,15 +379,10 @@ fn compile_error_expand( tt: &tt::Subtree, ) -> ExpandResult { let err = match &*tt.token_trees { - [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => { - let text = it.text.as_str(); - if text.starts_with('"') && text.ends_with('"') { - // FIXME: does not handle raw strings - ExpandError::Other(text[1..text.len() - 1].into()) - } else { - ExpandError::Other("`compile_error!` argument must be a string".into()) - } - } + [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { + Some(unquoted) => ExpandError::Other(unquoted.into()), + None => ExpandError::Other("`compile_error!` argument must be a string".into()), + }, _ => ExpandError::Other("`compile_error!` argument must be a string".into()), }; @@ -454,7 +449,7 @@ fn concat_bytes_expand( match token.kind() { syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()), syntax::SyntaxKind::BYTE_STRING => { - let components = unquote_byte_string(lit).unwrap_or_else(Vec::new); + let components = unquote_byte_string(lit).unwrap_or_default(); components.into_iter().for_each(|x| bytes.push(x.to_string())); } _ => { @@ -676,7 +671,7 @@ fn option_env_expand( let expanded = match get_env_inner(db, arg_id, &key) { None => quote! { #DOLLAR_CRATE::option::Option::None::<&str> }, - Some(s) => quote! { #DOLLAR_CRATE::option::Some(#s) }, + Some(s) => quote! 
{ #DOLLAR_CRATE::option::Option::Some(#s) }, }; ExpandResult::ok(ExpandedEager::new(expanded)) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 87e4db03984ab..b28e60187deff 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -240,7 +240,7 @@ fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc { } fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option { - match file_id.0 { + match file_id.repr() { HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()), HirFileIdRepr::MacroFile(macro_file) => { // FIXME: Note how we convert from `Parse` to `SyntaxNode` here, @@ -444,7 +444,7 @@ fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult { return ExpandResult::only_err(ExpandError::Other( - format!("invalid macro definition: {}", err).into(), + format!("invalid macro definition: {err}").into(), )) } }; diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index 5fd099aea7d64..a1474c44e6c6f 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -161,7 +161,7 @@ pub fn expand_eager_macro( Ok(Ok(db.intern_macro_call(loc))) } else { - panic!("called `expand_eager_macro` on non-eager macro def {:?}", def); + panic!("called `expand_eager_macro` on non-eager macro def {def:?}"); } } @@ -208,7 +208,7 @@ fn eager_macro_recur( // Collect replacement for child in children { let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) { - Some(path) => macro_resolver(path.clone()).ok_or_else(|| UnresolvedMacro { path })?, + Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?, None => { diagnostic_sink(ExpandError::Other("malformed macro invocation".into())); continue; diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index a4abe75626e6d..75d364d5f846b 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -366,7 +366,7 @@ mod tests { fixups.append, ); - let actual = format!("{}\n", tt); + let actual = format!("{tt}\n"); expect.indent(false); expect.assert_eq(&actual); diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index d60734372c0ce..df1e20256ca31 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -17,7 +17,7 @@ use crate::{ db::{self, AstDatabase}, fixup, name::{AsName, Name}, - HirFileId, HirFileIdRepr, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, + HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile, }; #[derive(Clone, Debug)] @@ -216,9 +216,9 @@ fn make_hygiene_info( impl HygieneFrame { pub(crate) fn new(db: &dyn AstDatabase, file_id: HirFileId) -> HygieneFrame { - let (info, krate, local_inner) = match file_id.0 { - HirFileIdRepr::FileId(_) => (None, None, false), - HirFileIdRepr::MacroFile(macro_file) => { + let (info, krate, local_inner) = match file_id.macro_file() { + None => (None, None, false), + Some(macro_file) => { let loc = db.lookup_intern_macro_call(macro_file.macro_call_id); let info = make_hygiene_info(db, macro_file, &loc).map(|info| (loc.kind.file_id(), info)); diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 7352b003a491c..bc5f9f3b8afd4 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -23,7 +23,11 @@ pub use mbe::{Origin, ValueResult}; use std::{fmt, hash::Hash, iter, sync::Arc}; -use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange, 
ProcMacroKind}; +use base_db::{ + impl_intern_key, + salsa::{self, InternId}, + CrateId, FileId, FileRange, ProcMacroKind, +}; use either::Either; use syntax::{ algo::{self, skip_trivia_token}, @@ -79,26 +83,12 @@ impl fmt::Display for ExpandError { /// finite (because everything bottoms out at the real `FileId`) and small /// (`MacroCallId` uses the location interning. You can check details here: /// ). +/// +/// The two variants are encoded in a single u32 which are differentiated by the MSB. +/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a +/// `MacroCallId`. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct HirFileId(HirFileIdRepr); - -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] -enum HirFileIdRepr { - FileId(FileId), - MacroFile(MacroFile), -} - -impl From for HirFileId { - fn from(id: FileId) -> Self { - HirFileId(HirFileIdRepr::FileId(id)) - } -} - -impl From for HirFileId { - fn from(id: MacroFile) -> Self { - HirFileId(HirFileIdRepr::MacroFile(id)) - } -} +pub struct HirFileId(u32); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct MacroFile { @@ -172,13 +162,37 @@ pub enum MacroCallKind { }, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +enum HirFileIdRepr { + FileId(FileId), + MacroFile(MacroFile), +} + +impl From for HirFileId { + fn from(FileId(id): FileId) -> Self { + assert!(id < Self::MAX_FILE_ID); + HirFileId(id) + } +} + +impl From for HirFileId { + fn from(MacroFile { macro_call_id: MacroCallId(id) }: MacroFile) -> Self { + let id = id.as_u32(); + assert!(id < Self::MAX_FILE_ID); + HirFileId(id | Self::MACRO_FILE_TAG_MASK) + } +} + impl HirFileId { + const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK; + const MACRO_FILE_TAG_MASK: u32 = 1 << 31; + /// For macro-expansion files, returns the file original source file the /// expansion originated from. pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId { let mut file_id = self; loop { - match file_id.0 { + match file_id.repr() { HirFileIdRepr::FileId(id) => break id, HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id); @@ -194,7 +208,7 @@ impl HirFileId { pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 { let mut level = 0; let mut curr = self; - while let HirFileIdRepr::MacroFile(macro_file) = curr.0 { + while let Some(macro_file) = curr.macro_file() { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); level += 1; @@ -205,25 +219,17 @@ impl HirFileId { /// If this is a macro call, returns the syntax node of the call. pub fn call_node(self, db: &dyn db::AstDatabase) -> Option> { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - Some(loc.kind.to_node(db)) - } - } + let macro_file = self.macro_file()?; + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + Some(loc.kind.to_node(db)) } /// If this is a macro call, returns the syntax node of the very first macro call this file resides in. 
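A minimal standalone sketch of the tagging scheme described above, using hypothetical names rather than the real `HirFileId`/`MacroCallId` types: a single `u32` carries either kind of id, and the most significant bit says which one it is.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct PackedId(u32);

impl PackedId {
    const TAG: u32 = 1 << 31;

    fn from_file(raw: u32) -> Self {
        assert!(raw < Self::TAG);
        PackedId(raw)
    }

    fn from_macro_call(raw: u32) -> Self {
        assert!(raw < Self::TAG);
        PackedId(raw | Self::TAG)
    }

    // `Ok` carries a file id (MSB clear), `Err` carries a macro-call id (MSB set).
    fn unpack(self) -> Result<u32, u32> {
        if self.0 & Self::TAG == 0 { Ok(self.0) } else { Err(self.0 ^ Self::TAG) }
    }
}

fn main() {
    assert_eq!(PackedId::from_file(7).unpack(), Ok(7));
    assert_eq!(PackedId::from_macro_call(7).unpack(), Err(7));
}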
pub fn original_call_node(self, db: &dyn db::AstDatabase) -> Option<(FileId, SyntaxNode)> { - let mut call = match self.0 { - HirFileIdRepr::FileId(_) => return None, - HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { - db.lookup_intern_macro_call(macro_call_id).kind.to_node(db) - } - }; + let mut call = + db.lookup_intern_macro_call(self.macro_file()?.macro_call_id).kind.to_node(db); loop { - match call.file_id.0 { + match call.file_id.repr() { HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)), HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { call = db.lookup_intern_macro_call(macro_call_id).kind.to_node(db); @@ -234,84 +240,74 @@ impl HirFileId { /// Return expansion information if it is a macro-expansion file pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let macro_file = self.macro_file()?; + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let arg_tt = loc.kind.arg(db)?; + let arg_tt = loc.kind.arg(db)?; - let macro_def = db.macro_def(loc.def).ok()?; - let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?; - let macro_arg = db.macro_arg(macro_file.macro_call_id)?; + let macro_def = db.macro_def(loc.def).ok()?; + let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?; + let macro_arg = db.macro_arg(macro_file.macro_call_id)?; - let def = loc.def.ast_id().left().and_then(|id| { - let def_tt = match id.to_node(db) { - ast::Macro::MacroRules(mac) => mac.token_tree()?, - ast::Macro::MacroDef(_) - if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => - { - return None - } - ast::Macro::MacroDef(mac) => mac.body()?, - }; - Some(InFile::new(id.file_id, def_tt)) - }); - let attr_input_or_mac_def = def.or_else(|| match loc.kind { - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { - let tt = ast_id - .to_node(db) - .doc_comments_and_attrs() - .nth(invoc_attr_index as usize) - .and_then(Either::left)? - .token_tree()?; - Some(InFile::new(ast_id.file_id, tt)) - } - _ => None, - }); - - Some(ExpansionInfo { - expanded: InFile::new(self, parse.syntax_node()), - arg: InFile::new(loc.kind.file_id(), arg_tt), - attr_input_or_mac_def, - macro_arg_shift: mbe::Shift::new(¯o_arg.0), - macro_arg, - macro_def, - exp_map, - }) + let def = loc.def.ast_id().left().and_then(|id| { + let def_tt = match id.to_node(db) { + ast::Macro::MacroRules(mac) => mac.token_tree()?, + ast::Macro::MacroDef(_) if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => { + return None + } + ast::Macro::MacroDef(mac) => mac.body()?, + }; + Some(InFile::new(id.file_id, def_tt)) + }); + let attr_input_or_mac_def = def.or_else(|| match loc.kind { + MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { + let tt = ast_id + .to_node(db) + .doc_comments_and_attrs() + .nth(invoc_attr_index as usize) + .and_then(Either::left)? 
+ .token_tree()?; + Some(InFile::new(ast_id.file_id, tt)) } - } + _ => None, + }); + + Some(ExpansionInfo { + expanded: InFile::new(self, parse.syntax_node()), + arg: InFile::new(loc.kind.file_id(), arg_tt), + attr_input_or_mac_def, + macro_arg_shift: mbe::Shift::new(¯o_arg.0), + macro_arg, + macro_def, + exp_map, + }) } /// Indicate it is macro file generated for builtin derive pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option> { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - let attr = match loc.def.kind { - MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db), - _ => return None, - }; - Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) - } - } + let macro_file = self.macro_file()?; + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + let attr = match loc.def.kind { + MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db), + _ => return None, + }; + Some(attr.with_value(ast::Attr::cast(attr.value.clone())?)) } pub fn is_custom_derive(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::FileId(_) => false, - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)) } + None => false, } } /// Return whether this file is an include macro pub fn is_include_macro(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.eager, Some(EagerCallInfo { included_file: Some(_), .. })) } @@ -321,8 +317,8 @@ impl HirFileId { /// Return whether this file is an attr macro pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.kind, MacroCallKind::Attr { .. }) } @@ -333,23 +329,36 @@ impl HirFileId { /// Return whether this file is the pseudo expansion of the derive attribute. /// See [`crate::builtin_attr_macro::derive_attr_expand`]. pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool { - match self.0 { - HirFileIdRepr::MacroFile(macro_file) => { + match self.macro_file() { + Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); matches!(loc.kind, MacroCallKind::Attr { is_derive: true, .. 
}) } - _ => false, + None => false, } } + #[inline] pub fn is_macro(self) -> bool { - matches!(self.0, HirFileIdRepr::MacroFile(_)) + self.0 & Self::MACRO_FILE_TAG_MASK != 0 } + #[inline] pub fn macro_file(self) -> Option { - match self.0 { - HirFileIdRepr::FileId(_) => None, - HirFileIdRepr::MacroFile(m) => Some(m), + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => None, + _ => Some(MacroFile { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), + } + } + + fn repr(self) -> HirFileIdRepr { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => HirFileIdRepr::FileId(FileId(self.0)), + _ => HirFileIdRepr::MacroFile(MacroFile { + macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)), + }), } } } @@ -442,7 +451,7 @@ impl MacroCallKind { pub fn original_call_range_with_body(self, db: &dyn db::AstDatabase) -> FileRange { let mut kind = self; let file_id = loop { - match kind.file_id().0 { + match kind.file_id().repr() { HirFileIdRepr::MacroFile(file) => { kind = db.lookup_intern_macro_call(file.macro_call_id).kind; } @@ -467,7 +476,7 @@ impl MacroCallKind { pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange { let mut kind = self; let file_id = loop { - match kind.file_id().0 { + match kind.file_id().repr() { HirFileIdRepr::MacroFile(file) => { kind = db.lookup_intern_macro_call(file.macro_call_id).kind; } @@ -779,7 +788,7 @@ impl<'a> InFile<&'a SyntaxNode> { /// For attributes and derives, this will point back to the attribute only. /// For the entire item `InFile::use original_file_range_full`. pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange { - match self.file_id.0 { + match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { if let Some(res) = self.original_file_range_opt(db) { @@ -846,7 +855,7 @@ impl InFile { /// Falls back to the macro call range if the node cannot be mapped up fully. pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange { - match self.file_id.0 { + match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() }, HirFileIdRepr::MacroFile(mac_file) => { if let Some(res) = self.original_file_range_opt(db) { @@ -861,7 +870,7 @@ impl InFile { /// Attempts to map the syntax node back up its macro calls. 
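    /// Returns `None` if the node cannot be mapped up fully (the non-`_opt` variant
    /// above falls back to the macro call range in that case).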
pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option { - match self.file_id.0 { + match self.file_id.repr() { HirFileIdRepr::FileId(file_id) => { Some(FileRange { file_id, range: self.value.text_range() }) } diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 2679a1c360267..e8b3e312aab7a 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -62,7 +62,7 @@ impl<'a> UnescapedName<'a> { it.clone() } } - Repr::TupleField(it) => SmolStr::new(&it.to_string()), + Repr::TupleField(it) => SmolStr::new(it.to_string()), } } } @@ -139,7 +139,7 @@ impl Name { pub fn to_smol_str(&self) -> SmolStr { match &self.0 { Repr::Text(it) => it.clone(), - Repr::TupleField(it) => SmolStr::new(&it.to_string()), + Repr::TupleField(it) => SmolStr::new(it.to_string()), } } @@ -338,44 +338,6 @@ pub mod known { test_case, recursion_limit, feature, - // Safe intrinsics - abort, - add_with_overflow, - black_box, - bitreverse, - bswap, - caller_location, - ctlz, - ctpop, - cttz, - discriminant_value, - forget, - likely, - maxnumf32, - maxnumf64, - min_align_of_val, - min_align_of, - minnumf32, - minnumf64, - mul_with_overflow, - needs_drop, - ptr_guaranteed_eq, - ptr_guaranteed_ne, - rotate_left, - rotate_right, - rustc_peek, - saturating_add, - saturating_sub, - size_of_val, - size_of, - sub_with_overflow, - type_id, - type_name, - unlikely, - variant_count, - wrapping_add, - wrapping_mul, - wrapping_sub, // known methods of lang items eq, ne, @@ -419,6 +381,8 @@ pub mod known { shr, sub_assign, sub, + unsafe_cell, + va_list ); // self/Self cannot be used as an identifier diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index e839e97bf02d8..c0a7bc7ca8815 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -233,7 +233,7 @@ mod tests { let quoted = quote!(#a); assert_eq!(quoted.to_string(), "hello"); - let t = format!("{:?}", quoted); + let t = format!("{quoted:?}"); assert_eq!(t, "SUBTREE $\n IDENT hello 4294967295"); } diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index a1d6835bfaed3..ae837ac6dce88 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -13,18 +13,20 @@ doctest = false cov-mark = "2.0.0-pre.1" itertools = "0.10.5" arrayvec = "0.7.2" +bitflags = "1.3.2" smallvec = "1.10.0" ena = "0.14.0" tracing = "0.1.35" rustc-hash = "1.1.0" scoped-tls = "1.0.0" -chalk-solve = { version = "0.86.0", default-features = false } -chalk-ir = "0.86.0" -chalk-recursive = { version = "0.86.0", default-features = false } -chalk-derive = "0.86.0" +chalk-solve = { version = "0.88.0", default-features = false } +chalk-ir = "0.88.0" +chalk-recursive = { version = "0.88.0", default-features = false } +chalk-derive = "0.88.0" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } once_cell = "1.15.0" typed-arena = "2.0.1" +rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false } stdx = { path = "../stdx", version = "0.0.0" } hir-def = { path = "../hir-def", version = "0.0.0" } diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs index 78911d8dc0772..cbcf8f74c556d 100644 --- a/crates/hir-ty/src/autoderef.rs +++ b/crates/hir-ty/src/autoderef.rs @@ -82,11 +82,11 @@ pub(crate) fn autoderef_step( } // FIXME: replace uses of this with Autoderef above -pub fn autoderef<'a>( - db: &'a dyn HirDatabase, +pub fn autoderef( + db: &dyn HirDatabase, env: Arc, ty: Canonical, -) -> impl Iterator> + 
'a { +) -> impl Iterator> + '_ { let mut table = InferenceTable::new(db, env); let ty = table.instantiate_canonical(ty); let mut autoderef = Autoderef::new(&mut table, ty); diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs index 9ae752556d890..d5ef0c22dec83 100644 --- a/crates/hir-ty/src/builder.rs +++ b/crates/hir-ty/src/builder.rs @@ -142,7 +142,7 @@ impl TyBuilder { match (a.data(Interner), e) { (chalk_ir::GenericArgData::Ty(_), ParamKind::Type) | (chalk_ir::GenericArgData::Const(_), ParamKind::Const(_)) => (), - _ => panic!("Mismatched kinds: {:?}, {:?}, {:?}", a, self.vec, self.param_kinds), + _ => panic!("Mismatched kinds: {a:?}, {:?}, {:?}", self.vec, self.param_kinds), } } } diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 43c3451cab37b..1c2b8de7f784f 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -568,6 +568,7 @@ fn well_known_trait_from_lang_attr(name: &str) -> Option { "sized" => WellKnownTrait::Sized, "unpin" => WellKnownTrait::Unpin, "unsize" => WellKnownTrait::Unsize, + "tuple_trait" => WellKnownTrait::Tuple, _ => return None, }) } @@ -585,6 +586,7 @@ fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str { WellKnownTrait::FnOnce => "fn_once", WellKnownTrait::Generator => "generator", WellKnownTrait::Sized => "sized", + WellKnownTrait::Tuple => "tuple_trait", WellKnownTrait::Unpin => "unpin", WellKnownTrait::Unsize => "unsize", } diff --git a/crates/hir-ty/src/consteval.rs b/crates/hir-ty/src/consteval.rs index 2c0c6e0b8394f..8df70330fa9eb 100644 --- a/crates/hir-ty/src/consteval.rs +++ b/crates/hir-ty/src/consteval.rs @@ -90,14 +90,14 @@ impl Display for ComputedExpr { ComputedExpr::Literal(l) => match l { Literal::Int(x, _) => { if *x >= 10 { - write!(f, "{} ({:#X})", x, x) + write!(f, "{x} ({x:#X})") } else { x.fmt(f) } } Literal::Uint(x, _) => { if *x >= 10 { - write!(f, "{} ({:#X})", x, x) + write!(f, "{x} ({x:#X})") } else { x.fmt(f) } @@ -131,7 +131,7 @@ fn scalar_max(scalar: &Scalar) -> i128 { IntTy::I16 => i16::MAX as i128, IntTy::I32 => i32::MAX as i128, IntTy::I64 => i64::MAX as i128, - IntTy::I128 => i128::MAX as i128, + IntTy::I128 => i128::MAX, }, Scalar::Uint(x) => match x { chalk_ir::UintTy::Usize => usize::MAX as i128, @@ -139,7 +139,7 @@ fn scalar_max(scalar: &Scalar) -> i128 { chalk_ir::UintTy::U16 => u16::MAX as i128, chalk_ir::UintTy::U32 => u32::MAX as i128, chalk_ir::UintTy::U64 => u64::MAX as i128, - chalk_ir::UintTy::U128 => i128::MAX as i128, // ignore too big u128 for now + chalk_ir::UintTy::U128 => i128::MAX, // ignore too big u128 for now }, Scalar::Float(_) => 0, } @@ -351,15 +351,17 @@ pub fn eval_const( .infer .assoc_resolutions_for_expr(expr_id) .ok_or(ConstEvalError::SemanticError("unresolved assoc item"))? 
+ .0 { hir_def::AssocItemId::FunctionId(_) => { Err(ConstEvalError::NotSupported("assoc function")) } + // FIXME use actual impl for trait assoc const hir_def::AssocItemId::ConstId(c) => ctx.db.const_eval(c), hir_def::AssocItemId::TypeAliasId(_) => { Err(ConstEvalError::NotSupported("assoc type alias")) } - } + }; } }; match pr { @@ -402,7 +404,7 @@ pub(crate) fn path_to_const( args_lazy: impl FnOnce() -> Generics, debruijn: DebruijnIndex, ) -> Option { - match resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) { + match resolver.resolve_path_in_value_ns_fully(db.upcast(), path) { Some(ValueNs::GenericParam(p)) => { let ty = db.const_param_ty(p); let args = args_lazy(); @@ -509,10 +511,10 @@ pub(crate) fn const_eval_query_variant( ) } -pub(crate) fn eval_to_const<'a>( +pub(crate) fn eval_to_const( expr: Idx, mode: ParamLoweringMode, - ctx: &mut InferenceContext<'a>, + ctx: &mut InferenceContext<'_>, args: impl FnOnce() -> Generics, debruijn: DebruijnIndex, ) -> Const { diff --git a/crates/hir-ty/src/consteval/tests.rs b/crates/hir-ty/src/consteval/tests.rs index b76506f6ebc2c..3c930c077b3be 100644 --- a/crates/hir-ty/src/consteval/tests.rs +++ b/crates/hir-ty/src/consteval/tests.rs @@ -14,7 +14,7 @@ fn check_number(ra_fixture: &str, answer: i128) { match r { ComputedExpr::Literal(Literal::Int(r, _)) => assert_eq!(r, answer), ComputedExpr::Literal(Literal::Uint(r, _)) => assert_eq!(r, answer as u128), - x => panic!("Expected number but found {:?}", x), + x => panic!("Expected number but found {x:?}"), } } @@ -25,7 +25,6 @@ fn eval_goal(ra_fixture: &str) -> Result { let scope = &def_map[module_id.local_id].scope; let const_id = scope .declarations() - .into_iter() .find_map(|x| match x { hir_def::ModuleDefId::ConstId(x) => { if db.const_data(x).name.as_ref()?.to_string() == "GOAL" { @@ -126,7 +125,7 @@ fn enums() { assert_eq!(name, "E::A"); assert_eq!(val, 1); } - x => panic!("Expected enum but found {:?}", x), + x => panic!("Expected enum but found {x:?}"), } } diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 932fce83563d7..54b244620fba3 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -3,20 +3,23 @@ use std::sync::Arc; -use arrayvec::ArrayVec; use base_db::{impl_intern_key, salsa, CrateId, Upcast}; use hir_def::{ - db::DefDatabase, expr::ExprId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, - FunctionId, GenericDefId, ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId, + db::DefDatabase, + expr::ExprId, + layout::{Layout, LayoutError, TargetDataLayout}, + AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GenericDefId, + ImplId, LifetimeParamId, LocalFieldId, TypeOrConstParamId, VariantId, }; use la_arena::ArenaMap; +use smallvec::SmallVec; use crate::{ chalk_db, consteval::{ComputedExpr, ConstEvalError}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, Binders, CallableDefId, FnDefId, GenericArg, ImplTraitId, InferenceResult, Interner, PolyFnSig, - QuantifiedWhereClause, ReturnTypeImplTraits, TraitRef, Ty, TyDefId, ValueTyDefId, + QuantifiedWhereClause, ReturnTypeImplTraits, Substitution, TraitRef, Ty, TyDefId, ValueTyDefId, }; use hir_expand::name::Name; @@ -57,6 +60,13 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::invoke(crate::lower::field_types_query)] fn field_types(&self, var: VariantId) -> Arc>>; + #[salsa::invoke(crate::layout::layout_of_adt_query)] + #[salsa::cycle(crate::layout::layout_of_adt_recover)] + fn layout_of_adt(&self, def: AdtId, 
subst: Substitution) -> Result; + + #[salsa::invoke(crate::layout::target_data_layout_query)] + fn target_data_layout(&self, krate: CrateId) -> Arc; + #[salsa::invoke(crate::lower::callable_item_sig)] fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; @@ -92,10 +102,15 @@ pub trait HirDatabase: DefDatabase + Upcast { fn inherent_impls_in_block(&self, block: BlockId) -> Option>; /// Collects all crates in the dependency graph that have impls for the - /// given fingerprint. This is only used for primitive types; for - /// user-defined types we just look at the crate where the type is defined. - #[salsa::invoke(crate::method_resolution::inherent_impl_crates_query)] - fn inherent_impl_crates(&self, krate: CrateId, fp: TyFingerprint) -> ArrayVec; + /// given fingerprint. This is only used for primitive types and types + /// annotated with `rustc_has_incoherent_inherent_impls`; for other types + /// we just look at the crate where the type is defined. + #[salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)] + fn incoherent_inherent_impl_crates( + &self, + krate: CrateId, + fp: TyFingerprint, + ) -> SmallVec<[CrateId; 2]>; #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)] fn trait_impls_in_crate(&self, krate: CrateId) -> Arc; diff --git a/crates/hir-ty/src/diagnostics/match_check.rs b/crates/hir-ty/src/diagnostics/match_check.rs index d51ad72bd27b1..8b0f051b46b62 100644 --- a/crates/hir-ty/src/diagnostics/match_check.rs +++ b/crates/hir-ty/src/diagnostics/match_check.rs @@ -12,16 +12,16 @@ pub(crate) mod usefulness; use chalk_ir::Mutability; use hir_def::{ - adt::VariantData, body::Body, expr::PatId, AdtId, EnumVariantId, HasModule, LocalFieldId, - VariantId, + adt::VariantData, body::Body, expr::PatId, AdtId, EnumVariantId, LocalFieldId, VariantId, }; -use hir_expand::name::{name, Name}; +use hir_expand::name::Name; use stdx::{always, never}; use crate::{ db::HirDatabase, display::{HirDisplay, HirDisplayError, HirFormatter}, infer::BindingMode, + lang_items::is_box, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, }; @@ -386,7 +386,7 @@ impl HirDisplay for Pat { } subpattern.hir_fmt(f) } - PatKind::LiteralBool { value } => write!(f, "{}", value), + PatKind::LiteralBool { value } => write!(f, "{value}"), PatKind::Or { pats } => f.write_joined(pats.iter(), " | "), } } @@ -405,13 +405,6 @@ where } } -fn is_box(adt: AdtId, db: &dyn HirDatabase) -> bool { - let owned_box = name![owned_box].to_smol_str(); - let krate = adt.module(db.upcast()).krate(); - let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from); - Some(adt) == box_adt -} - pub(crate) trait PatternFoldable: Sized { fn fold_with(&self, folder: &mut F) -> Self { self.super_fold_with(folder) diff --git a/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs b/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs index 47d60fc41e700..d130827a77e84 100644 --- a/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs +++ b/crates/hir-ty/src/diagnostics/match_check/deconstruct_pat.rs @@ -372,7 +372,7 @@ impl Constructor { hir_def::AdtId::UnionId(id) => id.into(), } } - _ => panic!("bad constructor {:?} for adt {:?}", self, adt), + _ => panic!("bad constructor {self:?} for adt {adt:?}"), } } diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index a22a4b170f61c..66e813eed8b4a 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -16,7 +16,7 @@ use hir_def::{ path::{Path, PathKind}, 
type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef}, visibility::Visibility, - HasModule, ItemContainerId, Lookup, ModuleId, TraitId, + HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, TraitId, }; use hir_expand::{hygiene::Hygiene, name::Name}; use itertools::Itertools; @@ -35,9 +35,27 @@ use crate::{ TraitRefExt, Ty, TyExt, TyKind, WhereClause, }; +pub trait HirWrite: fmt::Write { + fn start_location_link(&mut self, location: ModuleDefId); + fn end_location_link(&mut self); +} + +// String will ignore link metadata +impl HirWrite for String { + fn start_location_link(&mut self, _: ModuleDefId) {} + + fn end_location_link(&mut self) {} +} + +// `core::Formatter` will ignore metadata +impl HirWrite for fmt::Formatter<'_> { + fn start_location_link(&mut self, _: ModuleDefId) {} + fn end_location_link(&mut self) {} +} + pub struct HirFormatter<'a> { pub db: &'a dyn HirDatabase, - fmt: &'a mut dyn fmt::Write, + fmt: &'a mut dyn HirWrite, buf: String, curr_size: usize, pub(crate) max_size: Option, @@ -45,6 +63,16 @@ pub struct HirFormatter<'a> { display_target: DisplayTarget, } +impl HirFormatter<'_> { + fn start_location_link(&mut self, location: ModuleDefId) { + self.fmt.start_location_link(location); + } + + fn end_location_link(&mut self) { + self.fmt.end_location_link(); + } +} + pub trait HirDisplay { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError>; @@ -148,13 +176,13 @@ impl<'a> HirFormatter<'a> { let mut first = true; for e in iter { if !first { - write!(self, "{}", sep)?; + write!(self, "{sep}")?; } first = false; // Abbreviate multiple omitted types with a single ellipsis. if self.should_truncate() { - return write!(self, "{}", TYPE_HINT_TRUNCATION); + return write!(self, "{TYPE_HINT_TRUNCATION}"); } e.hir_fmt(self)?; @@ -245,12 +273,9 @@ pub struct HirDisplayWrapper<'a, T> { display_target: DisplayTarget, } -impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T> -where - T: HirDisplay, -{ - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.t.hir_fmt(&mut HirFormatter { +impl HirDisplayWrapper<'_, T> { + pub fn write_to(&self, f: &mut F) -> Result<(), HirDisplayError> { + self.t.hir_fmt(&mut HirFormatter { db: self.db, fmt: f, buf: String::with_capacity(20), @@ -258,7 +283,16 @@ where max_size: self.max_size, omit_verbose_types: self.omit_verbose_types, display_target: self.display_target, - }) { + }) + } +} + +impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T> +where + T: HirDisplay, +{ + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.write_to(f) { Ok(()) => Ok(()), Err(HirDisplayError::FmtError) => Err(fmt::Error), Err(HirDisplayError::DisplaySourceCodeError(_)) => { @@ -286,7 +320,7 @@ impl HirDisplay for Interned { impl HirDisplay for ProjectionTy { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); + return write!(f, "{TYPE_HINT_TRUNCATION}"); } let trait_ref = self.trait_ref(f.db); @@ -308,7 +342,7 @@ impl HirDisplay for ProjectionTy { impl HirDisplay for OpaqueTy { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); + return write!(f, "{TYPE_HINT_TRUNCATION}"); } self.substitution.at(Interner, 0).hir_fmt(f) @@ -351,7 +385,7 @@ impl HirDisplay for BoundVar { impl HirDisplay for Ty { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { if f.should_truncate() { - 
return write!(f, "{}", TYPE_HINT_TRUNCATION); + return write!(f, "{TYPE_HINT_TRUNCATION}"); } match self.kind(Interner) { @@ -530,6 +564,7 @@ impl HirDisplay for Ty { } } TyKind::Adt(AdtId(def_id), parameters) => { + f.start_location_link((*def_id).into()); match f.display_target { DisplayTarget::Diagnostics | DisplayTarget::Test => { let name = match *def_id { @@ -537,7 +572,7 @@ impl HirDisplay for Ty { hir_def::AdtId::UnionId(it) => f.db.union_data(it).name.clone(), hir_def::AdtId::EnumId(it) => f.db.enum_data(it).name.clone(), }; - write!(f, "{}", name)?; + write!(f, "{name}")?; } DisplayTarget::SourceCode { module_id } => { if let Some(path) = find_path::find_path( @@ -546,7 +581,7 @@ impl HirDisplay for Ty { module_id, false, ) { - write!(f, "{}", path)?; + write!(f, "{path}")?; } else { return Err(HirDisplayError::DisplaySourceCodeError( DisplaySourceCodeError::PathNotFound, @@ -554,6 +589,7 @@ impl HirDisplay for Ty { } } } + f.end_location_link(); if parameters.len(Interner) > 0 { let parameters_to_write = if f.display_target.is_source_code() @@ -701,7 +737,7 @@ impl HirDisplay for Ty { if sig.params().is_empty() { write!(f, "||")?; } else if f.should_truncate() { - write!(f, "|{}|", TYPE_HINT_TRUNCATION)?; + write!(f, "|{TYPE_HINT_TRUNCATION}|")?; } else { write!(f, "|")?; f.write_joined(sig.params(), ", ")?; @@ -892,7 +928,7 @@ pub fn write_bounds_like_dyn_trait_with_prefix( default_sized: SizedByDefault, f: &mut HirFormatter<'_>, ) -> Result<(), HirDisplayError> { - write!(f, "{}", prefix)?; + write!(f, "{prefix}")?; if !predicates.is_empty() || predicates.is_empty() && matches!(default_sized, SizedByDefault::Sized { .. }) { @@ -1020,7 +1056,7 @@ fn fmt_trait_ref( use_as: bool, ) -> Result<(), HirDisplayError> { if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); + return write!(f, "{TYPE_HINT_TRUNCATION}"); } tr.self_type_parameter(Interner).hir_fmt(f)?; @@ -1047,7 +1083,7 @@ impl HirDisplay for TraitRef { impl HirDisplay for WhereClause { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { if f.should_truncate() { - return write!(f, "{}", TYPE_HINT_TRUNCATION); + return write!(f, "{TYPE_HINT_TRUNCATION}"); } match self { @@ -1098,7 +1134,6 @@ impl HirDisplay for LifetimeData { write!(f, "{}", param_data.name) } LifetimeData::Static => write!(f, "'static"), - LifetimeData::Empty(_) => Ok(()), LifetimeData::Erased => Ok(()), LifetimeData::Phantom(_, _) => Ok(()), } @@ -1162,7 +1197,7 @@ impl HirDisplay for TypeRef { hir_def::type_ref::Mutability::Shared => "*const ", hir_def::type_ref::Mutability::Mut => "*mut ", }; - write!(f, "{}", mutability)?; + write!(f, "{mutability}")?; inner.hir_fmt(f)?; } TypeRef::Reference(inner, lifetime, mutability) => { @@ -1174,13 +1209,13 @@ impl HirDisplay for TypeRef { if let Some(lifetime) = lifetime { write!(f, "{} ", lifetime.name)?; } - write!(f, "{}", mutability)?; + write!(f, "{mutability}")?; inner.hir_fmt(f)?; } TypeRef::Array(inner, len) => { write!(f, "[")?; inner.hir_fmt(f)?; - write!(f, "; {}]", len)?; + write!(f, "; {len}]")?; } TypeRef::Slice(inner) => { write!(f, "[")?; @@ -1197,7 +1232,7 @@ impl HirDisplay for TypeRef { for index in 0..function_parameters.len() { let (param_name, param_type) = &function_parameters[index]; if let Some(name) = param_name { - write!(f, "{}: ", name)?; + write!(f, "{name}: ")?; } param_type.hir_fmt(f)?; @@ -1373,7 +1408,7 @@ impl HirDisplay for hir_def::path::GenericArg { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { match 
self { hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f), - hir_def::path::GenericArg::Const(c) => write!(f, "{}", c), + hir_def::path::GenericArg::Const(c) => write!(f, "{c}"), hir_def::path::GenericArg::Lifetime(lifetime) => write!(f, "{}", lifetime.name), } } diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 0b3c23f5747ad..6b59f1c20daa2 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -19,10 +19,11 @@ use std::sync::Arc; use chalk_ir::{cast::Cast, ConstValue, DebruijnIndex, Mutability, Safety, Scalar, TypeFlags}; use hir_def::{ body::Body, - builtin_type::BuiltinType, + builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, data::{ConstData, StaticData}, - expr::{BindingAnnotation, ExprId, PatId}, + expr::{BindingAnnotation, ExprId, ExprOrPatId, PatId}, lang_item::LangItemTarget, + layout::Integer, path::{path, Path}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, type_ref::TypeRef, @@ -33,7 +34,7 @@ use hir_expand::name::{name, Name}; use itertools::Either; use la_arena::ArenaMap; use rustc_hash::FxHashMap; -use stdx::{always, impl_from}; +use stdx::always; use crate::{ db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany, @@ -70,8 +71,26 @@ pub(crate) fn infer_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Arc ctx.collect_static(&db.static_data(s)), DefWithBodyId::VariantId(v) => { ctx.return_ty = TyBuilder::builtin(match db.enum_data(v.parent).variant_body_type() { - Either::Left(builtin) => BuiltinType::Int(builtin), - Either::Right(builtin) => BuiltinType::Uint(builtin), + hir_def::layout::IntegerType::Pointer(signed) => match signed { + true => BuiltinType::Int(BuiltinInt::Isize), + false => BuiltinType::Uint(BuiltinUint::Usize), + }, + hir_def::layout::IntegerType::Fixed(size, signed) => match signed { + true => BuiltinType::Int(match size { + Integer::I8 => BuiltinInt::I8, + Integer::I16 => BuiltinInt::I16, + Integer::I32 => BuiltinInt::I32, + Integer::I64 => BuiltinInt::I64, + Integer::I128 => BuiltinInt::I128, + }), + false => BuiltinType::Uint(match size { + Integer::I8 => BuiltinUint::U8, + Integer::I16 => BuiltinUint::U16, + Integer::I32 => BuiltinUint::U32, + Integer::I64 => BuiltinUint::U64, + Integer::I128 => BuiltinUint::U128, + }), + }, }); } } @@ -101,13 +120,6 @@ pub(crate) fn normalize(db: &dyn HirDatabase, owner: DefWithBodyId, ty: Ty) -> T table.resolve_completely(ty_with_vars) } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] -enum ExprOrPatId { - ExprId(ExprId), - PatId(PatId), -} -impl_from!(ExprId, PatId for ExprOrPatId); - /// Binding modes inferred for patterns. /// #[derive(Copy, Clone, Debug, Eq, PartialEq)] @@ -189,6 +201,8 @@ pub(crate) type InferResult = Result, TypeError>; #[derive(Debug, PartialEq, Eq, Clone)] pub enum InferenceDiagnostic { NoSuchField { expr: ExprId }, + PrivateField { expr: ExprId, field: FieldId }, + PrivateAssocItem { id: ExprOrPatId, item: AssocItemId }, BreakOutsideOfLoop { expr: ExprId, is_break: bool }, MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize }, } @@ -330,7 +344,7 @@ pub struct InferenceResult { /// For each struct literal or pattern, records the variant it resolves to. variant_resolutions: FxHashMap, /// For each associated item record what it resolves to - assoc_resolutions: FxHashMap, + assoc_resolutions: FxHashMap, pub diagnostics: Vec, pub type_of_expr: ArenaMap, /// For each pattern record the type it resolves to. 
@@ -360,11 +374,11 @@ impl InferenceResult { pub fn variant_resolution_for_pat(&self, id: PatId) -> Option { self.variant_resolutions.get(&id.into()).copied() } - pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option { - self.assoc_resolutions.get(&id.into()).copied() + pub fn assoc_resolutions_for_expr(&self, id: ExprId) -> Option<(AssocItemId, Substitution)> { + self.assoc_resolutions.get(&id.into()).cloned() } - pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option { - self.assoc_resolutions.get(&id.into()).copied() + pub fn assoc_resolutions_for_pat(&self, id: PatId) -> Option<(AssocItemId, Substitution)> { + self.assoc_resolutions.get(&id.into()).cloned() } pub fn type_mismatch_for_expr(&self, expr: ExprId) -> Option<&TypeMismatch> { self.type_mismatches.get(&expr.into()) @@ -484,7 +498,7 @@ impl<'a> InferenceContext<'a> { result: InferenceResult::default(), table: unify::InferenceTable::new(db, trait_env.clone()), trait_env, - return_ty: TyKind::Error.intern(Interner), // set in collect_fn_signature + return_ty: TyKind::Error.intern(Interner), // set in collect_* calls resume_yield_tys: None, db, owner, @@ -498,6 +512,8 @@ impl<'a> InferenceContext<'a> { fn resolve_all(self) -> InferenceResult { let InferenceContext { mut table, mut result, .. } = self; + table.fallback_if_possible(); + // FIXME resolve obligations as well (use Guidance if necessary) table.resolve_obligations_as_possible(); @@ -516,6 +532,9 @@ impl<'a> InferenceContext<'a> { for (_, subst) in result.method_resolutions.values_mut() { *subst = table.resolve_completely(subst.clone()); } + for (_, subst) in result.assoc_resolutions.values_mut() { + *subst = table.resolve_completely(subst.clone()); + } for adjustment in result.expr_adjustments.values_mut().flatten() { adjustment.target = table.resolve_completely(adjustment.target.clone()); } @@ -537,8 +556,20 @@ impl<'a> InferenceContext<'a> { let data = self.db.function_data(func); let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) .with_impl_trait_mode(ImplTraitLoweringMode::Param); - let param_tys = + let mut param_tys = data.params.iter().map(|(_, type_ref)| ctx.lower_ty(type_ref)).collect::>(); + // Check if function contains a va_list, if it does then we append it to the parameter types + // that are collected from the function data + if data.is_varargs() { + let va_list_ty = match self.resolve_va_list() { + Some(va_list) => TyBuilder::adt(self.db, va_list) + .fill_with_defaults(self.db, || self.table.new_type_var()) + .build(), + None => self.err_ty(), + }; + + param_tys.push(va_list_ty) + } for (ty, pat) in param_tys.into_iter().zip(self.body.params.iter()) { let ty = self.insert_type_vars(ty); let ty = self.normalize_associated_types_in(ty); @@ -551,14 +582,17 @@ impl<'a> InferenceContext<'a> { } else { &*data.ret_type }; - let return_ty = self.make_ty_with_mode(return_ty, ImplTraitLoweringMode::Opaque); - self.return_ty = return_ty; - if let Some(rpits) = self.db.return_type_impl_traits(func) { + let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) + .with_impl_trait_mode(ImplTraitLoweringMode::Opaque); + let return_ty = ctx.lower_ty(return_ty); + let return_ty = self.insert_type_vars(return_ty); + + let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) { // RPIT opaque types use substitution of their parent function. 
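    // Illustrative aside, not part of the patch: an RPIT opaque type is
    // parameterized over the enclosing function's generics, which is what the
    // placeholder substitution built below stands in for. A minimal sketch of
    // such a function (names here are hypothetical):
    fn rpit_example<T: Clone>(x: T) -> impl Iterator<Item = T> {
        // The returned opaque type captures `T` from its parent function.
        std::iter::repeat(x).take(2)
    }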
let fn_placeholders = TyBuilder::placeholder_subst(self.db, func); - self.return_ty = fold_tys( - self.return_ty.clone(), + fold_tys( + return_ty, |ty, _| { let opaque_ty_id = match ty.kind(Interner) { TyKind::OpaqueType(opaque_ty_id, _) => *opaque_ty_id, @@ -579,14 +613,18 @@ impl<'a> InferenceContext<'a> { let (var_predicate, binders) = predicate .substitute(Interner, &var_subst) .into_value_and_skipped_binders(); - always!(binders.len(Interner) == 0); // quantified where clauses not yet handled + always!(binders.is_empty(Interner)); // quantified where clauses not yet handled self.push_obligation(var_predicate.cast(Interner)); } var }, DebruijnIndex::INNERMOST, - ); - } + ) + } else { + return_ty + }; + + self.return_ty = self.normalize_associated_types_in(return_ty); } fn infer_body(&mut self) { @@ -609,8 +647,8 @@ impl<'a> InferenceContext<'a> { self.result.variant_resolutions.insert(id, variant); } - fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) { - self.result.assoc_resolutions.insert(id, item); + fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId, subs: Substitution) { + self.result.assoc_resolutions.insert(id, (item, subs)); } fn write_pat_ty(&mut self, pat: PatId, ty: Ty) { @@ -621,23 +659,14 @@ impl<'a> InferenceContext<'a> { self.result.diagnostics.push(diagnostic); } - fn make_ty_with_mode( - &mut self, - type_ref: &TypeRef, - impl_trait_mode: ImplTraitLoweringMode, - ) -> Ty { + fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { // FIXME use right resolver for block - let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver) - .with_impl_trait_mode(impl_trait_mode); + let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); let ty = ctx.lower_ty(type_ref); let ty = self.insert_type_vars(ty); self.normalize_associated_types_in(ty) } - fn make_ty(&mut self, type_ref: &TypeRef) -> Ty { - self.make_ty_with_mode(type_ref, ImplTraitLoweringMode::Disallowed) - } - fn err_ty(&self) -> Ty { self.result.standard_types.unknown.clone() } @@ -656,7 +685,7 @@ impl<'a> InferenceContext<'a> { } } - /// Replaces Ty::Unknown by a new type var, so we can maybe still infer it. + /// Replaces `Ty::Error` by a new type var, so we can maybe still infer it. fn insert_type_vars_shallow(&mut self, ty: Ty) -> Ty { match ty.kind(Interner) { TyKind::Error => self.table.new_type_var(), @@ -983,6 +1012,11 @@ impl<'a> InferenceContext<'a> { let trait_ = self.resolve_ops_index()?; self.db.trait_data(trait_).associated_type_by_name(&name![Output]) } + + fn resolve_va_list(&self) -> Option { + let struct_ = self.resolve_lang_item(name![va_list])?.as_struct()?; + Some(struct_.into()) + } } /// When inferring an expression, we propagate downward whatever type hint we diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index b1f4de8260775..8f9cdac3784c7 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -1,13 +1,12 @@ //! Type inference for expressions. 
use std::{ - collections::hash_map::Entry, iter::{repeat, repeat_with}, mem, }; use chalk_ir::{ - cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind, + cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyKind, TyVariableKind, }; use hir_def::{ expr::{ @@ -35,8 +34,8 @@ use crate::{ primitive::{self, UintTy}, static_lifetime, to_chalk_trait_id, utils::{generics, Generics}, - AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, - Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, + Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnPointer, FnSig, FnSubst, + Interner, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, }; use super::{ @@ -152,11 +151,20 @@ impl<'a> InferenceContext<'a> { .1 } Expr::TryBlock { body } => { - self.with_breakable_ctx(BreakableKind::Border, self.err_ty(), None, |this| { - let _inner = this.infer_expr(*body, expected); + // The type that is returned from the try block + let try_ty = self.table.new_type_var(); + if let Some(ty) = expected.only_has_type(&mut self.table) { + self.unify(&try_ty, &ty); + } + + // The ok-ish type that is expected from the last expression + let ok_ty = self.resolve_associated_type(try_ty.clone(), self.resolve_ops_try_ok()); + + self.with_breakable_ctx(BreakableKind::Block, ok_ty.clone(), None, |this| { + this.infer_expr(*body, &Expectation::has_type(ok_ty)); }); - // FIXME should be std::result::Result<{inner}, _> - self.err_ty() + + try_ty } Expr::Async { body } => { let ret_ty = self.table.new_type_var(); @@ -326,6 +334,7 @@ impl<'a> InferenceContext<'a> { let (param_tys, ret_ty) = match res { Some(res) => { let adjustments = auto_deref_adjust_steps(&derefs); + // FIXME: Handle call adjustments for Fn/FnMut self.write_expr_adj(*callee, adjustments); res } @@ -465,6 +474,12 @@ impl<'a> InferenceContext<'a> { TyKind::Error.intern(Interner) } } + Expr::Yeet { expr } => { + if let &Some(expr) = expr { + self.infer_expr_inner(expr, &Expectation::None); + } + TyKind::Never.intern(Interner) + } Expr::RecordLit { path, fields, spread, .. 
} => { let (ty, def_id) = self.resolve_variant(path.as_deref(), false); if let Some(variant) = def_id { @@ -506,6 +521,7 @@ impl<'a> InferenceContext<'a> { let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none()); let mut autoderef = Autoderef::new(&mut self.table, receiver_ty); + let mut private_field = None; let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| { let (field_id, parameters) = match derefed_ty.kind(Interner) { TyKind::Tuple(_, substs) => { @@ -532,13 +548,8 @@ impl<'a> InferenceContext<'a> { let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id] .is_visible_from(self.db.upcast(), self.resolver.module()); if !is_visible { - // Write down the first field resolution even if it is not visible - // This aids IDE features for private fields like goto def and in - // case of autoderef finding an applicable field, this will be - // overwritten in a following cycle - if let Entry::Vacant(entry) = self.result.field_resolutions.entry(tgt_expr) - { - entry.insert(field_id); + if private_field.is_none() { + private_field = Some(field_id); } return None; } @@ -557,7 +568,17 @@ impl<'a> InferenceContext<'a> { let ty = self.normalize_associated_types_in(ty); ty } - _ => self.err_ty(), + _ => { + // Write down the first private field resolution if we found no field + // This aids IDE features for private fields like goto def + if let Some(field) = private_field { + self.result.field_resolutions.insert(tgt_expr, field); + self.result + .diagnostics + .push(InferenceDiagnostic::PrivateField { expr: tgt_expr, field }); + } + self.err_ty() + } }; ty } @@ -940,7 +961,7 @@ impl<'a> InferenceContext<'a> { Expr::RecordLit { path, fields, .. } => { let subs = fields.iter().map(|f| (f.name.clone(), f.expr)); - self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs) + self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs, subs) } Expr::Underscore => rhs_ty.clone(), _ => { @@ -1018,14 +1039,38 @@ impl<'a> InferenceContext<'a> { self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty.clone())); let ret_ty = match method_ty.callable_sig(self.db) { - Some(sig) => sig.ret().clone(), + Some(sig) => { + let p_left = &sig.params()[0]; + if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. 
}) { + if let &TyKind::Ref(mtbl, _, _) = p_left.kind(Interner) { + self.write_expr_adj( + lhs, + vec![Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(mtbl)), + target: p_left.clone(), + }], + ); + } + } + let p_right = &sig.params()[1]; + if matches!(op, BinaryOp::CmpOp(..)) { + if let &TyKind::Ref(mtbl, _, _) = p_right.kind(Interner) { + self.write_expr_adj( + rhs, + vec![Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(mtbl)), + target: p_right.clone(), + }], + ); + } + } + sig.ret().clone() + } None => self.err_ty(), }; let ret_ty = self.normalize_associated_types_in(ret_ty); - // FIXME: record autoref adjustments - // use knowledge of built-in binary ops, which can sometimes help inference if let Some(builtin_rhs) = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone()) { self.unify(&builtin_rhs, &rhs_ty); @@ -1122,20 +1167,26 @@ impl<'a> InferenceContext<'a> { let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast()); let resolved = method_resolution::lookup_method( - &canonicalized_receiver.value, self.db, + &canonicalized_receiver.value, self.trait_env.clone(), &traits_in_scope, VisibleFromModule::Filter(self.resolver.module()), method_name, ); let (receiver_ty, method_ty, substs) = match resolved { - Some((adjust, func)) => { + Some((adjust, func, visible)) => { let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty); let generics = generics(self.db.upcast(), func.into()); let substs = self.substs_for_method_call(generics, generic_args); self.write_expr_adj(receiver, adjustments); self.write_method_resolution(tgt_expr, func, substs.clone()); + if !visible { + self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { + id: tgt_expr.into(), + item: func.into(), + }) + } (ty, self.db.value_ty(func.into()), substs) } None => ( @@ -1309,7 +1360,7 @@ impl<'a> InferenceContext<'a> { ty, c, ParamLoweringMode::Placeholder, - || generics(this.db.upcast(), (&this.resolver).generic_def().unwrap()), + || generics(this.db.upcast(), this.resolver.generic_def().unwrap()), DebruijnIndex::INNERMOST, ) }, diff --git a/crates/hir-ty/src/infer/pat.rs b/crates/hir-ty/src/infer/pat.rs index 53259d66dec64..f154dac8e879d 100644 --- a/crates/hir-ty/src/infer/pat.rs +++ b/crates/hir-ty/src/infer/pat.rs @@ -153,7 +153,7 @@ impl<'a> InferenceContext<'a> { ) -> Ty { let mut expected = self.resolve_ty_shallow(expected); - if is_non_ref_pat(&self.body, pat) { + if is_non_ref_pat(self.body, pat) { let mut pat_adjustments = Vec::new(); while let Some((inner, _lifetime, mutability)) = expected.as_reference() { pat_adjustments.push(expected.clone()); @@ -220,7 +220,7 @@ impl<'a> InferenceContext<'a> { ), Pat::Record { path: p, args: fields, ellipsis: _ } => { let subs = fields.iter().map(|f| (f.name.clone(), f.pat)); - self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat.into(), subs) + self.infer_record_pat_like(p.as_deref(), &expected, default_bm, pat, subs) } Pat::Path(path) => { // FIXME use correct resolver for the surrounding expression diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index ebe9d6fb5e014..8bd17c0f39f4d 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -7,13 +7,15 @@ use hir_def::{ AdtId, AssocItemId, EnumVariantId, ItemContainerId, Lookup, }; use hir_expand::name::Name; +use stdx::never; use crate::{ builder::ParamKind, consteval, method_resolution::{self, VisibleFromModule}, utils::generics, - Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, ValueTyDefId, + 
InferenceDiagnostic, Interner, Substitution, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, + ValueTyDefId, }; use super::{ExprOrPatId, InferenceContext, TraitRef}; @@ -212,7 +214,7 @@ impl<'a> InferenceContext<'a> { AssocItemId::TypeAliasId(_) => unreachable!(), }; - self.write_assoc_resolution(id, item); + self.write_assoc_resolution(id, item, trait_ref.substitution.clone()); Some((def, Some(trait_ref.substitution))) } @@ -233,7 +235,8 @@ impl<'a> InferenceContext<'a> { let canonical_ty = self.canonicalize(ty.clone()); let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast()); - method_resolution::iterate_method_candidates( + let mut not_visible = None; + let res = method_resolution::iterate_method_candidates( &canonical_ty.value, self.db, self.table.trait_env.clone(), @@ -241,7 +244,7 @@ impl<'a> InferenceContext<'a> { VisibleFromModule::Filter(self.resolver.module()), Some(name), method_resolution::LookupMode::Path, - move |_ty, item| { + |_ty, item, visible| { let (def, container) = match item { AssocItemId::FunctionId(f) => { (ValueNs::FunctionId(f), f.lookup(self.db.upcast()).container) @@ -259,7 +262,7 @@ impl<'a> InferenceContext<'a> { let impl_self_ty = self.db.impl_self_ty(impl_id).substitute(Interner, &impl_substs); self.unify(&impl_self_ty, &ty); - Some(impl_substs) + impl_substs } ItemContainerId::TraitId(trait_) => { // we're picking this method @@ -268,15 +271,32 @@ impl<'a> InferenceContext<'a> { .fill_with_inference_vars(&mut self.table) .build(); self.push_obligation(trait_ref.clone().cast(Interner)); - Some(trait_ref.substitution) + trait_ref.substitution + } + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => { + never!("assoc item contained in module/extern block"); + return None; } - ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, }; - self.write_assoc_resolution(id, item); - Some((def, substs)) + if visible { + Some((def, item, Some(substs), true)) + } else { + if not_visible.is_none() { + not_visible = Some((def, item, Some(substs), false)); + } + None + } }, - ) + ); + let res = res.or(not_visible); + if let Some((_, item, Some(ref substs), visible)) = res { + self.write_assoc_resolution(id, item, substs.clone()); + if !visible { + self.push_diagnostic(InferenceDiagnostic::PrivateAssocItem { id, item }) + } + } + res.map(|(def, _, substs, _)| (def, substs)) } fn resolve_enum_variant_on_ty( diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index 12f45f00f9c4d..e7ddd1591fe8a 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -1,6 +1,6 @@ //! Unification and canonicalization logic. -use std::{fmt, mem, sync::Arc}; +use std::{fmt, iter, mem, sync::Arc}; use chalk_ir::{ cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy, @@ -128,9 +128,13 @@ pub(crate) fn unify( )) } -#[derive(Copy, Clone, Debug)] -pub(crate) struct TypeVariableData { - diverging: bool, +bitflags::bitflags! 
{ + #[derive(Default)] + pub(crate) struct TypeVariableFlags: u8 { + const DIVERGING = 1 << 0; + const INTEGER = 1 << 1; + const FLOAT = 1 << 2; + } } type ChalkInferenceTable = chalk_solve::infer::InferenceTable; @@ -140,14 +144,14 @@ pub(crate) struct InferenceTable<'a> { pub(crate) db: &'a dyn HirDatabase, pub(crate) trait_env: Arc, var_unification_table: ChalkInferenceTable, - type_variable_table: Vec, + type_variable_table: Vec, pending_obligations: Vec>>, } pub(crate) struct InferenceTableSnapshot { var_table_snapshot: chalk_solve::infer::InferenceSnapshot, pending_obligations: Vec>>, - type_variable_table_snapshot: Vec, + type_variable_table_snapshot: Vec, } impl<'a> InferenceTable<'a> { @@ -169,19 +173,19 @@ impl<'a> InferenceTable<'a> { /// result. pub(super) fn propagate_diverging_flag(&mut self) { for i in 0..self.type_variable_table.len() { - if !self.type_variable_table[i].diverging { + if !self.type_variable_table[i].contains(TypeVariableFlags::DIVERGING) { continue; } let v = InferenceVar::from(i as u32); let root = self.var_unification_table.inference_var_root(v); if let Some(data) = self.type_variable_table.get_mut(root.index() as usize) { - data.diverging = true; + *data |= TypeVariableFlags::DIVERGING; } } } pub(super) fn set_diverging(&mut self, iv: InferenceVar, diverging: bool) { - self.type_variable_table[iv.index() as usize].diverging = diverging; + self.type_variable_table[iv.index() as usize].set(TypeVariableFlags::DIVERGING, diverging); } fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty { @@ -189,7 +193,7 @@ impl<'a> InferenceTable<'a> { _ if self .type_variable_table .get(iv.index() as usize) - .map_or(false, |data| data.diverging) => + .map_or(false, |data| data.contains(TypeVariableFlags::DIVERGING)) => { TyKind::Never } @@ -247,10 +251,8 @@ impl<'a> InferenceTable<'a> { } fn extend_type_variable_table(&mut self, to_index: usize) { - self.type_variable_table.extend( - (0..1 + to_index - self.type_variable_table.len()) - .map(|_| TypeVariableData { diverging: false }), - ); + let count = to_index - self.type_variable_table.len() + 1; + self.type_variable_table.extend(iter::repeat(TypeVariableFlags::default()).take(count)); } fn new_var(&mut self, kind: TyVariableKind, diverging: bool) -> Ty { @@ -258,7 +260,15 @@ impl<'a> InferenceTable<'a> { // Chalk might have created some type variables for its own purposes that we don't know about... self.extend_type_variable_table(var.index() as usize); assert_eq!(var.index() as usize, self.type_variable_table.len() - 1); - self.type_variable_table[var.index() as usize].diverging = diverging; + let flags = self.type_variable_table.get_mut(var.index() as usize).unwrap(); + if diverging { + *flags |= TypeVariableFlags::DIVERGING; + } + if matches!(kind, TyVariableKind::Integer) { + *flags |= TypeVariableFlags::INTEGER; + } else if matches!(kind, TyVariableKind::Float) { + *flags |= TypeVariableFlags::FLOAT; + } var.to_ty_with_kind(Interner, kind) } @@ -340,6 +350,51 @@ impl<'a> InferenceTable<'a> { self.resolve_with_fallback(t, &|_, _, d, _| d) } + /// Apply a fallback to unresolved scalar types. Integer type variables and float type + /// variables are replaced with i32 and f64, respectively. + /// + /// This method is only intended to be called just before returning inference results (i.e. in + /// `InferenceContext::resolve_all()`). + /// + /// FIXME: This method currently doesn't apply fallback to unconstrained general type variables + /// whereas rustc replaces them with `()` or `!`. 
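    // Illustrative aside, not part of the patch: the fallback described above
    // mirrors rustc's defaulting of otherwise unconstrained literals. A minimal
    // sketch of what it means for user code (function name is hypothetical):
    fn scalar_fallback_example() {
        let n = 1; // nothing else constrains `n`, so the integer variable falls back to i32
        let x = 1.5; // likewise, the float variable falls back to f64
        assert_eq!(std::mem::size_of_val(&n), 4);
        assert_eq!(std::mem::size_of_val(&x), 8);
    }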
+ pub(super) fn fallback_if_possible(&mut self) { + let int_fallback = TyKind::Scalar(Scalar::Int(IntTy::I32)).intern(Interner); + let float_fallback = TyKind::Scalar(Scalar::Float(FloatTy::F64)).intern(Interner); + + let scalar_vars: Vec<_> = self + .type_variable_table + .iter() + .enumerate() + .filter_map(|(index, flags)| { + let kind = if flags.contains(TypeVariableFlags::INTEGER) { + TyVariableKind::Integer + } else if flags.contains(TypeVariableFlags::FLOAT) { + TyVariableKind::Float + } else { + return None; + }; + + // FIXME: This is not really the nicest way to get `InferenceVar`s. Can we get them + // without directly constructing them from `index`? + let var = InferenceVar::from(index as u32).to_ty(Interner, kind); + Some(var) + }) + .collect(); + + for var in scalar_vars { + let maybe_resolved = self.resolve_ty_shallow(&var); + if let TyKind::InferenceVar(_, kind) = maybe_resolved.kind(Interner) { + let fallback = match kind { + TyVariableKind::Integer => &int_fallback, + TyVariableKind::Float => &float_fallback, + TyVariableKind::General => unreachable!(), + }; + self.unify(&var, fallback); + } + } + } + /// Unify two relatable values (e.g. `Ty`) and register new trait goals that arise from that. pub(crate) fn unify>(&mut self, ty1: &T, ty2: &T) -> bool { let result = match self.try_unify(ty1, ty2) { diff --git a/crates/hir-ty/src/interner.rs b/crates/hir-ty/src/interner.rs index ca76e08fddb91..441503a300e5c 100644 --- a/crates/hir-ty/src/interner.rs +++ b/crates/hir-ty/src/interner.rs @@ -143,7 +143,7 @@ impl chalk_ir::interner::Interner for Interner { fn debug_goal(goal: &Goal, fmt: &mut fmt::Formatter<'_>) -> Option { let goal_data = goal.data(Interner); - Some(write!(fmt, "{:?}", goal_data)) + Some(write!(fmt, "{goal_data:?}")) } fn debug_goals( @@ -228,7 +228,7 @@ impl chalk_ir::interner::Interner for Interner { Interned::new(InternedWrapper(chalk_ir::TyData { kind, flags })) } - fn ty_data<'a>(self, ty: &'a Self::InternedType) -> &'a chalk_ir::TyData { + fn ty_data(self, ty: &Self::InternedType) -> &chalk_ir::TyData { &ty.0 } @@ -236,10 +236,7 @@ impl chalk_ir::interner::Interner for Interner { Interned::new(InternedWrapper(lifetime)) } - fn lifetime_data<'a>( - self, - lifetime: &'a Self::InternedLifetime, - ) -> &'a chalk_ir::LifetimeData { + fn lifetime_data(self, lifetime: &Self::InternedLifetime) -> &chalk_ir::LifetimeData { &lifetime.0 } @@ -247,7 +244,7 @@ impl chalk_ir::interner::Interner for Interner { Interned::new(InternedWrapper(constant)) } - fn const_data<'a>(self, constant: &'a Self::InternedConst) -> &'a chalk_ir::ConstData { + fn const_data(self, constant: &Self::InternedConst) -> &chalk_ir::ConstData { &constant.0 } @@ -267,10 +264,10 @@ impl chalk_ir::interner::Interner for Interner { parameter } - fn generic_arg_data<'a>( + fn generic_arg_data( self, - parameter: &'a Self::InternedGenericArg, - ) -> &'a chalk_ir::GenericArgData { + parameter: &Self::InternedGenericArg, + ) -> &chalk_ir::GenericArgData { parameter } @@ -285,11 +282,11 @@ impl chalk_ir::interner::Interner for Interner { data.into_iter().collect() } - fn goal_data<'a>(self, goal: &'a Self::InternedGoal) -> &'a GoalData { + fn goal_data(self, goal: &Self::InternedGoal) -> &GoalData { goal } - fn goals_data<'a>(self, goals: &'a Self::InternedGoals) -> &'a [Goal] { + fn goals_data(self, goals: &Self::InternedGoals) -> &[Goal] { goals } @@ -300,10 +297,7 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn 
substitution_data<'a>( - self, - substitution: &'a Self::InternedSubstitution, - ) -> &'a [GenericArg] { + fn substitution_data(self, substitution: &Self::InternedSubstitution) -> &[GenericArg] { &substitution.as_ref().0 } @@ -314,10 +308,10 @@ impl chalk_ir::interner::Interner for Interner { data } - fn program_clause_data<'a>( + fn program_clause_data( self, - clause: &'a Self::InternedProgramClause, - ) -> &'a chalk_ir::ProgramClauseData { + clause: &Self::InternedProgramClause, + ) -> &chalk_ir::ProgramClauseData { clause } @@ -328,10 +322,10 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn program_clauses_data<'a>( + fn program_clauses_data( self, - clauses: &'a Self::InternedProgramClauses, - ) -> &'a [chalk_ir::ProgramClause] { + clauses: &Self::InternedProgramClauses, + ) -> &[chalk_ir::ProgramClause] { clauses } @@ -342,10 +336,10 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn quantified_where_clauses_data<'a>( + fn quantified_where_clauses_data( self, - clauses: &'a Self::InternedQuantifiedWhereClauses, - ) -> &'a [chalk_ir::QuantifiedWhereClause] { + clauses: &Self::InternedQuantifiedWhereClauses, + ) -> &[chalk_ir::QuantifiedWhereClause] { clauses } @@ -356,10 +350,10 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn variable_kinds_data<'a>( + fn variable_kinds_data( self, - parameter_kinds: &'a Self::InternedVariableKinds, - ) -> &'a [chalk_ir::VariableKind] { + parameter_kinds: &Self::InternedVariableKinds, + ) -> &[chalk_ir::VariableKind] { ¶meter_kinds.as_ref().0 } @@ -370,10 +364,10 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn canonical_var_kinds_data<'a>( + fn canonical_var_kinds_data( self, - canonical_var_kinds: &'a Self::InternedCanonicalVarKinds, - ) -> &'a [chalk_ir::CanonicalVarKind] { + canonical_var_kinds: &Self::InternedCanonicalVarKinds, + ) -> &[chalk_ir::CanonicalVarKind] { canonical_var_kinds } @@ -384,10 +378,10 @@ impl chalk_ir::interner::Interner for Interner { data.into_iter().collect() } - fn constraints_data<'a>( + fn constraints_data( self, - constraints: &'a Self::InternedConstraints, - ) -> &'a [chalk_ir::InEnvironment>] { + constraints: &Self::InternedConstraints, + ) -> &[chalk_ir::InEnvironment>] { constraints } fn debug_closure_id( @@ -410,10 +404,7 @@ impl chalk_ir::interner::Interner for Interner { Ok(Interned::new(InternedWrapper(data.into_iter().collect::>()?))) } - fn variances_data<'a>( - self, - variances: &'a Self::InternedVariances, - ) -> &'a [chalk_ir::Variance] { + fn variances_data(self, variances: &Self::InternedVariances) -> &[chalk_ir::Variance] { variances } } diff --git a/crates/hir-ty/src/lang_items.rs b/crates/hir-ty/src/lang_items.rs new file mode 100644 index 0000000000000..afc54e729f9c3 --- /dev/null +++ b/crates/hir-ty/src/lang_items.rs @@ -0,0 +1,20 @@ +//! 
Functions to detect special lang items + +use hir_def::{AdtId, HasModule}; +use hir_expand::name; + +use crate::db::HirDatabase; + +pub fn is_box(adt: AdtId, db: &dyn HirDatabase) -> bool { + let owned_box = name![owned_box].to_smol_str(); + let krate = adt.module(db.upcast()).krate(); + let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from); + Some(adt) == box_adt +} + +pub fn is_unsafe_cell(adt: AdtId, db: &dyn HirDatabase) -> bool { + let owned_box = name![unsafe_cell].to_smol_str(); + let krate = adt.module(db.upcast()).krate(); + let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from); + Some(adt) == box_adt +} diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs new file mode 100644 index 0000000000000..7a1cca3143ec8 --- /dev/null +++ b/crates/hir-ty/src/layout.rs @@ -0,0 +1,279 @@ +//! Compute the binary representation of a type + +use std::sync::Arc; + +use base_db::CrateId; +use chalk_ir::{AdtId, TyKind}; +use hir_def::{ + layout::{ + Abi, FieldsShape, Integer, Layout, LayoutCalculator, LayoutError, Primitive, ReprOptions, + RustcEnumVariantIdx, Scalar, Size, StructKind, TargetDataLayout, Variants, WrappingRange, + }, + LocalFieldId, +}; +use stdx::never; + +use crate::{db::HirDatabase, Interner, Substitution, Ty}; + +use self::adt::struct_variant_idx; +pub use self::{ + adt::{layout_of_adt_query, layout_of_adt_recover}, + target::target_data_layout_query, +}; + +macro_rules! user_error { + ($x: expr) => { + return Err(LayoutError::UserError(format!($x))) + }; +} + +mod adt; +mod target; + +struct LayoutCx<'a> { + db: &'a dyn HirDatabase, + krate: CrateId, +} + +impl LayoutCalculator for LayoutCx<'_> { + type TargetDataLayoutRef = Arc; + + fn delay_bug(&self, txt: &str) { + never!("{}", txt); + } + + fn current_data_layout(&self) -> Arc { + self.db.target_data_layout(self.krate) + } +} + +fn scalar_unit(dl: &TargetDataLayout, value: Primitive) -> Scalar { + Scalar::Initialized { value, valid_range: WrappingRange::full(value.size(dl)) } +} + +fn scalar(dl: &TargetDataLayout, value: Primitive) -> Layout { + Layout::scalar(dl, scalar_unit(dl, value)) +} + +pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result { + let cx = LayoutCx { db, krate }; + let dl = &*cx.current_data_layout(); + Ok(match ty.kind(Interner) { + TyKind::Adt(AdtId(def), subst) => db.layout_of_adt(*def, subst.clone())?, + TyKind::Scalar(s) => match s { + chalk_ir::Scalar::Bool => Layout::scalar( + dl, + Scalar::Initialized { + value: Primitive::Int(Integer::I8, false), + valid_range: WrappingRange { start: 0, end: 1 }, + }, + ), + chalk_ir::Scalar::Char => Layout::scalar( + dl, + Scalar::Initialized { + value: Primitive::Int(Integer::I32, false), + valid_range: WrappingRange { start: 0, end: 0x10FFFF }, + }, + ), + chalk_ir::Scalar::Int(i) => scalar( + dl, + Primitive::Int( + match i { + chalk_ir::IntTy::Isize => dl.ptr_sized_integer(), + chalk_ir::IntTy::I8 => Integer::I8, + chalk_ir::IntTy::I16 => Integer::I16, + chalk_ir::IntTy::I32 => Integer::I32, + chalk_ir::IntTy::I64 => Integer::I64, + chalk_ir::IntTy::I128 => Integer::I128, + }, + true, + ), + ), + chalk_ir::Scalar::Uint(i) => scalar( + dl, + Primitive::Int( + match i { + chalk_ir::UintTy::Usize => dl.ptr_sized_integer(), + chalk_ir::UintTy::U8 => Integer::I8, + chalk_ir::UintTy::U16 => Integer::I16, + chalk_ir::UintTy::U32 => Integer::I32, + chalk_ir::UintTy::U64 => Integer::I64, + chalk_ir::UintTy::U128 => Integer::I128, + }, + false, + ), + 
), + chalk_ir::Scalar::Float(f) => scalar( + dl, + match f { + chalk_ir::FloatTy::F32 => Primitive::F32, + chalk_ir::FloatTy::F64 => Primitive::F64, + }, + ), + }, + TyKind::Tuple(len, tys) => { + let kind = if *len == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized }; + + let fields = tys + .iter(Interner) + .map(|k| layout_of_ty(db, k.assert_ty_ref(Interner), krate)) + .collect::, _>>()?; + let fields = fields.iter().collect::>(); + let fields = fields.iter().collect::>(); + cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)? + } + TyKind::Array(element, count) => { + let count = match count.data(Interner).value { + chalk_ir::ConstValue::Concrete(c) => match c.interned { + hir_def::type_ref::ConstScalar::Int(x) => x as u64, + hir_def::type_ref::ConstScalar::UInt(x) => x as u64, + hir_def::type_ref::ConstScalar::Unknown => { + user_error!("unknown const generic parameter") + } + _ => user_error!("mismatched type of const generic parameter"), + }, + _ => return Err(LayoutError::HasPlaceholder), + }; + let element = layout_of_ty(db, element, krate)?; + let size = element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow)?; + + let abi = if count != 0 && matches!(element.abi, Abi::Uninhabited) { + Abi::Uninhabited + } else { + Abi::Aggregate { sized: true } + }; + + let largest_niche = if count != 0 { element.largest_niche } else { None }; + + Layout { + variants: Variants::Single { index: struct_variant_idx() }, + fields: FieldsShape::Array { stride: element.size, count }, + abi, + largest_niche, + align: element.align, + size, + } + } + TyKind::Slice(element) => { + let element = layout_of_ty(db, element, krate)?; + Layout { + variants: Variants::Single { index: struct_variant_idx() }, + fields: FieldsShape::Array { stride: element.size, count: 0 }, + abi: Abi::Aggregate { sized: false }, + largest_niche: None, + align: element.align, + size: Size::ZERO, + } + } + // Potentially-wide pointers. + TyKind::Ref(_, _, pointee) | TyKind::Raw(_, pointee) => { + let mut data_ptr = scalar_unit(dl, Primitive::Pointer); + if matches!(ty.kind(Interner), TyKind::Ref(..)) { + data_ptr.valid_range_mut().start = 1; + } + + // let pointee = tcx.normalize_erasing_regions(param_env, pointee); + // if pointee.is_sized(tcx.at(DUMMY_SP), param_env) { + // return Ok(tcx.intern_layout(LayoutS::scalar(cx, data_ptr))); + // } + + let unsized_part = struct_tail_erasing_lifetimes(db, pointee.clone()); + let metadata = match unsized_part.kind(Interner) { + TyKind::Slice(_) | TyKind::Str => { + scalar_unit(dl, Primitive::Int(dl.ptr_sized_integer(), false)) + } + TyKind::Dyn(..) => { + let mut vtable = scalar_unit(dl, Primitive::Pointer); + vtable.valid_range_mut().start = 1; + vtable + } + _ => { + // pointee is sized + return Ok(Layout::scalar(dl, data_ptr)); + } + }; + + // Effectively a (ptr, meta) tuple. 
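    // Illustrative aside, not part of the patch: the (data pointer, metadata)
    // pair assembled below is why slice and trait-object references are two
    // pointers wide on common targets, while thin references are one. A minimal
    // sketch (function name is hypothetical):
    fn wide_pointer_example() {
        use core::mem::size_of;
        assert_eq!(size_of::<&u8>(), size_of::<usize>());
        assert_eq!(size_of::<&[u8]>(), 2 * size_of::<usize>());
        assert_eq!(size_of::<&dyn core::fmt::Debug>(), 2 * size_of::<usize>());
    }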
+ cx.scalar_pair(data_ptr, metadata) + } + TyKind::FnDef(_, _) => layout_of_unit(&cx, dl)?, + TyKind::Str => Layout { + variants: Variants::Single { index: struct_variant_idx() }, + fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 }, + abi: Abi::Aggregate { sized: false }, + largest_niche: None, + align: dl.i8_align, + size: Size::ZERO, + }, + TyKind::Never => Layout { + variants: Variants::Single { index: struct_variant_idx() }, + fields: FieldsShape::Primitive, + abi: Abi::Uninhabited, + largest_niche: None, + align: dl.i8_align, + size: Size::ZERO, + }, + TyKind::Dyn(_) | TyKind::Foreign(_) => { + let mut unit = layout_of_unit(&cx, dl)?; + match unit.abi { + Abi::Aggregate { ref mut sized } => *sized = false, + _ => user_error!("bug"), + } + unit + } + TyKind::Function(_) => { + let mut ptr = scalar_unit(dl, Primitive::Pointer); + ptr.valid_range_mut().start = 1; + Layout::scalar(dl, ptr) + } + TyKind::Closure(_, _) + | TyKind::OpaqueType(_, _) + | TyKind::Generator(_, _) + | TyKind::GeneratorWitness(_, _) => return Err(LayoutError::NotImplemented), + TyKind::AssociatedType(_, _) + | TyKind::Error + | TyKind::Alias(_) + | TyKind::Placeholder(_) + | TyKind::BoundVar(_) + | TyKind::InferenceVar(_, _) => return Err(LayoutError::HasPlaceholder), + }) +} + +fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result { + cx.univariant::( + dl, + &[], + &ReprOptions::default(), + StructKind::AlwaysSized, + ) + .ok_or(LayoutError::Unknown) +} + +fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty { + match pointee.kind(Interner) { + TyKind::Adt(AdtId(adt), subst) => match adt { + &hir_def::AdtId::StructId(i) => { + let data = db.struct_data(i); + let mut it = data.variant_data.fields().iter().rev(); + match it.next() { + Some((f, _)) => field_ty(db, i.into(), f, subst), + None => pointee, + } + } + _ => pointee, + }, + _ => pointee, + } +} + +fn field_ty( + db: &dyn HirDatabase, + def: hir_def::VariantId, + fd: LocalFieldId, + subst: &Substitution, +) -> Ty { + db.field_types(def)[fd].clone().substitute(Interner, subst) +} + +#[cfg(test)] +mod tests; diff --git a/crates/hir-ty/src/layout/adt.rs b/crates/hir-ty/src/layout/adt.rs new file mode 100644 index 0000000000000..23166a5a5223a --- /dev/null +++ b/crates/hir-ty/src/layout/adt.rs @@ -0,0 +1,134 @@ +//! 
Compute the binary representation of structs, unions and enums + +use std::ops::Bound; + +use hir_def::{ + adt::VariantData, + layout::{Integer, IntegerExt, Layout, LayoutCalculator, LayoutError, RustcEnumVariantIdx}, + AdtId, EnumVariantId, HasModule, LocalEnumVariantId, VariantId, +}; +use la_arena::RawIdx; +use smallvec::SmallVec; + +use crate::{db::HirDatabase, lang_items::is_unsafe_cell, layout::field_ty, Substitution}; + +use super::{layout_of_ty, LayoutCx}; + +pub(crate) fn struct_variant_idx() -> RustcEnumVariantIdx { + RustcEnumVariantIdx(LocalEnumVariantId::from_raw(RawIdx::from(0))) +} + +pub fn layout_of_adt_query( + db: &dyn HirDatabase, + def: AdtId, + subst: Substitution, +) -> Result { + let cx = LayoutCx { db, krate: def.module(db.upcast()).krate() }; + let dl = cx.current_data_layout(); + let handle_variant = |def: VariantId, var: &VariantData| { + var.fields() + .iter() + .map(|(fd, _)| layout_of_ty(db, &field_ty(db, def, fd, &subst), cx.krate)) + .collect::, _>>() + }; + let (variants, is_enum, is_union, repr) = match def { + AdtId::StructId(s) => { + let data = db.struct_data(s); + let mut r = SmallVec::<[_; 1]>::new(); + r.push(handle_variant(s.into(), &data.variant_data)?); + (r, false, false, data.repr.unwrap_or_default()) + } + AdtId::UnionId(id) => { + let data = db.union_data(id); + let mut r = SmallVec::new(); + r.push(handle_variant(id.into(), &data.variant_data)?); + (r, false, true, data.repr.unwrap_or_default()) + } + AdtId::EnumId(e) => { + let data = db.enum_data(e); + let r = data + .variants + .iter() + .map(|(idx, v)| { + handle_variant( + EnumVariantId { parent: e, local_id: idx }.into(), + &v.variant_data, + ) + }) + .collect::, _>>()?; + (r, true, false, data.repr.unwrap_or_default()) + } + }; + let variants = + variants.iter().map(|x| x.iter().collect::>()).collect::>(); + let variants = variants.iter().map(|x| x.iter().collect()).collect(); + if is_union { + cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown) + } else { + cx.layout_of_struct_or_enum( + &repr, + &variants, + is_enum, + is_unsafe_cell(def, db), + layout_scalar_valid_range(db, def), + |min, max| Integer::repr_discr(&dl, &repr, min, max).unwrap_or((Integer::I8, false)), + variants.iter_enumerated().filter_map(|(id, _)| { + let AdtId::EnumId(e) = def else { return None }; + let d = match db + .const_eval_variant(EnumVariantId { parent: e, local_id: id.0 }) + .ok()? + { + crate::consteval::ComputedExpr::Literal(l) => match l { + hir_def::expr::Literal::Int(i, _) => i, + hir_def::expr::Literal::Uint(i, _) => i as i128, + _ => return None, + }, + _ => return None, + }; + Some((id, d)) + }), + // FIXME: The current code for niche-filling relies on variant indices + // instead of actual discriminants, so enums with + // explicit discriminants (RFC #2363) would misbehave and we should disable + // niche optimization for them. 
+ // The code that do it in rustc: + // repr.inhibit_enum_layout_opt() || def + // .variants() + // .iter_enumerated() + // .any(|(i, v)| v.discr != ty::VariantDiscr::Relative(i.as_u32())) + repr.inhibit_enum_layout_opt(), + !is_enum + && variants + .iter() + .next() + .and_then(|x| x.last().map(|x| x.is_unsized())) + .unwrap_or(true), + ) + .ok_or(LayoutError::SizeOverflow) + } +} + +fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound, Bound) { + let attrs = db.attrs(def.into()); + let get = |name| { + let attr = attrs.by_key(name).tt_values(); + for tree in attr { + if let Some(x) = tree.token_trees.first() { + if let Ok(x) = x.to_string().parse() { + return Bound::Included(x); + } + } + } + Bound::Unbounded + }; + (get("rustc_layout_scalar_valid_range_start"), get("rustc_layout_scalar_valid_range_end")) +} + +pub fn layout_of_adt_recover( + _: &dyn HirDatabase, + _: &[String], + _: &AdtId, + _: &Substitution, +) -> Result { + user_error!("infinite sized recursive type"); +} diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs new file mode 100644 index 0000000000000..37b831652f565 --- /dev/null +++ b/crates/hir-ty/src/layout/target.rs @@ -0,0 +1,36 @@ +//! Target dependent parameters needed for layouts + +use std::sync::Arc; + +use base_db::CrateId; +use hir_def::layout::{Endian, Size, TargetDataLayout}; + +use crate::db::HirDatabase; + +pub fn target_data_layout_query(db: &dyn HirDatabase, krate: CrateId) -> Arc { + let crate_graph = db.crate_graph(); + let target_layout = &crate_graph[krate].target_layout; + let cfg_options = &crate_graph[krate].cfg_options; + Arc::new( + target_layout + .as_ref() + .and_then(|it| TargetDataLayout::parse_from_llvm_datalayout_string(it).ok()) + .unwrap_or_else(|| { + let endian = match cfg_options.get_cfg_values("target_endian").next() { + Some(x) if x.as_str() == "big" => Endian::Big, + _ => Endian::Little, + }; + let pointer_size = Size::from_bytes( + match cfg_options.get_cfg_values("target_pointer_width").next() { + Some(x) => match x.as_str() { + "16" => 2, + "32" => 4, + _ => 8, + }, + _ => 8, + }, + ); + TargetDataLayout { endian, pointer_size, ..TargetDataLayout::default() } + }), + ) +} diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs new file mode 100644 index 0000000000000..53838cf41d274 --- /dev/null +++ b/crates/hir-ty/src/layout/tests.rs @@ -0,0 +1,208 @@ +use base_db::fixture::WithFixture; +use chalk_ir::{AdtId, TyKind}; +use hir_def::{ + db::DefDatabase, + layout::{Layout, LayoutError}, +}; + +use crate::{test_db::TestDB, Interner, Substitution}; + +use super::layout_of_ty; + +fn eval_goal(ra_fixture: &str, minicore: &str) -> Result { + // using unstable cargo features failed, fall back to using plain rustc + let mut cmd = std::process::Command::new("rustc"); + cmd.args(["-Z", "unstable-options", "--print", "target-spec-json"]).env("RUSTC_BOOTSTRAP", "1"); + let output = cmd.output().unwrap(); + assert!(output.status.success(), "{}", output.status); + let stdout = String::from_utf8(output.stdout).unwrap(); + let target_data_layout = + stdout.split_once(r#""data-layout": ""#).unwrap().1.split_once('"').unwrap().0.to_owned(); + + let ra_fixture = format!( + "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}", + ); + + let (db, file_id) = TestDB::with_single_file(&ra_fixture); + let module_id = db.module_for_file(file_id); + let def_map = module_id.def_map(&db); + let scope = &def_map[module_id.local_id].scope; + let 
adt_id = scope + .declarations() + .find_map(|x| match x { + hir_def::ModuleDefId::AdtId(x) => { + let name = match x { + hir_def::AdtId::StructId(x) => db.struct_data(x).name.to_smol_str(), + hir_def::AdtId::UnionId(x) => db.union_data(x).name.to_smol_str(), + hir_def::AdtId::EnumId(x) => db.enum_data(x).name.to_smol_str(), + }; + (name == "Goal").then_some(x) + } + _ => None, + }) + .unwrap(); + let goal_ty = TyKind::Adt(AdtId(adt_id), Substitution::empty(Interner)).intern(Interner); + layout_of_ty(&db, &goal_ty, module_id.krate()) +} + +#[track_caller] +fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64) { + let l = eval_goal(ra_fixture, minicore).unwrap(); + assert_eq!(l.size.bytes(), size); + assert_eq!(l.align.abi.bytes(), align); +} + +#[track_caller] +fn check_fail(ra_fixture: &str, e: LayoutError) { + let r = eval_goal(ra_fixture, ""); + assert_eq!(r, Err(e)); +} + +macro_rules! size_and_align { + (minicore: $($x:tt),*;$($t:tt)*) => { + { + #[allow(dead_code)] + $($t)* + check_size_and_align( + stringify!($($t)*), + &format!("//- minicore: {}\n", stringify!($($x),*)), + ::std::mem::size_of::() as u64, + ::std::mem::align_of::() as u64, + ); + } + }; + ($($t:tt)*) => { + { + #[allow(dead_code)] + $($t)* + check_size_and_align( + stringify!($($t)*), + "", + ::std::mem::size_of::() as u64, + ::std::mem::align_of::() as u64, + ); + } + }; +} + +#[test] +fn hello_world() { + size_and_align! { + struct Goal(i32); + } +} + +#[test] +fn field_order_optimization() { + size_and_align! { + struct Goal(u8, i32, u8); + } + size_and_align! { + #[repr(C)] + struct Goal(u8, i32, u8); + } +} + +#[test] +fn recursive() { + size_and_align! { + struct Goal { + left: &'static Goal, + right: &'static Goal, + } + } + size_and_align! { + struct BoxLike(*mut T); + struct Goal(BoxLike); + } + check_fail( + r#"struct Goal(Goal);"#, + LayoutError::UserError("infinite sized recursive type".to_string()), + ); + check_fail( + r#" + struct Foo(Foo); + struct Goal(Foo); + "#, + LayoutError::UserError("infinite sized recursive type".to_string()), + ); +} + +#[test] +fn generic() { + size_and_align! { + struct Pair(A, B); + struct Goal(Pair, i64>); + } + size_and_align! { + struct X { + field1: [i32; N], + field2: [u8; N], + } + struct Goal(X<1000>); + } +} + +#[test] +fn enums() { + size_and_align! { + enum Goal { + Quit, + Move { x: i32, y: i32 }, + ChangeColor(i32, i32, i32), + } + } +} + +#[test] +fn primitives() { + size_and_align! { + struct Goal(i32, i128, isize, usize, f32, f64, bool, char); + } +} + +#[test] +fn tuple() { + size_and_align! { + struct Goal((), (i32, u64, bool)); + } +} + +#[test] +fn non_zero() { + size_and_align! { + minicore: non_zero, option; + use core::num::NonZeroU8; + struct Goal(Option); + } +} + +#[test] +fn niche_optimization() { + size_and_align! { + minicore: option; + struct Goal(Option<&'static i32>); + } + size_and_align! { + minicore: option; + struct Goal(Option>); + } +} + +#[test] +fn enums_with_discriminants() { + size_and_align! { + enum Goal { + A = 1000, + B = 2000, + C = 3000, + } + } + size_and_align! 
{ + enum Goal { + A = 254, + B, + C, // implicitly becomes 256, so we need two bytes + } + } +} diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 39514fc44e6c8..cbe6873c7d5f5 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -27,6 +27,8 @@ pub mod display; pub mod method_resolution; pub mod primitive; pub mod traits; +pub mod layout; +pub mod lang_items; #[cfg(test)] mod tests; @@ -38,7 +40,7 @@ use std::sync::Arc; use chalk_ir::{ fold::{Shift, TypeFoldable}, interner::HasInterner, - NoSolution, UniverseIndex, + NoSolution, }; use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId}; use hir_expand::name; @@ -46,7 +48,9 @@ use itertools::Either; use traits::FnTrait; use utils::Generics; -use crate::{consteval::unknown_const, db::HirDatabase, utils::generics}; +use crate::{ + consteval::unknown_const, db::HirDatabase, infer::unify::InferenceTable, utils::generics, +}; pub use autoderef::autoderef; pub use builder::{ParamKind, TyBuilder}; @@ -511,7 +515,7 @@ where let mut error_replacer = ErrorReplacer { vars: 0 }; let value = match t.clone().try_fold_with(&mut error_replacer, DebruijnIndex::INNERMOST) { Ok(t) => t, - Err(_) => panic!("Encountered unbound or inference vars in {:?}", t), + Err(_) => panic!("Encountered unbound or inference vars in {t:?}"), }; let kinds = (0..error_replacer.vars).map(|_| { chalk_ir::CanonicalVarKind::new( @@ -531,54 +535,31 @@ pub fn callable_sig_from_fnonce( let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?; let output_assoc_type = db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; + let mut table = InferenceTable::new(db, env.clone()); let b = TyBuilder::trait_ref(db, fn_once_trait); if b.remaining() != 2 { return None; } - let fn_once = b.push(self_ty.clone()).fill_with_bound_vars(DebruijnIndex::INNERMOST, 0).build(); - let kinds = fn_once - .substitution - .iter(Interner) - .skip(1) - .map(|x| { - let vk = match x.data(Interner) { - chalk_ir::GenericArgData::Ty(_) => { - chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) - } - chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime, - chalk_ir::GenericArgData::Const(c) => { - chalk_ir::VariableKind::Const(c.data(Interner).ty.clone()) - } - }; - chalk_ir::WithKind::new(vk, UniverseIndex::ROOT) - }) - .collect::>(); - - // FIXME: chalk refuses to solve `>::Output == ^0.1`, so we first solve - // `>` and then replace `^0.0` with the concrete argument tuple. 
- let trait_env = env.env.clone(); - let obligation = InEnvironment { goal: fn_once.cast(Interner), environment: trait_env }; - let canonical = - Canonical { binders: CanonicalVarKinds::from_iter(Interner, kinds), value: obligation }; - let subst = match db.trait_solve(krate, canonical) { - Some(Solution::Unique(vars)) => vars.value.subst, - _ => return None, - }; - let args = subst.at(Interner, 0).ty(Interner)?; - let params = match args.kind(Interner) { - chalk_ir::TyKind::Tuple(_, subst) => { - subst.iter(Interner).filter_map(|arg| arg.ty(Interner).cloned()).collect::>() - } - _ => return None, - }; - let fn_once = - TyBuilder::trait_ref(db, fn_once_trait).push(self_ty.clone()).push(args.clone()).build(); - let projection = - TyBuilder::assoc_type_projection(db, output_assoc_type, Some(fn_once.substitution.clone())) - .build(); + // Register two obligations: + // - Self: FnOnce + // - >::Output == ?ret_ty + let args_ty = table.new_type_var(); + let trait_ref = b.push(self_ty.clone()).push(args_ty.clone()).build(); + let projection = TyBuilder::assoc_type_projection( + db, + output_assoc_type, + Some(trait_ref.substitution.clone()), + ) + .build(); + table.register_obligation(trait_ref.cast(Interner)); + let ret_ty = table.normalize_projection_ty(projection); + + let ret_ty = table.resolve_completely(ret_ty); + let args_ty = table.resolve_completely(args_ty); - let ret_ty = db.normalize_projection(projection, env); + let params = + args_ty.as_tuple()?.iter(Interner).map(|it| it.assert_ty_ref(Interner)).cloned().collect(); - Some(CallableSig::from_params_and_return(params, ret_ty.clone(), false, Safety::Safe)) + Some(CallableSig::from_params_and_return(params, ret_ty, false, Safety::Safe)) } diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index baf9842d5fbf2..592410008a679 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -603,9 +603,8 @@ impl<'a> TyLoweringContext<'a> { } fn select_associated_type(&self, res: Option, segment: PathSegment<'_>) -> Ty { - let (def, res) = match (self.resolver.generic_def(), res) { - (Some(def), Some(res)) => (def, res), - _ => return TyKind::Error.intern(Interner), + let Some((def, res)) = self.resolver.generic_def().zip(res) else { + return TyKind::Error.intern(Interner); }; let ty = named_associated_type_shorthand_candidates( self.db, @@ -617,6 +616,21 @@ impl<'a> TyLoweringContext<'a> { return None; } + let parent_subst = t.substitution.clone(); + let parent_subst = match self.type_param_mode { + ParamLoweringMode::Placeholder => { + // if we're lowering to placeholders, we have to put them in now. + let generics = generics(self.db.upcast(), def); + let s = generics.placeholder_subst(self.db); + s.apply(parent_subst, Interner) + } + ParamLoweringMode::Variable => { + // We need to shift in the bound vars, since + // `named_associated_type_shorthand_candidates` does not do that. + parent_subst.shifted_in_from(Interner, self.in_binders) + } + }; + // FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent // generic params. 
It's inefficient to splice the `Substitution`s, so we may want // that method to optionally take parent `Substitution` as we already know them at @@ -632,22 +646,9 @@ impl<'a> TyLoweringContext<'a> { let substs = Substitution::from_iter( Interner, - substs.iter(Interner).take(len_self).chain(t.substitution.iter(Interner)), + substs.iter(Interner).take(len_self).chain(parent_subst.iter(Interner)), ); - let substs = match self.type_param_mode { - ParamLoweringMode::Placeholder => { - // if we're lowering to placeholders, we have to put - // them in now - let generics = generics(self.db.upcast(), def); - let s = generics.placeholder_subst(self.db); - s.apply(substs, Interner) - } - ParamLoweringMode::Variable => substs, - }; - // We need to shift in the bound vars, since - // associated_type_shorthand_candidates does not do that - let substs = substs.shifted_in_from(Interner, self.in_binders); Some( TyKind::Alias(AliasTy::Projection(ProjectionTy { associated_ty_id: to_assoc_type_id(associated_ty), @@ -779,7 +780,7 @@ impl<'a> TyLoweringContext<'a> { |_, c, ty| { const_or_path_to_chalk( self.db, - &self.resolver, + self.resolver, ty, c, self.type_param_mode, @@ -1190,9 +1191,9 @@ pub fn associated_type_shorthand_candidates( db: &dyn HirDatabase, def: GenericDefId, res: TypeNs, - cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option, + mut cb: impl FnMut(&Name, TypeAliasId) -> Option, ) -> Option { - named_associated_type_shorthand_candidates(db, def, res, None, cb) + named_associated_type_shorthand_candidates(db, def, res, None, |name, _, id| cb(name, id)) } fn named_associated_type_shorthand_candidates( @@ -1202,6 +1203,9 @@ fn named_associated_type_shorthand_candidates( def: GenericDefId, res: TypeNs, assoc_name: Option, + // Do NOT let `cb` touch `TraitRef` outside of `TyLoweringContext`. Its substitution contains + // free `BoundVar`s that need to be shifted and only `TyLoweringContext` knows how to do that + // properly (see `TyLoweringContext::select_associated_type()`). 
mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option, ) -> Option { let mut search = |t| { @@ -1792,8 +1796,7 @@ pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binde let impl_data = db.impl_data(impl_id); let resolver = impl_id.resolver(db.upcast()); let _cx = stdx::panic_context::enter(format!( - "impl_self_ty_query({:?} -> {:?} -> {:?})", - impl_id, impl_loc, impl_data + "impl_self_ty_query({impl_id:?} -> {impl_loc:?} -> {impl_data:?})" )); let generics = generics(db.upcast(), impl_id.into()); let ctx = @@ -1830,8 +1833,7 @@ pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option< let impl_data = db.impl_data(impl_id); let resolver = impl_id.resolver(db.upcast()); let _cx = stdx::panic_context::enter(format!( - "impl_trait_query({:?} -> {:?} -> {:?})", - impl_id, impl_loc, impl_data + "impl_trait_query({impl_id:?} -> {impl_loc:?} -> {impl_data:?})" )); let ctx = TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable); @@ -1850,7 +1852,7 @@ pub(crate) fn return_type_impl_traits( let ctx_ret = TyLoweringContext::new(db, &resolver) .with_impl_trait_mode(ImplTraitLoweringMode::Opaque) .with_type_param_mode(ParamLoweringMode::Variable); - let _ret = (&ctx_ret).lower_ty(&data.ret_type); + let _ret = ctx_ret.lower_ty(&data.ret_type); let generics = generics(db.upcast(), def.into()); let return_type_impl_traits = ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() }; @@ -1979,7 +1981,7 @@ fn fallback_bound_vars + HasInterner ArrayVec { +) -> SmallVec<[CrateId; 2]> { let _p = profile::span("inherent_impl_crates_query"); - let mut res = ArrayVec::new(); + let mut res = SmallVec::new(); let crate_graph = db.crate_graph(); + // should pass crate for finger print and do reverse deps + for krate in crate_graph.transitive_deps(krate) { - if res.is_full() { - // we don't currently look for or store more than two crates here, - // so don't needlessly look at more crates than necessary. 
- break; - } let impls = db.inherent_impls_in_crate(krate); if impls.map.get(&fp).map_or(false, |v| !v.is_empty()) { res.push(krate); @@ -392,19 +388,40 @@ pub fn def_crates( db: &dyn HirDatabase, ty: &Ty, cur_crate: CrateId, -) -> Option> { - let mod_to_crate_ids = |module: ModuleId| Some(iter::once(module.krate()).collect()); - - let fp = TyFingerprint::for_inherent_impl(ty); - +) -> Option> { match ty.kind(Interner) { - TyKind::Adt(AdtId(def_id), _) => mod_to_crate_ids(def_id.module(db.upcast())), - TyKind::Foreign(id) => { - mod_to_crate_ids(from_foreign_def_id(*id).lookup(db.upcast()).module(db.upcast())) + &TyKind::Adt(AdtId(def_id), _) => { + let rustc_has_incoherent_inherent_impls = match def_id { + hir_def::AdtId::StructId(id) => { + db.struct_data(id).rustc_has_incoherent_inherent_impls + } + hir_def::AdtId::UnionId(id) => { + db.union_data(id).rustc_has_incoherent_inherent_impls + } + hir_def::AdtId::EnumId(id) => db.enum_data(id).rustc_has_incoherent_inherent_impls, + }; + Some(if rustc_has_incoherent_inherent_impls { + db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Adt(def_id)) + } else { + smallvec![def_id.module(db.upcast()).krate()] + }) + } + &TyKind::Foreign(id) => { + let alias = from_foreign_def_id(id); + Some(if db.type_alias_data(alias).rustc_has_incoherent_inherent_impls { + db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::ForeignType(id)) + } else { + smallvec![alias.module(db.upcast()).krate()] + }) + } + TyKind::Dyn(_) => { + let trait_id = ty.dyn_trait()?; + Some(if db.trait_data(trait_id).rustc_has_incoherent_inherent_impls { + db.incoherent_inherent_impl_crates(cur_crate, TyFingerprint::Dyn(trait_id)) + } else { + smallvec![trait_id.module(db.upcast()).krate()] + }) } - TyKind::Dyn(_) => ty - .dyn_trait() - .and_then(|trait_| mod_to_crate_ids(GenericDefId::TraitId(trait_).module(db.upcast()))), // for primitives, there may be impls in various places (core and alloc // mostly). We just check the whole crate graph for crates with impls // (cached behind a query). @@ -412,10 +429,11 @@ pub fn def_crates( | TyKind::Str | TyKind::Slice(_) | TyKind::Array(..) - | TyKind::Raw(..) => { - Some(db.inherent_impl_crates(cur_crate, fp.expect("fingerprint for primitive"))) - } - _ => return None, + | TyKind::Raw(..) => Some(db.incoherent_inherent_impl_crates( + cur_crate, + TyFingerprint::for_inherent_impl(ty).expect("fingerprint for primitive"), + )), + _ => None, } } @@ -470,14 +488,15 @@ pub fn lang_names_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, Name)> /// Look up the method with the given name. 
pub(crate) fn lookup_method( - ty: &Canonical, db: &dyn HirDatabase, + ty: &Canonical, env: Arc, traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: &Name, -) -> Option<(ReceiverAdjustments, FunctionId)> { - iterate_method_candidates( +) -> Option<(ReceiverAdjustments, FunctionId, bool)> { + let mut not_visible = None; + let res = iterate_method_candidates( ty, db, env, @@ -485,11 +504,16 @@ pub(crate) fn lookup_method( visible_from_module, Some(name), LookupMode::MethodCall, - |adjustments, f| match f { - AssocItemId::FunctionId(f) => Some((adjustments, f)), + |adjustments, f, visible| match f { + AssocItemId::FunctionId(f) if visible => Some((adjustments, f, true)), + AssocItemId::FunctionId(f) if not_visible.is_none() => { + not_visible = Some((adjustments, f, false)); + None + } _ => None, }, - ) + ); + res.or(not_visible) } /// Whether we're looking up a dotted method call (like `v.len()`) or a path @@ -601,7 +625,7 @@ pub(crate) fn iterate_method_candidates( visible_from_module: VisibleFromModule, name: Option<&Name>, mode: LookupMode, - mut callback: impl FnMut(ReceiverAdjustments, AssocItemId) -> Option, + mut callback: impl FnMut(ReceiverAdjustments, AssocItemId, bool) -> Option, ) -> Option { let mut slot = None; iterate_method_candidates_dyn( @@ -612,9 +636,9 @@ pub(crate) fn iterate_method_candidates( visible_from_module, name, mode, - &mut |adj, item| { + &mut |adj, item, visible| { assert!(slot.is_none()); - if let Some(it) = callback(adj, item) { + if let Some(it) = callback(adj, item, visible) { slot = Some(it); return ControlFlow::Break(()); } @@ -624,6 +648,30 @@ pub(crate) fn iterate_method_candidates( slot } +pub fn lookup_impl_const( + db: &dyn HirDatabase, + env: Arc, + const_id: ConstId, + subs: Substitution, +) -> ConstId { + let trait_id = match const_id.lookup(db.upcast()).container { + ItemContainerId::TraitId(id) => id, + _ => return const_id, + }; + let substitution = Substitution::from_iter(Interner, subs.iter(Interner)); + let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution }; + + let const_data = db.const_data(const_id); + let name = match const_data.name.as_ref() { + Some(name) => name, + None => return const_id, + }; + + lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name) + .and_then(|assoc| if let AssocItemId::ConstId(id) = assoc { Some(id) } else { None }) + .unwrap_or(const_id) +} + /// Looks up the impl method that actually runs for the trait method `func`. /// /// Returns `func` if it's not a method defined in a trait or the lookup failed. 
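Illustration only, not part of this patch: the language-level situation that the new `lookup_impl_const` above resolves is an associated const declared on a trait and overridden by an impl; resolution should land on the impl's item rather than the trait's default. A minimal stand-alone sketch, with made-up names `HasId` and `Foo`:

trait HasId {
    // Default provided by the trait.
    const ID: u32 = 0;
}

struct Foo;

impl HasId for Foo {
    // Overrides the trait default; this is the item the lookup should return.
    const ID: u32 = 42;
}

fn main() {
    // Const resolution / goto-definition on this path should pick the impl's `ID`.
    assert_eq!(<Foo as HasId>::ID, 42);
}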
@@ -645,15 +693,17 @@ pub fn lookup_impl_method( }; let name = &db.function_data(func).name; - lookup_impl_method_for_trait_ref(trait_ref, db, env, name).unwrap_or(func) + lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name) + .and_then(|assoc| if let AssocItemId::FunctionId(id) = assoc { Some(id) } else { None }) + .unwrap_or(func) } -fn lookup_impl_method_for_trait_ref( +fn lookup_impl_assoc_item_for_trait_ref( trait_ref: TraitRef, db: &dyn HirDatabase, env: Arc, name: &Name, -) -> Option { +) -> Option { let self_ty = trait_ref.self_type_parameter(Interner); let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?; let impls = db.trait_impls_in_deps(env.krate); @@ -663,7 +713,15 @@ fn lookup_impl_method_for_trait_ref( let impl_data = find_matching_impl(impls, table, trait_ref)?; impl_data.items.iter().find_map(|it| match it { - AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f), + AssocItemId::FunctionId(f) => { + (db.function_data(*f).name == *name).then_some(AssocItemId::FunctionId(*f)) + } + AssocItemId::ConstId(c) => db + .const_data(*c) + .name + .as_ref() + .map(|n| *n == *name) + .and_then(|result| if result { Some(AssocItemId::ConstId(*c)) } else { None }), _ => None, }) } @@ -719,7 +777,7 @@ pub fn iterate_path_candidates( name, LookupMode::Path, // the adjustments are not relevant for path lookup - &mut |_, id| callback(id), + &mut |_, id, _| callback(id), ) } @@ -731,7 +789,7 @@ pub fn iterate_method_candidates_dyn( visible_from_module: VisibleFromModule, name: Option<&Name>, mode: LookupMode, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { match mode { LookupMode::MethodCall => { @@ -795,7 +853,7 @@ fn iterate_method_candidates_with_autoref( traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { if receiver_ty.value.is_general_var(Interner, &receiver_ty.binders) { // don't try to resolve methods on unknown types @@ -856,7 +914,7 @@ fn iterate_method_candidates_by_receiver( traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { let mut table = InferenceTable::new(db, env); let receiver_ty = table.instantiate_canonical(receiver_ty.clone()); @@ -868,7 +926,7 @@ fn iterate_method_candidates_by_receiver( while let Some((self_ty, _)) = autoderef.next() { iterate_inherent_methods( &self_ty, - &mut autoderef.table, + autoderef.table, name, Some(&receiver_ty), Some(receiver_adjustments.clone()), @@ -883,7 +941,7 @@ fn iterate_method_candidates_by_receiver( while let Some((self_ty, _)) = autoderef.next() { iterate_trait_method_candidates( &self_ty, - &mut autoderef.table, + autoderef.table, traits_in_scope, name, Some(&receiver_ty), @@ -902,7 +960,7 @@ fn iterate_method_candidates_for_self_ty( traits_in_scope: &FxHashSet, visible_from_module: VisibleFromModule, name: Option<&Name>, - mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> 
ControlFlow<()>, ) -> ControlFlow<()> { let mut table = InferenceTable::new(db, env); let self_ty = table.instantiate_canonical(self_ty.clone()); @@ -933,7 +991,7 @@ fn iterate_trait_method_candidates( name: Option<&Name>, receiver_ty: Option<&Ty>, receiver_adjustments: Option, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { let db = table.db; let env = table.trait_env.clone(); @@ -964,9 +1022,11 @@ fn iterate_trait_method_candidates( for &(_, item) in data.items.iter() { // Don't pass a `visible_from_module` down to `is_valid_candidate`, // since only inherent methods should be included into visibility checking. - if !is_valid_candidate(table, name, receiver_ty, item, self_ty, None) { - continue; - } + let visible = match is_valid_candidate(table, name, receiver_ty, item, self_ty, None) { + IsValidCandidate::Yes => true, + IsValidCandidate::NotVisible => false, + IsValidCandidate::No => continue, + }; if !known_implemented { let goal = generic_implements_goal(db, env.clone(), t, &canonical_self_ty); if db.trait_solve(env.krate, goal.cast(Interner)).is_none() { @@ -974,7 +1034,7 @@ fn iterate_trait_method_candidates( } } known_implemented = true; - callback(receiver_adjustments.clone().unwrap_or_default(), item)?; + callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?; } } ControlFlow::Continue(()) @@ -987,7 +1047,7 @@ fn iterate_inherent_methods( receiver_ty: Option<&Ty>, receiver_adjustments: Option, visible_from_module: VisibleFromModule, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { let db = table.db; let env = table.trait_env.clone(); @@ -1076,7 +1136,7 @@ fn iterate_inherent_methods( name: Option<&Name>, receiver_ty: Option<&Ty>, receiver_adjustments: Option, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, traits: impl Iterator, ) -> ControlFlow<()> { let db = table.db; @@ -1084,9 +1144,13 @@ fn iterate_inherent_methods( let data = db.trait_data(t); for &(_, item) in data.items.iter() { // We don't pass `visible_from_module` as all trait items should be visible. 
- if is_valid_candidate(table, name, receiver_ty, item, self_ty, None) { - callback(receiver_adjustments.clone().unwrap_or_default(), item)?; - } + let visible = + match is_valid_candidate(table, name, receiver_ty, item, self_ty, None) { + IsValidCandidate::Yes => true, + IsValidCandidate::NotVisible => false, + IsValidCandidate::No => continue, + }; + callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?; } } ControlFlow::Continue(()) @@ -1100,17 +1164,25 @@ fn iterate_inherent_methods( receiver_ty: Option<&Ty>, receiver_adjustments: Option, visible_from_module: Option, - callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>, + callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId, bool) -> ControlFlow<()>, ) -> ControlFlow<()> { let db = table.db; let impls_for_self_ty = impls.for_self_ty(self_ty); for &impl_def in impls_for_self_ty { for &item in &db.impl_data(impl_def).items { - if !is_valid_candidate(table, name, receiver_ty, item, self_ty, visible_from_module) - { - continue; - } - callback(receiver_adjustments.clone().unwrap_or_default(), item)?; + let visible = match is_valid_candidate( + table, + name, + receiver_ty, + item, + self_ty, + visible_from_module, + ) { + IsValidCandidate::Yes => true, + IsValidCandidate::NotVisible => false, + IsValidCandidate::No => continue, + }; + callback(receiver_adjustments.clone().unwrap_or_default(), item, visible)?; } } ControlFlow::Continue(()) @@ -1139,7 +1211,7 @@ pub fn resolve_indexing_op( macro_rules! check_that { ($cond:expr) => { if !$cond { - return false; + return IsValidCandidate::No; } }; } @@ -1151,7 +1223,7 @@ fn is_valid_candidate( item: AssocItemId, self_ty: &Ty, visible_from_module: Option, -) -> bool { +) -> IsValidCandidate { let db = table.db; match item { AssocItemId::FunctionId(m) => { @@ -1162,31 +1234,37 @@ fn is_valid_candidate( check_that!(receiver_ty.is_none()); check_that!(name.map_or(true, |n| data.name.as_ref() == Some(n))); - check_that!(visible_from_module.map_or(true, |from_module| { - let v = db.const_visibility(c).is_visible_from(db.upcast(), from_module); - if !v { + + if let Some(from_module) = visible_from_module { + if !db.const_visibility(c).is_visible_from(db.upcast(), from_module) { cov_mark::hit!(const_candidate_not_visible); + return IsValidCandidate::NotVisible; } - v - })); + } if let ItemContainerId::ImplId(impl_id) = c.lookup(db.upcast()).container { let self_ty_matches = table.run_in_snapshot(|table| { let expected_self_ty = TyBuilder::impl_self_ty(db, impl_id) .fill_with_inference_vars(table) .build(); - table.unify(&expected_self_ty, &self_ty) + table.unify(&expected_self_ty, self_ty) }); if !self_ty_matches { cov_mark::hit!(const_candidate_self_type_mismatch); - return false; + return IsValidCandidate::No; } } - true + IsValidCandidate::Yes } - _ => false, + _ => IsValidCandidate::No, } } +enum IsValidCandidate { + Yes, + No, + NotVisible, +} + fn is_valid_fn_candidate( table: &mut InferenceTable<'_>, fn_id: FunctionId, @@ -1194,19 +1272,17 @@ fn is_valid_fn_candidate( receiver_ty: Option<&Ty>, self_ty: &Ty, visible_from_module: Option, -) -> bool { +) -> IsValidCandidate { let db = table.db; let data = db.function_data(fn_id); check_that!(name.map_or(true, |n| n == &data.name)); - check_that!(visible_from_module.map_or(true, |from_module| { - let v = db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module); - if !v { + if let Some(from_module) = visible_from_module { + if 
!db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module) { cov_mark::hit!(autoderef_candidate_not_visible); + return IsValidCandidate::NotVisible; } - v - })); - + } table.run_in_snapshot(|table| { let container = fn_id.lookup(db.upcast()).container; let (impl_subst, expect_self_ty) = match container { @@ -1245,7 +1321,7 @@ fn is_valid_fn_candidate( // We need to consider the bounds on the impl to distinguish functions of the same name // for a type. let predicates = db.generic_predicates(impl_id.into()); - predicates + let valid = predicates .iter() .map(|predicate| { let (p, b) = predicate @@ -1260,12 +1336,16 @@ fn is_valid_fn_candidate( // It's ok to get ambiguity here, as we may not have enough information to prove // obligations. We'll check if the user is calling the selected method properly // later anyway. - .all(|p| table.try_obligation(p.cast(Interner)).is_some()) + .all(|p| table.try_obligation(p.cast(Interner)).is_some()); + match valid { + true => IsValidCandidate::Yes, + false => IsValidCandidate::No, + } } else { // For `ItemContainerId::TraitId`, we check if `self_ty` implements the trait in // `iterate_trait_method_candidates()`. // For others, this function shouldn't be called. - true + IsValidCandidate::Yes } }) } diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs index ebbc5410147c6..ba5d9c2412670 100644 --- a/crates/hir-ty/src/tests.rs +++ b/crates/hir-ty/src/tests.rs @@ -94,18 +94,19 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour types.insert(file_range, expected.trim_start_matches("type: ").to_string()); } else if expected.starts_with("expected") { mismatches.insert(file_range, expected); - } else if expected.starts_with("adjustments: ") { + } else if expected.starts_with("adjustments:") { adjustments.insert( file_range, expected - .trim_start_matches("adjustments: ") + .trim_start_matches("adjustments:") + .trim() .split(',') .map(|it| it.trim().to_string()) .filter(|it| !it.is_empty()) .collect(), ); } else { - panic!("unexpected annotation: {}", expected); + panic!("unexpected annotation: {expected}"); } had_annotations = true; } @@ -176,17 +177,17 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour assert_eq!(actual, expected); } if let Some(expected) = adjustments.remove(&range) { - if let Some(adjustments) = inference_result.expr_adjustments.get(&expr) { - assert_eq!( - expected, - adjustments - .iter() - .map(|Adjustment { kind, .. }| format!("{:?}", kind)) - .collect::>() - ); - } else { - panic!("expected {:?} adjustments, found none", expected); - } + let adjustments = inference_result + .expr_adjustments + .get(&expr) + .map_or_else(Default::default, |it| &**it); + assert_eq!( + expected, + adjustments + .iter() + .map(|Adjustment { kind, .. 
}| format!("{kind:?}")) + .collect::>() + ); } } diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs index 7e3aecc2ae0ae..3e110abaf4b18 100644 --- a/crates/hir-ty/src/tests/coercion.rs +++ b/crates/hir-ty/src/tests/coercion.rs @@ -807,3 +807,37 @@ fn main() { "#, ); } + +#[test] +fn adjust_comparison_arguments() { + check_no_mismatches( + r" +//- minicore: eq +struct Struct; +impl core::cmp::PartialEq for Struct { + fn eq(&self, other: &Self) -> bool { true } +} +fn test() { + Struct == Struct; + // ^^^^^^ adjustments: Borrow(Ref(Not)) + // ^^^^^^ adjustments: Borrow(Ref(Not)) +}", + ); +} + +#[test] +fn adjust_assign_lhs() { + check_no_mismatches( + r" +//- minicore: add +struct Struct; +impl core::ops::AddAssign for Struct { + fn add_assign(&mut self, other: Self) {} +} +fn test() { + Struct += Struct; + // ^^^^^^ adjustments: Borrow(Ref(Mut)) + // ^^^^^^ adjustments: +}", + ); +} diff --git a/crates/hir-ty/src/tests/incremental.rs b/crates/hir-ty/src/tests/incremental.rs index 3e08e83e89a3e..073d6d9be2b9f 100644 --- a/crates/hir-ty/src/tests/incremental.rs +++ b/crates/hir-ty/src/tests/incremental.rs @@ -24,7 +24,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { db.infer(def); }); }); - assert!(format!("{:?}", events).contains("infer")) + assert!(format!("{events:?}").contains("infer")) } let new_text = " @@ -46,6 +46,6 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { db.infer(def); }); }); - assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) + assert!(!format!("{events:?}").contains("infer"), "{events:#?}") } } diff --git a/crates/hir-ty/src/tests/macros.rs b/crates/hir-ty/src/tests/macros.rs index b3adafaafd38d..8b75ec842a4f6 100644 --- a/crates/hir-ty/src/tests/macros.rs +++ b/crates/hir-ty/src/tests/macros.rs @@ -849,7 +849,7 @@ fn main() { //^^^^^^^^^^^^^^^^^ RegisterBlock } "#; - let fixture = format!("{}\n//- /foo.rs\n{}", fixture, data); + let fixture = format!("{fixture}\n//- /foo.rs\n{data}"); { let _b = bench("include macro"); diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs index 5d76d185ffc04..6c7a5329970d7 100644 --- a/crates/hir-ty/src/tests/method_resolution.rs +++ b/crates/hir-ty/src/tests/method_resolution.rs @@ -1867,3 +1867,53 @@ fn g(a: T) { "#, ); } + +#[test] +fn incoherent_impls() { + check( + r#" +//- minicore: error, send +pub struct Box(T); +use core::error::Error; + +#[rustc_allow_incoherent_impl] +impl dyn Error { + pub fn downcast(self: Box) -> Result, Box> { + loop {} + } +} +#[rustc_allow_incoherent_impl] +impl dyn Error + Send { + /// Attempts to downcast the box to a concrete type. 
+ pub fn downcast(self: Box) -> Result, Box> { + let err: Box = self; + // ^^^^ expected Box, got Box + // FIXME, type mismatch should not occur + ::downcast(err).map_err(|_| loop {}) + //^^^^^^^^^^^^^^^^^^^^^ type: fn downcast<{unknown}>(Box) -> Result, Box> + } +} +"#, + ); +} + +#[test] +fn fallback_private_methods() { + check( + r#" +mod module { + pub struct Struct; + + impl Struct { + fn func(&self) {} + } +} + +fn foo() { + let s = module::Struct; + s.func(); + //^^^^^^^^ type: () +} +"#, + ); +} diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs index 74de33117ee7d..9333e2693522b 100644 --- a/crates/hir-ty/src/tests/patterns.rs +++ b/crates/hir-ty/src/tests/patterns.rs @@ -1080,3 +1080,15 @@ fn my_fn(#[cfg(feature = "feature")] u8: u8, u32: u32) {} "#, ); } + +#[test] +fn var_args() { + check_types( + r#" +#[lang = "va_list"] +pub struct VaListImpl<'f>; +fn my_fn(foo: ...) {} + //^^^ VaListImpl +"#, + ); +} diff --git a/crates/hir-ty/src/tests/regression.rs b/crates/hir-ty/src/tests/regression.rs index 4e46397459d5d..de6ae7fff8fbd 100644 --- a/crates/hir-ty/src/tests/regression.rs +++ b/crates/hir-ty/src/tests/regression.rs @@ -1723,3 +1723,24 @@ fn bar() -> ControlFlow<(), ()> { "#, ); } + +#[test] +fn assoc_type_shorthand_with_gats_in_binders() { + // c.f. test `issue_4885()` + check_no_mismatches( + r#" +trait Gats { + type Assoc; +} +trait Foo {} + +struct Bar<'a, B: Gats, A> { + field: &'a dyn Foo>, +} + +fn foo(b: Bar) { + let _ = b.field; +} +"#, + ); +} diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index d7431443b83d5..146145523b242 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -2064,17 +2064,17 @@ fn fn_pointer_return() { fn block_modifiers_smoke_test() { check_infer( r#" -//- minicore: future +//- minicore: future, try async fn main() { let x = unsafe { 92 }; let y = async { async { () }.await }; - let z = try { () }; + let z: core::ops::ControlFlow<(), _> = try { () }; let w = const { 92 }; let t = 'a: { 92 }; } "#, expect![[r#" - 16..162 '{ ...2 }; }': () + 16..193 '{ ...2 }; }': () 26..27 'x': i32 30..43 'unsafe { 92 }': i32 30..43 'unsafe { 92 }': i32 @@ -2086,17 +2086,17 @@ async fn main() { 65..77 'async { () }': impl Future 65..83 'async ....await': () 73..75 '()': () - 95..96 'z': {unknown} - 99..109 'try { () }': () - 99..109 'try { () }': {unknown} - 105..107 '()': () - 119..120 'w': i32 - 123..135 'const { 92 }': i32 - 123..135 'const { 92 }': i32 - 131..133 '92': i32 - 145..146 't': i32 - 149..159 ''a: { 92 }': i32 - 155..157 '92': i32 + 95..96 'z': ControlFlow<(), ()> + 130..140 'try { () }': () + 130..140 'try { () }': ControlFlow<(), ()> + 136..138 '()': () + 150..151 'w': i32 + 154..166 'const { 92 }': i32 + 154..166 'const { 92 }': i32 + 162..164 '92': i32 + 176..177 't': i32 + 180..190 ''a: { 92 }': i32 + 186..188 '92': i32 "#]], ) } diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 3d7194b6f4468..d01fe0632859c 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -1388,6 +1388,22 @@ fn foo() -> (impl FnOnce(&str, T), impl Trait) { ); } +#[test] +fn return_pos_impl_trait_in_projection() { + // Note that the unused type param `X` is significant; see #13307. 
+ check_no_mismatches( + r#" +//- minicore: sized +trait Future { type Output; } +impl Future for () { type Output = i32; } +type Foo = (::Output, F); +fn foo() -> Foo> { + (0, ()) +} +"#, + ) +} + #[test] fn dyn_trait() { check_infer( @@ -4084,3 +4100,68 @@ where "#, ); } + +#[test] +fn bin_op_with_scalar_fallback() { + // Extra impls are significant so that chalk doesn't give us definite guidances. + check_types( + r#" +//- minicore: add +use core::ops::Add; + +struct Vec2(T, T); + +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} +impl Add for Vec2 { + type Output = Self; + fn add(self, rhs: Self) -> Self::Output { loop {} } +} + +fn test() { + let a = Vec2(1, 2); + let b = Vec2(3, 4); + let c = a + b; + //^ Vec2 + let a = Vec2(1., 2.); + let b = Vec2(3., 4.); + let c = a + b; + //^ Vec2 +} +"#, + ); +} + +#[test] +fn trait_method_with_scalar_fallback() { + check_types( + r#" +trait Trait { + type Output; + fn foo(&self) -> Self::Output; +} +impl Trait for T { + type Output = T; + fn foo(&self) -> Self::Output { loop {} } +} +fn test() { + let a = 42; + let b = a.foo(); + //^ i32 + let a = 3.14; + let b = a.foo(); + //^ f64 +} +"#, + ); +} diff --git a/crates/hir-ty/src/tls.rs b/crates/hir-ty/src/tls.rs index 92711a24fe39f..b7e6ee6740be7 100644 --- a/crates/hir-ty/src/tls.rs +++ b/crates/hir-ty/src/tls.rs @@ -67,12 +67,12 @@ impl DebugContext<'_> { let trait_ref = projection_ty.trait_ref(self.0); let trait_params = trait_ref.substitution.as_slice(Interner); let self_ty = trait_ref.self_type_parameter(Interner); - write!(fmt, "<{:?} as {}", self_ty, trait_name)?; + write!(fmt, "<{self_ty:?} as {trait_name}")?; if trait_params.len() > 1 { write!( fmt, "<{}>", - trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))), + trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))), )?; } write!(fmt, ">::{}", type_alias_data.name)?; @@ -83,7 +83,7 @@ impl DebugContext<'_> { write!( fmt, "<{}>", - proj_params.iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))), + proj_params.iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))), )?; } @@ -105,9 +105,9 @@ impl DebugContext<'_> { } }; match def { - CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name), + CallableDefId::FunctionId(_) => write!(fmt, "{{fn {name}}}"), CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => { - write!(fmt, "{{ctor {}}}", name) + write!(fmt, "{{ctor {name}}}") } } } diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs index c425f35acfe7e..778a6b82047ef 100644 --- a/crates/hir-ty/src/traits.rs +++ b/crates/hir-ty/src/traits.rs @@ -18,7 +18,7 @@ use crate::{ }; /// This controls how much 'time' we give the Chalk solver before giving up. 
-const CHALK_SOLVER_FUEL: i32 = 100; +const CHALK_SOLVER_FUEL: i32 = 1000; #[derive(Debug, Copy, Clone)] pub(crate) struct ChalkContext<'a> { @@ -55,13 +55,10 @@ impl TraitEnvironment { } } - pub fn traits_in_scope_from_clauses<'a>( - &'a self, - ty: Ty, - ) -> impl Iterator + 'a { + pub fn traits_in_scope_from_clauses(&self, ty: Ty) -> impl Iterator + '_ { self.traits_from_clauses .iter() - .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then(|| *trait_id)) + .filter_map(move |(self_ty, trait_id)| (*self_ty == ty).then_some(*trait_id)) } } @@ -130,7 +127,7 @@ fn solve( let mut solve = || { let _ctx = if is_chalk_debug() || is_chalk_print() { - Some(panic_context::enter(format!("solving {:?}", goal))) + Some(panic_context::enter(format!("solving {goal:?}"))) } else { None }; diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index e54bcb421a222..9893566bd549c 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -17,7 +17,7 @@ use hir_def::{ ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, }; -use hir_expand::name::{known, Name}; +use hir_expand::name::Name; use itertools::Either; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; @@ -184,9 +184,7 @@ pub(crate) struct Generics { } impl Generics { - pub(crate) fn iter_id<'a>( - &'a self, - ) -> impl Iterator> + 'a { + pub(crate) fn iter_id(&self) -> impl Iterator> + '_ { self.iter().map(|(id, data)| match data { TypeOrConstParamData::TypeParamData(_) => Either::Left(TypeParamId::from_unchecked(id)), TypeOrConstParamData::ConstParamData(_) => { @@ -216,9 +214,9 @@ impl Generics { } /// Iterator over types and const params of parent. - pub(crate) fn iter_parent<'a>( - &'a self, - ) -> impl DoubleEndedIterator + 'a { + pub(crate) fn iter_parent( + &self, + ) -> impl DoubleEndedIterator { self.parent_generics().into_iter().flat_map(|it| { let to_toc_id = move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p); @@ -335,54 +333,18 @@ pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool { // Function in an `extern` block are always unsafe to call, except when it has // `"rust-intrinsic"` ABI there are a few exceptions. let id = block.lookup(db.upcast()).id; - !matches!( - id.item_tree(db.upcast())[id.value].abi.as_deref(), - Some("rust-intrinsic") if !is_intrinsic_fn_unsafe(&data.name) - ) + + let is_intrinsic = + id.item_tree(db.upcast())[id.value].abi.as_deref() == Some("rust-intrinsic"); + + if is_intrinsic { + // Intrinsics are unsafe unless they have the rustc_safe_intrinsic attribute + !data.attrs.by_key("rustc_safe_intrinsic").exists() + } else { + // Extern items are always unsafe + true + } } _ => false, } } - -/// Returns `true` if the given intrinsic is unsafe to call, or false otherwise. 
-fn is_intrinsic_fn_unsafe(name: &Name) -> bool { - // Should be kept in sync with https://github.com/rust-lang/rust/blob/532d2b14c05f9bc20b2d27cbb5f4550d28343a36/compiler/rustc_typeck/src/check/intrinsic.rs#L72-L106 - ![ - known::abort, - known::add_with_overflow, - known::bitreverse, - known::black_box, - known::bswap, - known::caller_location, - known::ctlz, - known::ctpop, - known::cttz, - known::discriminant_value, - known::forget, - known::likely, - known::maxnumf32, - known::maxnumf64, - known::min_align_of, - known::minnumf32, - known::minnumf64, - known::mul_with_overflow, - known::needs_drop, - known::ptr_guaranteed_eq, - known::ptr_guaranteed_ne, - known::rotate_left, - known::rotate_right, - known::rustc_peek, - known::saturating_add, - known::saturating_sub, - known::size_of, - known::sub_with_overflow, - known::type_id, - known::type_name, - known::unlikely, - known::variant_count, - known::wrapping_add, - known::wrapping_mul, - known::wrapping_sub, - ] - .contains(name) -} diff --git a/crates/hir/src/attrs.rs b/crates/hir/src/attrs.rs index 0bd3793400109..54425d69b6b7f 100644 --- a/crates/hir/src/attrs.rs +++ b/crates/hir/src/attrs.rs @@ -148,7 +148,7 @@ fn resolve_doc_path( let modpath = { // FIXME: this is not how we should get a mod path here - let ast_path = ast::SourceFile::parse(&format!("type T = {};", link)) + let ast_path = ast::SourceFile::parse(&format!("type T = {link};")) .syntax_node() .descendants() .find_map(ast::Path::cast)?; diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index c5dc60f1ec5f9..54d43fa8dc73b 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -10,7 +10,7 @@ use hir_def::path::ModPath; use hir_expand::{name::Name, HirFileId, InFile}; use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange}; -use crate::{MacroKind, Type}; +use crate::{AssocItem, Field, MacroKind, Type}; macro_rules! diagnostics { ($($diag:ident,)*) => { @@ -41,6 +41,8 @@ diagnostics![ MissingMatchArms, MissingUnsafe, NoSuchField, + PrivateAssocItem, + PrivateField, ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro, @@ -121,6 +123,19 @@ pub struct NoSuchField { pub field: InFile>, } +#[derive(Debug)] +pub struct PrivateAssocItem { + pub expr_or_pat: + InFile, Either, AstPtr>>>, + pub item: AssocItem, +} + +#[derive(Debug)] +pub struct PrivateField { + pub expr: InFile>, + pub field: Field, +} + #[derive(Debug)] pub struct BreakOutsideOfLoop { pub expr: InFile>, diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 27b2f445d73ca..5a4b2f3344968 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -79,7 +79,7 @@ impl HirDisplay for Function { } } match name { - Some(name) => write!(f, "{}: ", name)?, + Some(name) => write!(f, "{name}: ")?, None => f.write_str("_: ")?, } // FIXME: Use resolved `param.ty` or raw `type_ref`? 
@@ -327,7 +327,7 @@ fn write_generic_params( continue; } delim(f)?; - write!(f, "{}", name)?; + write!(f, "{name}")?; if let Some(default) = &ty.default { f.write_str(" = ")?; default.hir_fmt(f)?; @@ -335,7 +335,7 @@ fn write_generic_params( } TypeOrConstParamData::ConstParamData(c) => { delim(f)?; - write!(f, "const {}: ", name)?; + write!(f, "const {name}: ")?; c.ty.hir_fmt(f)?; } } @@ -372,7 +372,7 @@ fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f), WherePredicateTypeTarget::TypeOrConstParam(id) => { match ¶ms.type_or_consts[*id].name() { - Some(name) => write!(f, "{}", name), + Some(name) => write!(f, "{name}"), None => f.write_str("{unnamed}"), } } @@ -424,7 +424,7 @@ fn write_where_clause(def: GenericDefId, f: &mut HirFormatter<'_>) -> Result<(), if idx != 0 { f.write_str(", ")?; } - write!(f, "{}", lifetime)?; + write!(f, "{lifetime}")?; } f.write_str("> ")?; write_target(target, f)?; @@ -447,7 +447,7 @@ impl HirDisplay for Const { let data = f.db.const_data(self.id); f.write_str("const ")?; match &data.name { - Some(name) => write!(f, "{}: ", name)?, + Some(name) => write!(f, "{name}: ")?, None => f.write_str("_: ")?, } data.type_ref.hir_fmt(f)?; @@ -511,9 +511,9 @@ impl HirDisplay for Module { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { // FIXME: Module doesn't have visibility saved in data. match self.name(f.db) { - Some(name) => write!(f, "mod {}", name), + Some(name) => write!(f, "mod {name}"), None if self.is_crate_root(f.db) => match self.krate(f.db).display_name(f.db) { - Some(name) => write!(f, "extern crate {}", name), + Some(name) => write!(f, "extern crate {name}"), None => f.write_str("extern crate {unknown}"), }, None => f.write_str("mod {unnamed}"), diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index cbbcaebb42855..08fd4453dfca5 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -39,12 +39,13 @@ use arrayvec::ArrayVec; use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind}; use either::Either; use hir_def::{ - adt::{ReprData, VariantData}, + adt::VariantData, body::{BodyDiagnostic, SyntheticSyntax}, - expr::{BindingAnnotation, LabelId, Pat, PatId}, + expr::{BindingAnnotation, ExprOrPatId, LabelId, Pat, PatId}, generics::{TypeOrConstParamData, TypeParamProvenance}, item_tree::ItemTreeNode, lang_item::LangItemTarget, + layout::{Layout, LayoutError, ReprOptions}, nameres::{self, diagnostics::DefDiagnostic}, per_ns::PerNs, resolver::{HasResolver, Resolver}, @@ -59,6 +60,7 @@ use hir_ty::{ all_super_traits, autoderef, consteval::{unknown_const_as_generic, ComputedExpr, ConstEvalError, ConstExt}, diagnostics::BodyValidationDiagnostic, + layout::layout_of_ty, method_resolution::{self, TyFingerprint}, primitive::UintTy, traits::FnTrait, @@ -72,7 +74,7 @@ use once_cell::unsync::Lazy; use rustc_hash::FxHashSet; use stdx::{impl_from, never}; use syntax::{ - ast::{self, Expr, HasAttrs as _, HasDocComments, HasName}, + ast::{self, HasAttrs as _, HasDocComments, HasName}, AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T, }; @@ -83,9 +85,10 @@ pub use crate::{ diagnostics::{ AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms, - MissingUnsafe, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch, - UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedImport, 
UnresolvedMacroCall, - UnresolvedModule, UnresolvedProcMacro, + MissingUnsafe, NoSuchField, PrivateAssocItem, PrivateField, + ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro, + UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, UnresolvedModule, + UnresolvedProcMacro, }, has_source::HasSource, semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits}, @@ -112,12 +115,20 @@ pub use { path::{ModPath, PathKind}, type_ref::{Mutability, TypeRef}, visibility::Visibility, + // FIXME: This is here since it is input of a method in `HirWrite` + // and things outside of hir need to implement that trait. We probably + // should move whole `hir_ty::display` to this crate so we will become + // able to use `ModuleDef` or `Definition` instead of `ModuleDefId`. + ModuleDefId, }, hir_expand::{ name::{known, Name}, ExpandResult, HirFileId, InFile, MacroFile, Origin, }, - hir_ty::{display::HirDisplay, PointerCast, Safety}, + hir_ty::{ + display::{HirDisplay, HirWrite}, + PointerCast, Safety, + }, }; // These are negative re-exports: pub using these names is forbidden, they @@ -597,7 +608,7 @@ impl Module { pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec { let def_map = self.id.def_map(db.upcast()); let scope = &def_map[self.id.local_id].scope; - scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| MacroId::from(it).into()).collect() + scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| it.into()).collect() } pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec { @@ -803,7 +814,7 @@ fn precise_macro_call_location( .doc_comments_and_attrs() .nth((*invoc_attr_index) as usize) .and_then(Either::left) - .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index)); + .unwrap_or_else(|| panic!("cannot find attribute #{invoc_attr_index}")); ( ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))), @@ -844,6 +855,10 @@ impl Field { self.parent.variant_data(db).fields()[self.id].name.clone() } + pub fn index(&self) -> usize { + u32::from(self.id.into_raw()) as usize + } + /// Returns the type as in the signature of the struct (i.e., with /// placeholder types for type parameters). Only use this in the context of /// the field definition. 
@@ -859,6 +874,10 @@ impl Field { Type::new(db, var_id, ty) } + pub fn layout(&self, db: &dyn HirDatabase) -> Result { + layout_of_ty(db, &self.ty(db).ty, self.parent.module(db).krate().into()) + } + pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef { self.parent } @@ -900,7 +919,7 @@ impl Struct { Type::from_def(db, self.id) } - pub fn repr(self, db: &dyn HirDatabase) -> Option { + pub fn repr(self, db: &dyn HirDatabase) -> Option { db.struct_data(self.id).repr.clone() } @@ -984,8 +1003,30 @@ impl Enum { Type::new_for_crate( self.id.lookup(db.upcast()).container.krate(), TyBuilder::builtin(match db.enum_data(self.id).variant_body_type() { - Either::Left(builtin) => hir_def::builtin_type::BuiltinType::Int(builtin), - Either::Right(builtin) => hir_def::builtin_type::BuiltinType::Uint(builtin), + hir_def::layout::IntegerType::Pointer(sign) => match sign { + true => hir_def::builtin_type::BuiltinType::Int( + hir_def::builtin_type::BuiltinInt::Isize, + ), + false => hir_def::builtin_type::BuiltinType::Uint( + hir_def::builtin_type::BuiltinUint::Usize, + ), + }, + hir_def::layout::IntegerType::Fixed(i, sign) => match sign { + true => hir_def::builtin_type::BuiltinType::Int(match i { + hir_def::layout::Integer::I8 => hir_def::builtin_type::BuiltinInt::I8, + hir_def::layout::Integer::I16 => hir_def::builtin_type::BuiltinInt::I16, + hir_def::layout::Integer::I32 => hir_def::builtin_type::BuiltinInt::I32, + hir_def::layout::Integer::I64 => hir_def::builtin_type::BuiltinInt::I64, + hir_def::layout::Integer::I128 => hir_def::builtin_type::BuiltinInt::I128, + }), + false => hir_def::builtin_type::BuiltinType::Uint(match i { + hir_def::layout::Integer::I8 => hir_def::builtin_type::BuiltinUint::U8, + hir_def::layout::Integer::I16 => hir_def::builtin_type::BuiltinUint::U16, + hir_def::layout::Integer::I32 => hir_def::builtin_type::BuiltinUint::U32, + hir_def::layout::Integer::I64 => hir_def::builtin_type::BuiltinUint::U64, + hir_def::layout::Integer::I128 => hir_def::builtin_type::BuiltinUint::U128, + }), + }, }), ) } @@ -1042,7 +1083,7 @@ impl Variant { db.enum_data(self.parent.id).variants[self.id].variant_data.clone() } - pub fn value(self, db: &dyn HirDatabase) -> Option { + pub fn value(self, db: &dyn HirDatabase) -> Option { self.source(db)?.value.expr() } @@ -1076,6 +1117,13 @@ impl Adt { }) } + pub fn layout(self, db: &dyn HirDatabase) -> Result { + if db.generic_params(self.into()).iter().count() != 0 { + return Err(LayoutError::HasPlaceholder); + } + db.layout_of_adt(self.into(), Substitution::empty(Interner)) + } + /// Turns this ADT into a type. Any type parameters of the ADT will be /// turned into unknown types, which is good for e.g. finding the most /// general set of completions, but will not look very nice when printed. 
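Illustration only, not part of this patch: the new `Field::layout` and `Adt::layout` entry points surface size and alignment facts computed by `layout_of_ty`/`layout_of_adt`. A stand-alone sketch of the kind of information involved, using plain `core::mem` queries instead of the `Layout`/`LayoutError` types used above:

#[repr(C)]
struct Header {
    tag: u8,  // offset 0
    len: u32, // offset 4, after 3 bytes of padding
}

fn main() {
    // For a #[repr(C)] struct the layout is fully determined: 8 bytes, 4-byte aligned.
    assert_eq!(core::mem::size_of::<Header>(), 8);
    assert_eq!(core::mem::align_of::<Header>(), 4);
}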
@@ -1306,6 +1354,25 @@ impl DefWithBody { Err(SyntheticSyntax) => (), } } + &hir_ty::InferenceDiagnostic::PrivateField { expr, field } => { + let expr = source_map.expr_syntax(expr).expect("unexpected synthetic"); + let field = field.into(); + acc.push(PrivateField { expr, field }.into()) + } + &hir_ty::InferenceDiagnostic::PrivateAssocItem { id, item } => { + let expr_or_pat = match id { + ExprOrPatId::ExprId(expr) => source_map + .expr_syntax(expr) + .expect("unexpected synthetic") + .map(Either::Left), + ExprOrPatId::PatId(pat) => source_map + .pat_syntax(pat) + .expect("unexpected synthetic") + .map(Either::Right), + }; + let item = item.into(); + acc.push(PrivateAssocItem { expr_or_pat, item }.into()) + } } } for (expr, mismatch) in infer.expr_type_mismatches() { @@ -1492,7 +1559,7 @@ impl Function { } pub fn self_param(self, db: &dyn HirDatabase) -> Option { - self.has_self_param(db).then(|| SelfParam { func: self.id }) + self.has_self_param(db).then_some(SelfParam { func: self.id }) } pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec { @@ -2344,17 +2411,19 @@ pub struct DeriveHelper { impl DeriveHelper { pub fn derive(&self) -> Macro { - Macro { id: self.derive.into() } + Macro { id: self.derive } } pub fn name(&self, db: &dyn HirDatabase) -> Name { match self.derive { - MacroId::Macro2Id(_) => None, + MacroId::Macro2Id(it) => { + db.macro2_data(it).helpers.as_deref().and_then(|it| it.get(self.idx)).cloned() + } MacroId::MacroRulesId(_) => None, MacroId::ProcMacroId(proc_macro) => db .proc_macro_data(proc_macro) .helpers - .as_ref() + .as_deref() .and_then(|it| it.get(self.idx)) .cloned(), } @@ -2712,7 +2781,7 @@ impl Impl { pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec { let krate = trait_.module(db).krate(); let mut all = Vec::new(); - for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() { + for Crate { id } in krate.transitive_reverse_dependencies(db) { let impls = db.trait_impls_in_crate(id); all.extend(impls.for_trait(trait_.id).map(Self::from)) } @@ -2857,6 +2926,13 @@ impl Type { matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize))) } + pub fn is_int_or_uint(&self) -> bool { + match self.ty.kind(Interner) { + TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)) => true, + _ => false, + } + } + pub fn remove_ref(&self) -> Option { match &self.ty.kind(Interner) { TyKind::Ref(.., ty) => Some(self.derived(ty.clone())), @@ -3031,7 +3107,7 @@ impl Type { let adt = adt_id.into(); match adt { - Adt::Struct(s) => matches!(s.repr(db), Some(ReprData { packed: true, .. })), + Adt::Struct(s) => s.repr(db).unwrap_or_default().pack.is_some(), _ => false, } } @@ -3225,7 +3301,7 @@ impl Type { with_local_impls.and_then(|b| b.id.containing_block()).into(), name, method_resolution::LookupMode::MethodCall, - &mut |_adj, id| callback(id), + &mut |_adj, id, _| callback(id), ); } @@ -3650,6 +3726,13 @@ impl From for ScopeDef { } } +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Adjustment { + pub source: Type, + pub target: Type, + pub kind: Adjust, +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub enum Adjust { /// Go from ! to any type. 
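A short aside, not part of this patch: the new `Adjustment { source, target, kind }` struct records each implicit coercion step together with its before and after types. A stand-alone sketch, with a made-up type `S`, of the most common case, the autoref inserted for a `&self` method call, which is what the `adjustments: Borrow(Ref(Not))` annotations in the coercion tests above check:

struct S;

impl S {
    fn len(&self) -> usize {
        0
    }
}

fn main() {
    let s = S;
    // `s.len()` is sugar for `S::len(&s)`: the receiver expression `s` is adjusted
    // from `S` (source) to `&S` (target) by a shared borrow, i.e. `Borrow(Ref(Not))`.
    let _ = s.len();
}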
diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index 2e1f88ba09043..e0d26103915c0 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -2,14 +2,17 @@ mod source_to_def; -use std::{cell::RefCell, fmt, iter, ops}; +use std::{cell::RefCell, fmt, iter, mem, ops}; use base_db::{FileId, FileRange}; +use either::Either; use hir_def::{ - body, macro_id_to_def_id, + body, + expr::Expr, + macro_id_to_def_id, resolver::{self, HasResolver, Resolver, TypeNs}, type_ref::Mutability, - AsMacroCall, FunctionId, MacroId, TraitId, VariantId, + AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId, }; use hir_expand::{ db::AstDatabase, @@ -29,7 +32,7 @@ use crate::{ db::HirDatabase, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, source_analyzer::{resolve_hir_path, SourceAnalyzer}, - Access, Adjust, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, + Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef, @@ -334,7 +337,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { self.imp.resolve_trait(trait_) } - pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option> { + pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option> { self.imp.expr_adjustments(expr) } @@ -438,8 +441,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { } pub fn to_def(&self, src: &T) -> Option { - let src = self.imp.find_file(src.syntax()).with_value(src).cloned(); - T::to_def(&self.imp, src) + self.imp.to_def(src) } pub fn to_module_def(&self, file: FileId) -> Option { @@ -481,6 +483,11 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool { self.imp.is_unsafe_ident_pat(ident_pat) } + + /// Returns `true` if the `node` is inside an `unsafe` context. 
+ pub fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool { + self.imp.is_inside_unsafe(expr) + } } impl<'db> SemanticsImpl<'db> { @@ -788,7 +795,7 @@ impl<'db> SemanticsImpl<'db> { // requeue the tokens we got from mapping our current token down stack.extend(mapped_tokens); // if the length changed we have found a mapping for the token - (stack.len() != len).then(|| ()) + (stack.len() != len).then_some(()) }; // Remap the next token in the queue into a macro call its in, if it is not being remapped @@ -840,7 +847,7 @@ impl<'db> SemanticsImpl<'db> { } }; process_expansion_for_token(&mut stack, file_id, None, token.as_ref()) - } else if let Some(meta) = ast::Meta::cast(parent.clone()) { + } else if let Some(meta) = ast::Meta::cast(parent) { // attribute we failed expansion for earlier, this might be a derive invocation // or derive helper attribute let attr = meta.parent_attr()?; @@ -1067,26 +1074,42 @@ impl<'db> SemanticsImpl<'db> { } } - fn expr_adjustments(&self, expr: &ast::Expr) -> Option> { + fn expr_adjustments(&self, expr: &ast::Expr) -> Option> { let mutability = |m| match m { hir_ty::Mutability::Not => Mutability::Shared, hir_ty::Mutability::Mut => Mutability::Mut, }; - self.analyze(expr.syntax())?.expr_adjustments(self.db, expr).map(|it| { + + let analyzer = self.analyze(expr.syntax())?; + + let (mut source_ty, _) = analyzer.type_of_expr(self.db, expr)?; + + analyzer.expr_adjustments(self.db, expr).map(|it| { it.iter() - .map(|adjust| match adjust.kind { - hir_ty::Adjust::NeverToAny => Adjust::NeverToAny, - hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => { - Adjust::Deref(Some(OverloadedDeref(mutability(m)))) - } - hir_ty::Adjust::Deref(None) => Adjust::Deref(None), - hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => { - Adjust::Borrow(AutoBorrow::RawPtr(mutability(m))) - } - hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => { - Adjust::Borrow(AutoBorrow::Ref(mutability(m))) - } - hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc), + .map(|adjust| { + let target = + Type::new_with_resolver(self.db, &analyzer.resolver, adjust.target.clone()); + let kind = match adjust.kind { + hir_ty::Adjust::NeverToAny => Adjust::NeverToAny, + hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => { + Adjust::Deref(Some(OverloadedDeref(mutability(m)))) + } + hir_ty::Adjust::Deref(None) => Adjust::Deref(None), + hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => { + Adjust::Borrow(AutoBorrow::RawPtr(mutability(m))) + } + hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => { + Adjust::Borrow(AutoBorrow::Ref(mutability(m))) + } + hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc), + }; + + // Update `source_ty` for the next adjustment + let source = mem::replace(&mut source_ty, target.clone()); + + let adjustment = Adjustment { source, target, kind }; + + adjustment }) .collect() }) @@ -1198,7 +1221,7 @@ impl<'db> SemanticsImpl<'db> { krate .dependencies(self.db) .into_iter() - .find_map(|dep| (dep.name == name).then(|| dep.krate)) + .find_map(|dep| (dep.name == name).then_some(dep.krate)) } fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option { @@ -1223,10 +1246,15 @@ impl<'db> SemanticsImpl<'db> { fn with_ctx) -> T, T>(&self, f: F) -> T { let mut cache = self.s2d_cache.borrow_mut(); - let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache }; + let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache }; f(&mut ctx) } + fn to_def(&self, src: &T) -> Option { + let src = self.find_file(src.syntax()).with_value(src).cloned(); + T::to_def(self, 
src) + } + fn to_module_def(&self, file: FileId) -> impl Iterator { self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from) } @@ -1350,7 +1378,7 @@ impl<'db> SemanticsImpl<'db> { self.cache .borrow() .keys() - .map(|it| format!("{:?}", it)) + .map(|it| format!("{it:?}")) .collect::>() .join(", ") ) @@ -1442,6 +1470,56 @@ impl<'db> SemanticsImpl<'db> { .map(|ty| ty.original.is_packed(self.db)) .unwrap_or(false) } + + fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool { + let item_or_variant = |ancestor: SyntaxNode| { + if ast::Item::can_cast(ancestor.kind()) { + ast::Item::cast(ancestor).map(Either::Left) + } else { + ast::Variant::cast(ancestor).map(Either::Right) + } + }; + let Some(enclosing_item) = expr.syntax().ancestors().find_map(item_or_variant) else { return false }; + + let def = match &enclosing_item { + Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true, + Either::Left(ast::Item::Fn(it)) => { + self.to_def(it).map(<_>::into).map(DefWithBodyId::FunctionId) + } + Either::Left(ast::Item::Const(it)) => { + self.to_def(it).map(<_>::into).map(DefWithBodyId::ConstId) + } + Either::Left(ast::Item::Static(it)) => { + self.to_def(it).map(<_>::into).map(DefWithBodyId::StaticId) + } + Either::Left(_) => None, + Either::Right(it) => self.to_def(it).map(<_>::into).map(DefWithBodyId::VariantId), + }; + let Some(def) = def else { return false }; + let enclosing_node = enclosing_item.as_ref().either(|i| i.syntax(), |v| v.syntax()); + + let (body, source_map) = self.db.body_with_source_map(def); + + let file_id = self.find_file(expr.syntax()).file_id; + + let Some(mut parent) = expr.syntax().parent() else { return false }; + loop { + if &parent == enclosing_node { + break false; + } + + if let Some(parent) = ast::Expr::cast(parent.clone()) { + if let Some(expr_id) = source_map.node_expr(InFile { file_id, value: &parent }) { + if let Expr::Unsafe { .. } = body[expr_id] { + break true; + } + } + } + + let Some(parent_) = parent.parent() else { break false }; + parent = parent_; + } + } } fn macro_call_to_macro_id( @@ -1600,7 +1678,7 @@ impl<'a> SemanticsScope<'a> { self.db, def, resolution.in_type_ns()?, - |name, _, id| cb(name, id.into()), + |name, id| cb(name, id.into()), ) } } diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 91ea1c24d14f8..059b80bcf1392 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -21,8 +21,8 @@ use hir_def::{ path::{ModPath, Path, PathKind}, resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, type_ref::Mutability, - AsMacroCall, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, LocalFieldId, - Lookup, ModuleDefId, TraitId, VariantId, + AsMacroCall, AssocItemId, ConstId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, + LocalFieldId, Lookup, ModuleDefId, TraitId, VariantId, }; use hir_expand::{ builtin_fn_macro::BuiltinFnLikeExpander, @@ -118,7 +118,7 @@ impl SourceAnalyzer { fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option { let src = match expr { ast::Expr::MacroExpr(expr) => { - self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?.clone()))? + self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?))? 
} _ => InFile::new(self.file_id, expr.clone()), }; @@ -228,7 +228,7 @@ impl SourceAnalyzer { db: &dyn HirDatabase, pat: &ast::Pat, ) -> Option> { - let pat_id = self.pat_id(&pat)?; + let pat_id = self.pat_id(pat)?; let infer = self.infer.as_ref()?; Some( infer @@ -270,7 +270,7 @@ impl SourceAnalyzer { db: &dyn HirDatabase, await_expr: &ast::AwaitExpr, ) -> Option { - let mut ty = self.ty_of_expr(db, &await_expr.expr()?.into())?.clone(); + let mut ty = self.ty_of_expr(db, &await_expr.expr()?)?.clone(); let into_future_trait = self .resolver @@ -316,7 +316,7 @@ impl SourceAnalyzer { ast::UnaryOp::Not => name![not], ast::UnaryOp::Neg => name![neg], }; - let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?; + let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?; let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?; // HACK: subst for all methods coincides with that for their trait because the methods @@ -331,8 +331,8 @@ impl SourceAnalyzer { db: &dyn HirDatabase, index_expr: &ast::IndexExpr, ) -> Option { - let base_ty = self.ty_of_expr(db, &index_expr.base()?.into())?; - let index_ty = self.ty_of_expr(db, &index_expr.index()?.into())?; + let base_ty = self.ty_of_expr(db, &index_expr.base()?)?; + let index_ty = self.ty_of_expr(db, &index_expr.index()?)?; let lang_item_name = name![index]; @@ -352,8 +352,8 @@ impl SourceAnalyzer { binop_expr: &ast::BinExpr, ) -> Option { let op = binop_expr.op_kind()?; - let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?; - let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?; + let lhs = self.ty_of_expr(db, &binop_expr.lhs()?)?; + let rhs = self.ty_of_expr(db, &binop_expr.rhs()?)?; let (op_trait, op_fn) = lang_names_for_bin_op(op) .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?; @@ -372,7 +372,7 @@ impl SourceAnalyzer { db: &dyn HirDatabase, try_expr: &ast::TryExpr, ) -> Option { - let ty = self.ty_of_expr(db, &try_expr.expr()?.into())?; + let ty = self.ty_of_expr(db, &try_expr.expr()?)?; let op_fn = db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?; @@ -482,7 +482,7 @@ impl SourceAnalyzer { let infer = self.infer.as_deref()?; if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) { let expr_id = self.expr_id(db, &path_expr.into())?; - if let Some(assoc) = infer.assoc_resolutions_for_expr(expr_id) { + if let Some((assoc, subs)) = infer.assoc_resolutions_for_expr(expr_id) { let assoc = match assoc { AssocItemId::FunctionId(f_in_trait) => { match infer.type_of_expr.get(expr_id) { @@ -501,7 +501,9 @@ impl SourceAnalyzer { } } } - + AssocItemId::ConstId(const_id) => { + self.resolve_impl_const_or_trait_def(db, const_id, subs).into() + } _ => assoc, }; @@ -515,7 +517,7 @@ impl SourceAnalyzer { prefer_value_ns = true; } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) { let pat_id = self.pat_id(&path_pat.into())?; - if let Some(assoc) = infer.assoc_resolutions_for_pat(pat_id) { + if let Some((assoc, _)) = infer.assoc_resolutions_for_pat(pat_id) { return Some(PathResolution::Def(AssocItem::from(assoc).into())); } if let Some(VariantId::EnumVariantId(variant)) = @@ -792,6 +794,24 @@ impl SourceAnalyzer { method_resolution::lookup_impl_method(db, env, func, substs) } + fn resolve_impl_const_or_trait_def( + &self, + db: &dyn HirDatabase, + const_id: ConstId, + subs: Substitution, + ) -> ConstId { + let krate = self.resolver.krate(); + let owner = match self.resolver.body_owner() { + Some(it) => it, + None => return const_id, + }; + let env = 
owner.as_generic_def_id().map_or_else( + || Arc::new(hir_ty::TraitEnvironment::empty(krate)), + |d| db.trait_environment(d), + ); + method_resolution::lookup_impl_const(db, env, const_id, subs) + } + fn lang_trait_fn( &self, db: &dyn HirDatabase, @@ -804,7 +824,7 @@ impl SourceAnalyzer { } fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> { - self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, &expr)?) + self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, expr)?) } } @@ -967,7 +987,7 @@ fn resolve_hir_path_( db, def, res.in_type_ns()?, - |name, _, id| (name == unresolved.name).then(|| id), + |name, id| (name == unresolved.name).then_some(id), ) }) .map(TypeAlias::from) diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml index e781c0a016d5a..b9260473b12d7 100644 --- a/crates/ide-assists/Cargo.toml +++ b/crates/ide-assists/Cargo.toml @@ -14,6 +14,7 @@ cov-mark = "2.0.0-pre.1" itertools = "0.10.5" either = "1.7.0" +smallvec = "1.10.0" stdx = { path = "../stdx", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } diff --git a/crates/ide-assists/src/handlers/add_explicit_type.rs b/crates/ide-assists/src/handlers/add_explicit_type.rs index b5f99726fe1c8..0057f439f1af4 100644 --- a/crates/ide-assists/src/handlers/add_explicit_type.rs +++ b/crates/ide-assists/src/handlers/add_explicit_type.rs @@ -47,7 +47,10 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> O // Don't enable the assist if there is a type ascription without any placeholders if let Some(ty) = &ascribed_ty { let mut contains_infer_ty = false; - walk_ty(ty, &mut |ty| contains_infer_ty |= matches!(ty, ast::Type::InferType(_))); + walk_ty(ty, &mut |ty| { + contains_infer_ty |= matches!(ty, ast::Type::InferType(_)); + false + }); if !contains_infer_ty { cov_mark::hit!(add_explicit_type_not_applicable_if_ty_already_specified); return None; diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 2b3793659cf7d..161bcc5c8da5f 100644 --- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -107,6 +107,14 @@ fn add_missing_impl_members_inner( ) -> Option<()> { let _p = profile::span("add_missing_impl_members_inner"); let impl_def = ctx.find_node_at_offset::()?; + + if ctx.token_at_offset().all(|t| { + t.parent_ancestors() + .any(|s| ast::BlockExpr::can_cast(s.kind()) || ast::ParamList::can_cast(s.kind())) + }) { + return None; + } + let target_scope = ctx.sema.scope(impl_def.syntax())?; let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?; @@ -1343,4 +1351,95 @@ impl PartialEq for SomeStruct { "#, ); } + + #[test] + fn test_ignore_function_body() { + check_assist_not_applicable( + add_missing_default_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self) {} +} + +impl Trait for () { + type X = u8; + fn foo(&self) {$0 + let x = 5; + } +}"#, + ) + } + + #[test] + fn test_ignore_param_list() { + check_assist_not_applicable( + add_missing_impl_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self); +} + +impl Trait for () { + type X = u8; + fn foo(&self$0) { + let x = 5; + } +}"#, + ) + } + + #[test] + fn test_ignore_scope_inside_function() { + check_assist_not_applicable( + add_missing_impl_members, + r#" +trait Trait { + type X; + fn foo(&self); + fn bar(&self); +} + +impl Trait for () { + type X = u8; + fn foo(&self) { + let x = async {$0 5 
};
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_apply_outside_function() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+trait Trait {
+ type X;
+ fn foo(&self);
+ fn bar(&self) {}
+}
+
+impl Trait for () {
+ type X = u8;
+ fn foo(&self)$0 {}
+}"#,
+ r#"
+trait Trait {
+ type X;
+ fn foo(&self);
+ fn bar(&self) {}
+}
+
+impl Trait for () {
+ type X = u8;
+ fn foo(&self) {}
+
+ $0fn bar(&self) {}
+}"#,
+ )
+ }
}
diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 73f4db4e5ff2b..8e4ac69ae6f63 100644
--- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -326,7 +326,7 @@ impl ExtendedEnum {
fn resolve_enum_def(sema: &Semantics<'_, RootDatabase>, expr: &ast::Expr) -> Option {
sema.type_of_expr(expr)?.adjusted().autoderef(sema.db).find_map(|ty| match ty.as_adt() {
Some(Adt::Enum(e)) => Some(ExtendedEnum::Enum(e)),
- _ => ty.is_bool().then(|| ExtendedEnum::Bool),
+ _ => ty.is_bool().then_some(ExtendedEnum::Bool),
})
}
@@ -344,7 +344,7 @@ fn resolve_tuple_of_enum_def(
// For now we only handle expansion for a tuple of enums. Here
// we map non-enum items to None and rely on `collect` to
// convert Vec> into Option>.
- _ => ty.is_bool().then(|| ExtendedEnum::Bool),
+ _ => ty.is_bool().then_some(ExtendedEnum::Bool),
})
})
.collect()
diff --git a/crates/ide-assists/src/handlers/add_return_type.rs b/crates/ide-assists/src/handlers/add_return_type.rs
index 89040a8569e63..879c478acf882 100644
--- a/crates/ide-assists/src/handlers/add_return_type.rs
+++ b/crates/ide-assists/src/handlers/add_return_type.rs
@@ -35,16 +35,16 @@ pub(crate) fn add_return_type(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opt
match builder_edit_pos {
InsertOrReplace::Insert(insert_pos, needs_whitespace) => {
let preceeding_whitespace = if needs_whitespace { " " } else { "" };
- builder.insert(insert_pos, &format!("{preceeding_whitespace}-> {ty} "))
+ builder.insert(insert_pos, format!("{preceeding_whitespace}-> {ty} "))
}
InsertOrReplace::Replace(text_range) => {
- builder.replace(text_range, &format!("-> {ty}"))
+ builder.replace(text_range, format!("-> {ty}"))
}
}
if let FnType::Closure { wrap_expr: true } = fn_type {
cov_mark::hit!(wrap_closure_non_block_expr);
// `|x| x` becomes `|x| -> T x` which is invalid, so wrap it in a block
- builder.replace(tail_expr.syntax().text_range(), &format!("{{{tail_expr}}}"));
+ builder.replace(tail_expr.syntax().text_range(), format!("{{{tail_expr}}}"));
}
},
)
diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs
index a689270bc0915..698ad78cce6ff 100644
--- a/crates/ide-assists/src/handlers/auto_import.rs
+++ b/crates/ide-assists/src/handlers/auto_import.rs
@@ -203,7 +203,7 @@ fn relevance_score(
// get the distance between the imported path and the current module
// (prefer items that are more local)
Some((item_module, current_module)) => {
- score -= module_distance_hueristic(db, &current_module, &item_module) as i32;
+ score -= module_distance_hueristic(db, current_module, &item_module) as i32;
}
// could not find relevant modules, so just use the length of the path as an estimate
diff --git a/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs b/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
index 80eecf4a09868..f32ef2d59d891 100644
--- a/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs
+++ 
b/crates/ide-assists/src/handlers/convert_iter_for_each_to_for.rs @@ -216,7 +216,7 @@ fn validate_method_call_expr( let krate = module.krate(); let iter_trait = FamousDefs(sema, krate).core_iter_Iterator()?; - it_type.impls_trait(sema.db, iter_trait, &[]).then(|| (expr, receiver)) + it_type.impls_trait(sema.db, iter_trait, &[]).then_some((expr, receiver)) } #[cfg(test)] diff --git a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs index 92e091fca126c..b0383291e7370 100644 --- a/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs +++ b/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs @@ -168,7 +168,7 @@ fn edit_struct_references( let arg_list = call_expr.syntax().descendants().find_map(ast::ArgList::cast)?; edit.replace( - call_expr.syntax().text_range(), + ctx.sema.original_range(&node).range, ast::make::record_expr( path, ast::make::record_expr_field_list(arg_list.args().zip(names).map( @@ -249,6 +249,24 @@ mod tests { ); check_assist_not_applicable(convert_tuple_struct_to_named_struct, r#"struct Foo$0;"#); } + #[test] + fn convert_in_macro_args() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +macro_rules! foo {($i:expr) => {$i} } +struct T$0(u8); +fn test() { + foo!(T(1)); +}"#, + r#" +macro_rules! foo {($i:expr) => {$i} } +struct T { field1: u8 } +fn test() { + foo!(T { field1: 1 }); +}"#, + ); + } #[test] fn convert_simple_struct() { @@ -554,6 +572,29 @@ where ); } + #[test] + fn convert_variant_in_macro_args() { + check_assist( + convert_tuple_struct_to_named_struct, + r#" +macro_rules! foo {($i:expr) => {$i} } +enum T { + V$0(u8) +} +fn test() { + foo!(T::V(1)); +}"#, + r#" +macro_rules! foo {($i:expr) => {$i} } +enum T { + V { field1: u8 } +} +fn test() { + foo!(T::V { field1: 1 }); +}"#, + ); + } + #[test] fn convert_simple_variant() { check_assist( diff --git a/crates/ide-assists/src/handlers/move_format_string_arg.rs b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs similarity index 84% rename from crates/ide-assists/src/handlers/move_format_string_arg.rs rename to crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 11db6ae7f7b81..4f3b6e0c287c9 100644 --- a/crates/ide-assists/src/handlers/move_format_string_arg.rs +++ b/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -10,7 +10,7 @@ use itertools::Itertools; use stdx::format_to; use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; -// Assist: move_format_string_arg +// Assist: extract_expressions_from_format_string // // Move an expression out of a format string. 
// @@ -23,7 +23,7 @@ use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; // } // // fn main() { -// print!("{x + 1}$0"); +// print!("{var} {x + 1}$0"); // } // ``` // -> @@ -36,11 +36,14 @@ use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange}; // } // // fn main() { -// print!("{}"$0, x + 1); +// print!("{var} {}"$0, x + 1); // } // ``` -pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { +pub(crate) fn extract_expressions_from_format_string( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { let fmt_string = ctx.find_token_at_offset::()?; let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?; @@ -58,7 +61,7 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) acc.add( AssistId( - "move_format_string_arg", + "extract_expressions_from_format_string", // if there aren't any expressions, then make the assist a RefactorExtract if extracted_args.iter().filter(|f| matches!(f, Arg::Expr(_))).count() == 0 { AssistKind::RefactorExtract @@ -66,7 +69,7 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) AssistKind::QuickFix }, ), - "Extract format args", + "Extract format expressions", tt.syntax().text_range(), |edit| { let fmt_range = fmt_string.syntax().text_range(); @@ -118,15 +121,14 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) let mut placeholder_idx = 1; for extracted_args in extracted_args { - // remove expr from format string - args.push_str(", "); - match extracted_args { - Arg::Ident(s) | Arg::Expr(s) => { + Arg::Expr(s)=> { + args.push_str(", "); // insert arg args.push_str(&s); } Arg::Placeholder => { + args.push_str(", "); // try matching with existing argument match existing_args.next() { Some(ea) => { @@ -139,6 +141,7 @@ pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) } } } + Arg::Ident(_s) => (), } } @@ -171,7 +174,7 @@ macro_rules! 
print { #[test] fn multiple_middle_arg() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -192,7 +195,7 @@ fn main() { #[test] fn single_arg() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -213,7 +216,7 @@ fn main() { #[test] fn multiple_middle_placeholders_arg() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -234,7 +237,7 @@ fn main() { #[test] fn multiple_trailing_args() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -255,7 +258,7 @@ fn main() { #[test] fn improper_commas() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -276,7 +279,7 @@ fn main() { #[test] fn nested_tt() { check_assist( - move_format_string_arg, + extract_expressions_from_format_string, &add_macro_decl( r#" fn main() { @@ -289,6 +292,29 @@ fn main() { fn main() { print!("My name is {} {}"$0, stringify!(Paperino), x + x) } +"#, + ), + ); + } + + #[test] + fn extract_only_expressions() { + check_assist( + extract_expressions_from_format_string, + &add_macro_decl( + r#" +fn main() { + let var = 1 + 1; + print!("foobar {var} {var:?} {x$0 + x}") +} +"#, + ), + &add_macro_decl( + r#" +fn main() { + let var = 1 + 1; + print!("foobar {var} {var:?} {}"$0, x + x) +} "#, ), ); diff --git a/crates/ide-assists/src/handlers/extract_function.rs b/crates/ide-assists/src/handlers/extract_function.rs index c1e2f19ab18b2..e04a1dabb2c58 100644 --- a/crates/ide-assists/src/handlers/extract_function.rs +++ b/crates/ide-assists/src/handlers/extract_function.rs @@ -11,7 +11,9 @@ use ide_db::{ helpers::mod_path_to_ast, imports::insert_use::{insert_use, ImportScope}, search::{FileReference, ReferenceCategory, SearchScope}, - syntax_helpers::node_ext::{preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr}, + syntax_helpers::node_ext::{ + for_each_tail_expr, preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr, + }, FxIndexSet, RootDatabase, }; use itertools::Itertools; @@ -78,7 +80,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op }; let body = extraction_target(&node, range)?; - let container_info = body.analyze_container(&ctx.sema)?; + let (container_info, contains_tail_expr) = body.analyze_container(&ctx.sema)?; let (locals_used, self_param) = body.analyze(&ctx.sema); @@ -119,6 +121,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ret_ty, body, outliving_locals, + contains_tail_expr, mods: container_info, }; @@ -245,6 +248,8 @@ struct Function { ret_ty: RetType, body: FunctionBody, outliving_locals: Vec, + /// Whether at least one of the container's tail expr is contained in the range we're extracting. + contains_tail_expr: bool, mods: ContainerInfo, } @@ -265,7 +270,7 @@ enum ParamKind { MutRef, } -#[derive(Debug, Eq, PartialEq)] +#[derive(Debug)] enum FunType { Unit, Single(hir::Type), @@ -294,7 +299,6 @@ struct ControlFlow { #[derive(Clone, Debug)] struct ContainerInfo { is_const: bool, - is_in_tail: bool, parent_loop: Option, /// The function's return type, const's type etc. 
ret_type: Option, @@ -584,7 +588,7 @@ impl FunctionBody { FunctionBody::Expr(expr) => Some(expr.clone()), FunctionBody::Span { parent, text_range } => { let tail_expr = parent.tail_expr()?; - text_range.contains_range(tail_expr.syntax().text_range()).then(|| tail_expr) + text_range.contains_range(tail_expr.syntax().text_range()).then_some(tail_expr) } } } @@ -743,7 +747,10 @@ impl FunctionBody { (res, self_param) } - fn analyze_container(&self, sema: &Semantics<'_, RootDatabase>) -> Option { + fn analyze_container( + &self, + sema: &Semantics<'_, RootDatabase>, + ) -> Option<(ContainerInfo, bool)> { let mut ancestors = self.parent()?.ancestors(); let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted); let mut parent_loop = None; @@ -815,28 +822,36 @@ impl FunctionBody { } }; }; - let container_tail = match expr? { - ast::Expr::BlockExpr(block) => block.tail_expr(), - expr => Some(expr), - }; - let is_in_tail = - container_tail.zip(self.tail_expr()).map_or(false, |(container_tail, body_tail)| { - container_tail.syntax().text_range().contains_range(body_tail.syntax().text_range()) + + let expr = expr?; + let contains_tail_expr = if let Some(body_tail) = self.tail_expr() { + let mut contains_tail_expr = false; + let tail_expr_range = body_tail.syntax().text_range(); + for_each_tail_expr(&expr, &mut |e| { + if tail_expr_range.contains_range(e.syntax().text_range()) { + contains_tail_expr = true; + } }); + contains_tail_expr + } else { + false + }; let parent = self.parent()?; let parents = generic_parents(&parent); let generic_param_lists = parents.iter().filter_map(|it| it.generic_param_list()).collect(); let where_clauses = parents.iter().filter_map(|it| it.where_clause()).collect(); - Some(ContainerInfo { - is_in_tail, - is_const, - parent_loop, - ret_type: ty, - generic_param_lists, - where_clauses, - }) + Some(( + ContainerInfo { + is_const, + parent_loop, + ret_type: ty, + generic_param_lists, + where_clauses, + }, + contains_tail_expr, + )) } fn return_ty(&self, ctx: &AssistContext<'_>) -> Option { @@ -1368,7 +1383,7 @@ impl FlowHandler { None => FlowHandler::None, Some(flow_kind) => { let action = flow_kind.clone(); - if *ret_ty == FunType::Unit { + if let FunType::Unit = ret_ty { match flow_kind { FlowKind::Return(None) | FlowKind::Break(_, None) @@ -1633,7 +1648,7 @@ impl Function { fn make_ret_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> Option { let fun_ty = self.return_type(ctx); - let handler = if self.mods.is_in_tail { + let handler = if self.contains_tail_expr { FlowHandler::None } else { FlowHandler::from_ret_ty(self, &fun_ty) @@ -1707,7 +1722,7 @@ fn make_body( fun: &Function, ) -> ast::BlockExpr { let ret_ty = fun.return_type(ctx); - let handler = if fun.mods.is_in_tail { + let handler = if fun.contains_tail_expr { FlowHandler::None } else { FlowHandler::from_ret_ty(fun, &ret_ty) @@ -1785,7 +1800,7 @@ fn make_body( .collect::>(); let tail_expr = tail_expr.map(|expr| expr.dedent(old_indent).indent(body_indent)); - make::hacky_block_expr_with_comments(elements, tail_expr) + make::hacky_block_expr(elements, tail_expr) } }; @@ -1845,9 +1860,29 @@ fn with_default_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::B } fn with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr { - let stmt_tail = block.tail_expr().map(|expr| make::expr_stmt(expr).into()); - let stmts = block.statements().chain(stmt_tail); - make::block_expr(stmts, Some(tail_expr)) + let stmt_tail_opt: Option = + block.tail_expr().map(|expr| 
make::expr_stmt(expr).into()); + + let mut elements: Vec = vec![]; + + block.statements().for_each(|stmt| { + elements.push(syntax::NodeOrToken::Node(stmt.syntax().clone())); + }); + + if let Some(stmt_list) = block.stmt_list() { + stmt_list.syntax().children_with_tokens().for_each(|node_or_token| { + match &node_or_token { + syntax::NodeOrToken::Token(_) => elements.push(node_or_token), + _ => (), + }; + }); + } + + if let Some(stmt_tail) = stmt_tail_opt { + elements.push(syntax::NodeOrToken::Node(stmt_tail.syntax().clone())); + } + + make::hacky_block_expr(elements, Some(tail_expr)) } fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String { @@ -1946,7 +1981,7 @@ fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) { if nested_scope.is_none() { if let Some(expr) = ast::Expr::cast(e.clone()) { match expr { - ast::Expr::ReturnExpr(return_expr) if nested_scope.is_none() => { + ast::Expr::ReturnExpr(return_expr) => { let expr = return_expr.expr(); if let Some(replacement) = make_rewritten_flow(handler, expr) { ted::replace(return_expr.syntax(), replacement.syntax()) @@ -4944,9 +4979,8 @@ fn $0fun_name() { ); } - // FIXME: we do want to preserve whitespace #[test] - fn extract_function_does_not_preserve_whitespace() { + fn extract_function_does_preserve_whitespace() { check_assist( extract_function, r#" @@ -4965,6 +4999,7 @@ fn func() { fn $0fun_name() { let a = 0; + let x = 0; } "#, @@ -5582,6 +5617,193 @@ impl Struct where T: Into + Copy, U: Debug { fn $0fun_name(t: T, v: V) -> i32 where T: Into + Copy, V: Into { t.into() + v.into() } +"#, + ); + } + + #[test] + fn non_tail_expr_of_tail_expr_loop() { + check_assist( + extract_function, + r#" +pub fn f() { + loop { + $0if true { + continue; + }$0 + + if false { + break; + } + } +} +"#, + r#" +pub fn f() { + loop { + if let ControlFlow::Break(_) = fun_name() { + continue; + } + + if false { + break; + } + } +} + +fn $0fun_name() -> ControlFlow<()> { + if true { + return ControlFlow::Break(()); + } + ControlFlow::Continue(()) +} +"#, + ); + } + + #[test] + fn non_tail_expr_of_tail_if_block() { + // FIXME: double semicolon + check_assist( + extract_function, + r#" +//- minicore: option, try +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + let a = $0if true { + Some(())? + } else { + () + }$0; + Some(a) + } else { + None + } +} +"#, + r#" +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + let a = fun_name()?;; + Some(a) + } else { + None + } +} + +fn $0fun_name() -> Option<()> { + Some(if true { + Some(())? + } else { + () + }) +} +"#, + ); + } + + #[test] + fn tail_expr_of_tail_block_nested() { + check_assist( + extract_function, + r#" +//- minicore: option, try +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + $0{ + let a = if true { + Some(())? + } else { + () + }; + Some(a) + }$0 + } else { + None + } +} +"#, + r#" +impl core::ops::Try for Option { + type Output = T; + type Residual = Option; +} +impl core::ops::FromResidual for Option {} + +fn f() -> Option<()> { + if true { + fun_name()? + } else { + None + } +} + +fn $0fun_name() -> Option<()> { + let a = if true { + Some(())? 
+ } else { + () + }; + Some(a) +} +"#, + ); + } + + #[test] + fn non_tail_expr_with_comment_of_tail_expr_loop() { + check_assist( + extract_function, + r#" +pub fn f() { + loop { + $0// A comment + if true { + continue; + }$0 + if false { + break; + } + } +} +"#, + r#" +pub fn f() { + loop { + if let ControlFlow::Break(_) = fun_name() { + continue; + } + if false { + break; + } + } +} + +fn $0fun_name() -> ControlFlow<()> { + // A comment + if true { + return ControlFlow::Break(()); + } + ControlFlow::Continue(()) +} "#, ); } diff --git a/crates/ide-assists/src/handlers/extract_module.rs b/crates/ide-assists/src/handlers/extract_module.rs index 56834394aebaa..0fa7bd558bbfa 100644 --- a/crates/ide-assists/src/handlers/extract_module.rs +++ b/crates/ide-assists/src/handlers/extract_module.rs @@ -10,6 +10,8 @@ use ide_db::{ defs::{Definition, NameClass, NameRefClass}, search::{FileReference, SearchScope}, }; +use itertools::Itertools; +use smallvec::SmallVec; use stdx::format_to; use syntax::{ algo::find_node_at_range, @@ -116,13 +118,13 @@ pub(crate) fn extract_module(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti let mut body_items: Vec = Vec::new(); let mut items_to_be_processed: Vec = module.body_items.clone(); - let mut new_item_indent = old_item_indent + 1; - if impl_parent.is_some() { - new_item_indent = old_item_indent + 2; + let new_item_indent = if impl_parent.is_some() { + old_item_indent + 2 } else { items_to_be_processed = [module.use_items.clone(), items_to_be_processed].concat(); - } + old_item_indent + 1 + }; for item in items_to_be_processed { let item = item.indent(IndentLevel(1)); @@ -657,28 +659,23 @@ impl Module { fn check_intersection_and_push( import_paths_to_be_removed: &mut Vec, - import_path: TextRange, + mut import_path: TextRange, ) { - if import_paths_to_be_removed.len() > 0 { - // Text ranges received here for imports are extended to the - // next/previous comma which can cause intersections among them - // and later deletion of these can cause panics similar - // to reported in #11766. So to mitigate it, we - // check for intersection between all current members - // and if it exists we combine both text ranges into - // one - let r = import_paths_to_be_removed - .into_iter() - .position(|it| it.intersect(import_path).is_some()); - match r { - Some(it) => { - import_paths_to_be_removed[it] = import_paths_to_be_removed[it].cover(import_path) - } - None => import_paths_to_be_removed.push(import_path), - } - } else { - import_paths_to_be_removed.push(import_path); + // Text ranges received here for imports are extended to the + // next/previous comma which can cause intersections among them + // and later deletion of these can cause panics similar + // to reported in #11766. So to mitigate it, we + // check for intersection between all current members + // and combine all such ranges into one. 
+ let s: SmallVec<[_; 2]> = import_paths_to_be_removed
+ .into_iter()
+ .positions(|it| it.intersect(import_path).is_some())
+ .collect();
+ for pos in s.into_iter().rev() {
+ let intersecting_path = import_paths_to_be_removed.swap_remove(pos);
+ import_path = import_path.cover(intersecting_path);
}
+ import_paths_to_be_removed.push(import_path);
}
fn does_source_exists_outside_sel_in_same_mod(
@@ -1766,4 +1763,49 @@ mod modname {
",
)
}
+
+ #[test]
+ fn test_merge_multiple_intersections() {
+ check_assist(
+ extract_module,
+ r#"
+mod dep {
+ pub struct A;
+ pub struct B;
+ pub struct C;
+}
+
+use dep::{A, B, C};
+
+$0struct S {
+ inner: A,
+ state: C,
+ condvar: B,
+}$0
+"#,
+ r#"
+mod dep {
+ pub struct A;
+ pub struct B;
+ pub struct C;
+}
+
+use dep::{};
+
+mod modname {
+ use super::dep::B;
+
+ use super::dep::C;
+
+ use super::dep::A;
+
+ pub(crate) struct S {
+ pub(crate) inner: A,
+ pub(crate) state: C,
+ pub(crate) condvar: B,
+ }
+}
+"#,
+ );
+ }
}
diff --git a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
index b4e10667b07ab..49debafe1a0ad 100644
--- a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -178,7 +178,7 @@ fn extract_generic_params(
.fold(false, |tagged, ty| tag_generics_in_variant(&ty, &mut generics) || tagged),
};
- let generics = generics.into_iter().filter_map(|(param, tag)| tag.then(|| param));
+ let generics = generics.into_iter().filter_map(|(param, tag)| tag.then_some(param));
tagged_one.then(|| make::generic_param_list(generics))
}
diff --git a/crates/ide-assists/src/handlers/extract_type_alias.rs b/crates/ide-assists/src/handlers/extract_type_alias.rs
index 3116935fc5e75..0505f5784f814 100644
--- a/crates/ide-assists/src/handlers/extract_type_alias.rs
+++ b/crates/ide-assists/src/handlers/extract_type_alias.rs
@@ -108,76 +108,80 @@ fn collect_used_generics<'gp>(
}
let mut generics = Vec::new();
- walk_ty(ty, &mut |ty| match ty {
- ast::Type::PathType(ty) => {
- if let Some(path) = ty.path() {
- if let Some(name_ref) = path.as_single_name_ref() {
- if let Some(param) = known_generics.iter().find(|gp| {
- match gp {
- ast::GenericParam::ConstParam(cp) => cp.name(),
- ast::GenericParam::TypeParam(tp) => tp.name(),
- _ => None,
+ walk_ty(ty, &mut |ty| {
+ match ty {
+ ast::Type::PathType(ty) => {
+ if let Some(path) = ty.path() {
+ if let Some(name_ref) = path.as_single_name_ref() {
+ if let Some(param) = known_generics.iter().find(|gp| {
+ match gp {
+ ast::GenericParam::ConstParam(cp) => cp.name(),
+ ast::GenericParam::TypeParam(tp) => tp.name(),
+ _ => None,
+ }
+ .map_or(false, |n| n.text() == name_ref.text())
+ }) {
+ generics.push(param);
}
- .map_or(false, |n| n.text() == name_ref.text())
- }) {
- generics.push(param);
}
+ generics.extend(
+ path.segments()
+ .filter_map(|seg| seg.generic_arg_list())
+ .flat_map(|it| it.generic_args())
+ .filter_map(|it| match it {
+ ast::GenericArg::LifetimeArg(lt) => {
+ let lt = lt.lifetime()?;
+ known_generics.iter().find(find_lifetime(&lt.text()))
+ }
+ _ => None,
+ }),
+ );
}
- generics.extend(
- path.segments()
- .filter_map(|seg| seg.generic_arg_list())
- .flat_map(|it| it.generic_args())
- .filter_map(|it| match it {
- ast::GenericArg::LifetimeArg(lt) => {
- let lt = lt.lifetime()?;
- known_generics.iter().find(find_lifetime(&lt.text()))
- }
- _ => None,
- }),
- );
- }
- }
- ast::Type::ImplTraitType(impl_ty) => {
- if let Some(it) = 
impl_ty.type_bound_list() {
- generics.extend(
- it.bounds()
- .filter_map(|it| it.lifetime())
- .filter_map(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
- );
+ ast::Type::ImplTraitType(impl_ty) => {
+ if let Some(it) = impl_ty.type_bound_list() {
+ generics.extend(
+ it.bounds()
+ .filter_map(|it| it.lifetime())
+ .filter_map(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
+ );
+ }
}
- }
- ast::Type::DynTraitType(dyn_ty) => {
- if let Some(it) = dyn_ty.type_bound_list() {
- generics.extend(
- it.bounds()
- .filter_map(|it| it.lifetime())
- .filter_map(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
- );
+ ast::Type::DynTraitType(dyn_ty) => {
+ if let Some(it) = dyn_ty.type_bound_list() {
+ generics.extend(
+ it.bounds()
+ .filter_map(|it| it.lifetime())
+ .filter_map(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
+ );
+ }
}
- }
- ast::Type::RefType(ref_) => generics.extend(
- ref_.lifetime().and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
- ),
- ast::Type::ArrayType(ar) => {
- if let Some(expr) = ar.expr() {
- if let ast::Expr::PathExpr(p) = expr {
- if let Some(path) = p.path() {
- if let Some(name_ref) = path.as_single_name_ref() {
- if let Some(param) = known_generics.iter().find(|gp| {
- if let ast::GenericParam::ConstParam(cp) = gp {
- cp.name().map_or(false, |n| n.text() == name_ref.text())
- } else {
- false
+ ast::Type::RefType(ref_) => generics.extend(
+ ref_.lifetime()
+ .and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
+ ),
+ ast::Type::ArrayType(ar) => {
+ if let Some(expr) = ar.expr() {
+ if let ast::Expr::PathExpr(p) = expr {
+ if let Some(path) = p.path() {
+ if let Some(name_ref) = path.as_single_name_ref() {
+ if let Some(param) = known_generics.iter().find(|gp| {
+ if let ast::GenericParam::ConstParam(cp) = gp {
+ cp.name().map_or(false, |n| n.text() == name_ref.text())
+ } else {
+ false
+ }
+ }) {
+ generics.push(param);
+ }
- }) {
- generics.push(param);
}
}
}
}
}
- }
- _ => (),
+ _ => (),
+ };
+ false
});
// stable resort to lifetime, type, const
generics.sort_by_key(|gp| match gp {
diff --git a/crates/ide-assists/src/handlers/generate_default_from_new.rs b/crates/ide-assists/src/handlers/generate_default_from_new.rs
index 49d9fd707ffcc..2d074a33e7fde 100644
--- a/crates/ide-assists/src/handlers/generate_default_from_new.rs
+++ b/crates/ide-assists/src/handlers/generate_default_from_new.rs
@@ -53,7 +53,7 @@ pub(crate) fn generate_default_from_new(acc: &mut Assists, ctx: &AssistContext<'
return None;
}
- let impl_ = fn_node.syntax().ancestors().into_iter().find_map(ast::Impl::cast)?;
+ let impl_ = fn_node.syntax().ancestors().find_map(ast::Impl::cast)?;
if is_default_implemented(ctx, &impl_) {
cov_mark::hit!(default_block_is_already_present);
cov_mark::hit!(struct_in_module_with_default);
diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index ceae80755037c..c8d0493d097c5 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -81,7 +81,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
acc.add_group(
&GroupLabel("Generate delegate methods…".to_owned()),
AssistId("generate_delegate_methods", AssistKind::Generate),
- format!("Generate delegate for `{}.{}()`", field_name, method.name(ctx.db())),
+ format!("Generate delegate for `{field_name}.{}()`", method.name(ctx.db())),
target, 
|builder| { // Create the function @@ -104,9 +104,11 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' make::name_ref(&method_name.to_string()), arg_list, ); - let body = make::block_expr([], Some(tail_expr)); let ret_type = method_source.ret_type(); let is_async = method_source.async_token().is_some(); + let tail_expr_finished = + if is_async { make::expr_await(tail_expr) } else { tail_expr }; + let body = make::block_expr([], Some(tail_expr_finished)); let f = make::fn_(vis, name, type_params, params, body, ret_type, is_async) .indent(ast::edit::IndentLevel(1)) .clone_for_update(); @@ -306,7 +308,7 @@ struct Person { impl Person { $0pub(crate) async fn age(&'a mut self, ty: T, arg: J) -> T { - self.age.age(ty, arg) + self.age.age(ty, arg).await } }"#, ); diff --git a/crates/ide-assists/src/handlers/generate_deref.rs b/crates/ide-assists/src/handlers/generate_deref.rs index 55b7afb3d3b09..b6958e29193ca 100644 --- a/crates/ide-assists/src/handlers/generate_deref.rs +++ b/crates/ide-assists/src/handlers/generate_deref.rs @@ -85,8 +85,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<() let strukt = ctx.find_node_at_offset::()?; let field = ctx.find_node_at_offset::()?; let field_list = ctx.find_node_at_offset::()?; - let field_list_index = - field_list.syntax().children().into_iter().position(|s| &s == field.syntax())?; + let field_list_index = field_list.syntax().children().position(|s| &s == field.syntax())?; let deref_type_to_generate = match existing_deref_impl(&ctx.sema, &strukt) { None => DerefType::Deref, diff --git a/crates/ide-assists/src/handlers/generate_enum_projection_method.rs b/crates/ide-assists/src/handlers/generate_enum_projection_method.rs index c9aa41c845ad5..ee643ce9a4ac3 100644 --- a/crates/ide-assists/src/handlers/generate_enum_projection_method.rs +++ b/crates/ide-assists/src/handlers/generate_enum_projection_method.rs @@ -157,7 +157,7 @@ fn generate_enum_projection_method( assist_description, target, |builder| { - let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v)); + let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} ")); let field_type_syntax = field_type.syntax(); diff --git a/crates/ide-assists/src/handlers/generate_enum_variant.rs b/crates/ide-assists/src/handlers/generate_enum_variant.rs index 0bcb5728311b7..cd037f7492c66 100644 --- a/crates/ide-assists/src/handlers/generate_enum_variant.rs +++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs @@ -180,7 +180,7 @@ fn make_tuple_field_list( ) -> Option { let args = call_expr.arg_list()?.args(); let tuple_fields = args.map(|arg| { - let ty = expr_ty(ctx, arg, &scope).unwrap_or_else(make::ty_placeholder); + let ty = expr_ty(ctx, arg, scope).unwrap_or_else(make::ty_placeholder); make::tuple_field(None, ty) }); Some(make::tuple_field_list(tuple_fields).into()) diff --git a/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs b/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs index 7c81d2c6a6cc9..742f1f78c2ec9 100644 --- a/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs +++ b/crates/ide-assists/src/handlers/generate_from_impl_for_enum.rs @@ -1,7 +1,9 @@ use ide_db::{famous_defs::FamousDefs, RootDatabase}; use syntax::ast::{self, AstNode, HasName}; -use crate::{utils::generate_trait_impl_text, AssistContext, AssistId, AssistKind, Assists}; +use crate::{ + utils::generate_trait_impl_text_intransitive, AssistContext, AssistId, AssistKind, Assists, +}; // 
Assist: generate_from_impl_for_enum // @@ -70,7 +72,7 @@ pub(crate) fn generate_from_impl_for_enum( }}"# ) }; - let from_impl = generate_trait_impl_text(&enum_, &from_trait, &impl_code); + let from_impl = generate_trait_impl_text_intransitive(&enum_, &from_trait, &impl_code); edit.insert(start_offset, from_impl); }, ) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index 57f198748cb76..da9b0cda5b594 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -514,7 +514,7 @@ fn fn_args( /// vec!["foo_1".into(), "foo_2".into(), "bar_1".into(), "baz".into(), "bar_2".into()]; /// assert_eq!(names, expected); /// ``` -fn deduplicate_arg_names(arg_names: &mut Vec) { +fn deduplicate_arg_names(arg_names: &mut [String]) { let mut arg_name_counts = FxHashMap::default(); for name in arg_names.iter() { *arg_name_counts.entry(name).or_insert(0) += 1; diff --git a/crates/ide-assists/src/handlers/generate_getter.rs b/crates/ide-assists/src/handlers/generate_getter.rs index 5e71914283495..15641b448d001 100644 --- a/crates/ide-assists/src/handlers/generate_getter.rs +++ b/crates/ide-assists/src/handlers/generate_getter.rs @@ -176,7 +176,7 @@ pub(crate) fn generate_getter_impl( // for separating it from other assoc items, that needs // to be handled spearately let mut getter_buf = - generate_getter_from_info(ctx, &getter_info, &record_field_info); + generate_getter_from_info(ctx, &getter_info, record_field_info); // Insert `$0` only for last getter we generate if i == record_fields_count - 1 { @@ -235,7 +235,7 @@ fn generate_getter_from_info( ) -> String { let mut buf = String::with_capacity(512); - let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{} ", v)); + let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{v} ")); let (ty, body) = if info.mutable { ( format!("&mut {}", record_field_info.field_ty), @@ -271,7 +271,7 @@ fn generate_getter_from_info( }}", vis, record_field_info.fn_name, - info.mutable.then(|| "mut ").unwrap_or_default(), + info.mutable.then_some("mut ").unwrap_or_default(), ty, body, ); diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs index 9af26c04eb458..9ad14a819d97b 100644 --- a/crates/ide-assists/src/handlers/generate_impl.rs +++ b/crates/ide-assists/src/handlers/generate_impl.rs @@ -1,14 +1,17 @@ use syntax::ast::{self, AstNode, HasName}; -use crate::{utils::generate_impl_text, AssistContext, AssistId, AssistKind, Assists}; +use crate::{ + utils::{generate_impl_text, generate_trait_impl_text_intransitive}, + AssistContext, AssistId, AssistKind, Assists, +}; // Assist: generate_impl // // Adds a new inherent impl for a type. // // ``` -// struct Ctx { -// data: T,$0 +// struct Ctx$0 { +// data: T, // } // ``` // -> @@ -26,6 +29,10 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio let name = nominal.name()?; let target = nominal.syntax().text_range(); + if let Some(_) = ctx.find_node_at_offset::() { + return None; + } + acc.add( AssistId("generate_impl", AssistKind::Generate), format!("Generate impl for `{name}`"), @@ -46,145 +53,393 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio ) } +// Assist: generate_trait_impl +// +// Adds a new trait impl for a type. 
+// +// ``` +// struct $0Ctx { +// data: T, +// } +// ``` +// -> +// ``` +// struct Ctx { +// data: T, +// } +// +// impl $0 for Ctx { +// +// } +// ``` +pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let nominal = ctx.find_node_at_offset::()?; + let name = nominal.name()?; + let target = nominal.syntax().text_range(); + + if let Some(_) = ctx.find_node_at_offset::() { + return None; + } + + acc.add( + AssistId("generate_trait_impl", AssistKind::Generate), + format!("Generate trait impl for `{name}`"), + target, + |edit| { + let start_offset = nominal.syntax().text_range().end(); + match ctx.config.snippet_cap { + Some(cap) => { + let snippet = generate_trait_impl_text_intransitive(&nominal, "$0", ""); + edit.insert_snippet(cap, start_offset, snippet); + } + None => { + let text = generate_trait_impl_text_intransitive(&nominal, "", ""); + edit.insert(start_offset, text); + } + } + }, + ) +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_target}; use super::*; - // FIXME: break up into separate test fns #[test] fn test_add_impl() { check_assist( generate_impl, - "struct Foo {$0}\n", - "struct Foo {}\n\nimpl Foo {\n $0\n}\n", + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} + + impl Foo { + $0 + } + "#, ); + } + + #[test] + fn test_add_impl_with_generics() { check_assist( generate_impl, - "struct Foo {$0}", - "struct Foo {}\n\nimpl Foo {\n $0\n}", + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} + + impl Foo { + $0 + } + "#, ); + } + + #[test] + fn test_add_impl_with_generics_and_lifetime_parameters() { check_assist( generate_impl, - "struct Foo<'a, T: Foo<'a>> {$0}", - "struct Foo<'a, T: Foo<'a>> {}\n\nimpl<'a, T: Foo<'a>> Foo<'a, T> {\n $0\n}", + r#" + struct Foo<'a, T: Foo<'a>>$0 {} + "#, + r#" + struct Foo<'a, T: Foo<'a>> {} + + impl<'a, T: Foo<'a>> Foo<'a, T> { + $0 + } + "#, ); + } + + #[test] + fn test_add_impl_with_attributes() { check_assist( generate_impl, r#" - struct MyOwnArray {}$0"#, + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo$0<'a>> {} + "#, r#" - struct MyOwnArray {} + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo<'a>> {} - impl MyOwnArray { - $0 - }"#, + #[cfg(feature = "foo")] + impl<'a, T: Foo<'a>> Foo<'a, T> { + $0 + } + "#, ); + } + + #[test] + fn test_add_impl_with_default_generic() { check_assist( generate_impl, r#" - #[cfg(feature = "foo")] - struct Foo<'a, T: Foo<'a>> {$0}"#, + struct Defaulted$0 {} + "#, r#" - #[cfg(feature = "foo")] - struct Foo<'a, T: Foo<'a>> {} + struct Defaulted {} - #[cfg(feature = "foo")] - impl<'a, T: Foo<'a>> Foo<'a, T> { - $0 - }"#, + impl Defaulted { + $0 + } + "#, ); + } + #[test] + fn test_add_impl_with_constrained_default_generic() { check_assist( generate_impl, r#" - #[cfg(not(feature = "foo"))] - struct Foo<'a, T: Foo<'a>> {$0}"#, + struct Defaulted$0<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + "#, r#" - #[cfg(not(feature = "foo"))] - struct Foo<'a, T: Foo<'a>> {} + struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} - #[cfg(not(feature = "foo"))] - impl<'a, T: Foo<'a>> Foo<'a, T> { - $0 - }"#, + impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> { + $0 + } + "#, ); + } + #[test] + fn test_add_impl_with_const_defaulted_generic() { check_assist( generate_impl, r#" - struct Defaulted {}$0"#, + struct Defaulted$0 {} + "#, r#" - struct Defaulted {} + struct Defaulted {} - impl Defaulted { - $0 - }"#, + impl Defaulted { + $0 + } + "#, ); + } + #[test] + fn 
test_add_impl_with_trait_constraint() { check_assist( generate_impl, r#" - struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {}$0"#, + pub trait Trait {} + struct Struct$0 + where + T: Trait, + { + inner: T, + } + "#, r#" - struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + pub trait Trait {} + struct Struct + where + T: Trait, + { + inner: T, + } - impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> Defaulted<'a, 'b, T, S> { - $0 - }"#, + impl Struct + where + T: Trait, + { + $0 + } + "#, ); + } - check_assist( + #[test] + fn add_impl_target() { + check_assist_target( generate_impl, r#" - struct Defaulted {}$0"#, + struct SomeThingIrrelevant; + /// Has a lifetime parameter + struct Foo$0<'a, T: Foo<'a>> {} + struct EvenMoreIrrelevant; + "#, + "/// Has a lifetime parameter\nstruct Foo<'a, T: Foo<'a>> {}", + ); + } + + #[test] + fn test_add_trait_impl() { + check_assist( + generate_trait_impl, + r#" + struct Foo$0 {} + "#, r#" - struct Defaulted {} + struct Foo {} - impl Defaulted { - $0 - }"#, + impl $0 for Foo { + + } + "#, ); + } + #[test] + fn test_add_trait_impl_with_generics() { check_assist( - generate_impl, - r#"pub trait Trait {} -struct Struct$0 -where - T: Trait, -{ - inner: T, -}"#, - r#"pub trait Trait {} -struct Struct -where - T: Trait, -{ - inner: T, -} + generate_trait_impl, + r#" + struct Foo$0 {} + "#, + r#" + struct Foo {} -impl Struct -where - T: Trait, -{ - $0 -}"#, + impl $0 for Foo { + + } + "#, ); } #[test] - fn add_impl_target() { + fn test_add_trait_impl_with_generics_and_lifetime_parameters() { + check_assist( + generate_trait_impl, + r#" + struct Foo<'a, T: Foo<'a>>$0 {} + "#, + r#" + struct Foo<'a, T: Foo<'a>> {} + + impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_attributes() { + check_assist( + generate_trait_impl, + r#" + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo$0<'a>> {} + "#, + r#" + #[cfg(feature = "foo")] + struct Foo<'a, T: Foo<'a>> {} + + #[cfg(feature = "foo")] + impl<'a, T: Foo<'a>> $0 for Foo<'a, T> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_default_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0 {} + "#, + r#" + struct Defaulted {} + + impl $0 for Defaulted { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_constrained_default_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + "#, + r#" + struct Defaulted<'a, 'b: 'a, T: Debug + Clone + 'a + 'b = String, const S: usize> {} + + impl<'a, 'b: 'a, T: Debug + Clone + 'a + 'b, const S: usize> $0 for Defaulted<'a, 'b, T, S> { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_const_defaulted_generic() { + check_assist( + generate_trait_impl, + r#" + struct Defaulted$0 {} + "#, + r#" + struct Defaulted {} + + impl $0 for Defaulted { + + } + "#, + ); + } + + #[test] + fn test_add_trait_impl_with_trait_constraint() { + check_assist( + generate_trait_impl, + r#" + pub trait Trait {} + struct Struct$0 + where + T: Trait, + { + inner: T, + } + "#, + r#" + pub trait Trait {} + struct Struct + where + T: Trait, + { + inner: T, + } + + impl $0 for Struct + where + T: Trait, + { + + } + "#, + ); + } + + #[test] + fn add_trait_impl_target() { check_assist_target( - generate_impl, - " -struct SomeThingIrrelevant; -/// Has a lifetime parameter -struct Foo<'a, T: Foo<'a>> {$0} -struct EvenMoreIrrelevant; -", - 
"/// Has a lifetime parameter -struct Foo<'a, T: Foo<'a>> {}", + generate_trait_impl, + r#" + struct SomeThingIrrelevant; + /// Has a lifetime parameter + struct Foo$0<'a, T: Foo<'a>> {} + struct EvenMoreIrrelevant; + "#, + "/// Has a lifetime parameter\nstruct Foo<'a, T: Foo<'a>> {}", ); } } diff --git a/crates/ide-assists/src/handlers/generate_new.rs b/crates/ide-assists/src/handlers/generate_new.rs index 17fadea0eaf19..8d311262a753f 100644 --- a/crates/ide-assists/src/handlers/generate_new.rs +++ b/crates/ide-assists/src/handlers/generate_new.rs @@ -70,7 +70,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option )?; let expr = use_trivial_constructor( - &ctx.sema.db, + ctx.sema.db, ide_db::helpers::mod_path_to_ast(&type_path), &ty, )?; diff --git a/crates/ide-assists/src/handlers/inline_call.rs b/crates/ide-assists/src/handlers/inline_call.rs index 0c546ce5d41c6..5ac18727c1960 100644 --- a/crates/ide-assists/src/handlers/inline_call.rs +++ b/crates/ide-assists/src/handlers/inline_call.rs @@ -394,7 +394,7 @@ fn inline( // Inline parameter expressions or generate `let` statements depending on whether inlining works or not. for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() { // izip confuses RA due to our lack of hygiene info currently losing us type info causing incorrect errors - let usages: &[ast::PathExpr] = &*usages; + let usages: &[ast::PathExpr] = &usages; let expr: &ast::Expr = expr; let insert_let_stmt = || { diff --git a/crates/ide-assists/src/handlers/inline_macro.rs b/crates/ide-assists/src/handlers/inline_macro.rs new file mode 100644 index 0000000000000..9d03f03d201a9 --- /dev/null +++ b/crates/ide-assists/src/handlers/inline_macro.rs @@ -0,0 +1,233 @@ +use syntax::ast::{self, AstNode}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: inline_macro +// +// Takes a macro and inlines it one step. +// +// ``` +// macro_rules! num { +// (+$($t:tt)+) => (1 + num!($($t )+)); +// (-$($t:tt)+) => (-1 + num!($($t )+)); +// (+) => (1); +// (-) => (-1); +// } +// +// fn main() { +// let number = num$0!(+ + + - + +); +// println!("{number}"); +// } +// ``` +// -> +// ``` +// macro_rules! num { +// (+$($t:tt)+) => (1 + num!($($t )+)); +// (-$($t:tt)+) => (-1 + num!($($t )+)); +// (+) => (1); +// (-) => (-1); +// } +// +// fn main() { +// let number = 1+num!(+ + - + +); +// println!("{number}"); +// } +// ``` +pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let unexpanded = ctx.find_node_at_offset::()?; + let expanded = ctx.sema.expand(&unexpanded)?.clone_for_update(); + + let text_range = unexpanded.syntax().text_range(); + + acc.add( + AssistId("inline_macro", AssistKind::RefactorRewrite), + format!("Inline macro"), + text_range, + |builder| builder.replace(text_range, expanded.to_string()), + ) +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + macro_rules! simple_macro { + () => { + r#" +macro_rules! foo { + (foo) => (true); + () => (false); +} +"# + }; + } + macro_rules! double_macro { + () => { + r#" +macro_rules! bar { + (bar) => (true); + ($($tt:tt)?) => (false); +} +macro_rules! foo { + (foo) => (true); + (bar) => (bar!(bar)); + ($($tt:tt)?) => (bar!($($tt)?)); +} +"# + }; + } + + macro_rules! complex_macro { + () => { + r#" +macro_rules! 
num { + (+$($t:tt)+) => (1 + num!($($t )+)); + (-$($t:tt)+) => (-1 + num!($($t )+)); + (+) => (1); + (-) => (-1); +} +"# + }; + } + #[test] + fn inline_macro_target() { + check_assist_target( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let a = foo$0!(foo); }"#), + "foo!(foo)", + ); + } + + #[test] + fn inline_macro_target_start() { + check_assist_target( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let a = $0foo!(foo); }"#), + "foo!(foo)", + ); + } + + #[test] + fn inline_macro_target_end() { + check_assist_target( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let a = foo!(foo$0); }"#), + "foo!(foo)", + ); + } + + #[test] + fn inline_macro_simple_case1() { + check_assist( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let result = foo$0!(foo); }"#), + concat!(simple_macro!(), r#"fn f() { let result = true; }"#), + ); + } + + #[test] + fn inline_macro_simple_case2() { + check_assist( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let result = foo$0!(); }"#), + concat!(simple_macro!(), r#"fn f() { let result = false; }"#), + ); + } + + #[test] + fn inline_macro_simple_not_applicable() { + check_assist_not_applicable( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let result$0 = foo!(foo); }"#), + ); + } + + #[test] + fn inline_macro_simple_not_applicable_broken_macro() { + // FIXME: This is a bug. The macro should not expand, but it's + // the same behaviour as the "Expand Macro Recursively" commmand + // so it's presumably OK for the time being. + check_assist( + inline_macro, + concat!(simple_macro!(), r#"fn f() { let result = foo$0!(asdfasdf); }"#), + concat!(simple_macro!(), r#"fn f() { let result = true; }"#), + ); + } + + #[test] + fn inline_macro_double_case1() { + check_assist( + inline_macro, + concat!(double_macro!(), r#"fn f() { let result = foo$0!(bar); }"#), + concat!(double_macro!(), r#"fn f() { let result = bar!(bar); }"#), + ); + } + + #[test] + fn inline_macro_double_case2() { + check_assist( + inline_macro, + concat!(double_macro!(), r#"fn f() { let result = foo$0!(asdf); }"#), + concat!(double_macro!(), r#"fn f() { let result = bar!(asdf); }"#), + ); + } + + #[test] + fn inline_macro_complex_case1() { + check_assist( + inline_macro, + concat!(complex_macro!(), r#"fn f() { let result = num!(+ +$0 + - +); }"#), + concat!(complex_macro!(), r#"fn f() { let result = 1+num!(+ + - +); }"#), + ); + } + + #[test] + fn inline_macro_complex_case2() { + check_assist( + inline_macro, + concat!(complex_macro!(), r#"fn f() { let result = n$0um!(- + + - +); }"#), + concat!(complex_macro!(), r#"fn f() { let result = -1+num!(+ + - +); }"#), + ); + } + + #[test] + fn inline_macro_recursive_macro() { + check_assist( + inline_macro, + r#" +macro_rules! foo { + () => {foo!()} +} +fn f() { let result = foo$0!(); } +"#, + r#" +macro_rules! 
foo { + () => {foo!()} +} +fn f() { let result = foo!(); } +"#, + ); + } + + #[test] + fn inline_macro_unknown_macro() { + check_assist_not_applicable( + inline_macro, + r#" +fn f() { let result = foo$0!(); } +"#, + ); + } + + #[test] + fn inline_macro_function_call_not_applicable() { + check_assist_not_applicable( + inline_macro, + r#" +fn f() { let result = foo$0(); } +"#, + ); + } +} diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs index 353d467ed19f3..5982e9d61dbf9 100644 --- a/crates/ide-assists/src/handlers/inline_type_alias.rs +++ b/crates/ide-assists/src/handlers/inline_type_alias.rs @@ -138,7 +138,7 @@ pub(crate) fn inline_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> O replacement = Replacement::Plain; } _ => { - let alias = get_type_alias(&ctx, &alias_instance)?; + let alias = get_type_alias(ctx, &alias_instance)?; concrete_type = alias.ty()?; replacement = inline(&alias, &alias_instance)?; } @@ -158,7 +158,7 @@ impl Replacement { fn to_text(&self, concrete_type: &ast::Type) -> String { match self { Replacement::Generic { lifetime_map, const_and_type_map } => { - create_replacement(&lifetime_map, &const_and_type_map, &concrete_type) + create_replacement(lifetime_map, const_and_type_map, concrete_type) } Replacement::Plain => concrete_type.to_string(), } @@ -240,7 +240,7 @@ impl ConstAndTypeMap { ) -> Option { let mut inner = HashMap::new(); let instance_generics = generic_args_to_const_and_type_generics(instance_args); - let alias_generics = generic_param_list_to_const_and_type_generics(&alias_generics); + let alias_generics = generic_param_list_to_const_and_type_generics(alias_generics); if instance_generics.len() > alias_generics.len() { cov_mark::hit!(too_many_generic_args); diff --git a/crates/ide-assists/src/handlers/move_const_to_impl.rs b/crates/ide-assists/src/handlers/move_const_to_impl.rs new file mode 100644 index 0000000000000..0e3a1e652b0dc --- /dev/null +++ b/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -0,0 +1,481 @@ +use hir::{AsAssocItem, AssocItemContainer, HasCrate, HasSource}; +use ide_db::{assists::AssistId, base_db::FileRange, defs::Definition, search::SearchScope}; +use syntax::{ + ast::{self, edit::IndentLevel, edit_in_place::Indent, AstNode}, + SyntaxKind, +}; + +use crate::{ + assist_context::{AssistContext, Assists}, + utils, +}; + +// NOTE: Code may break if the self type implements a trait that has associated const with the same +// name, but it's pretty expensive to check that (`hir::Impl::all_for_type()`) and we assume that's +// pretty rare case. + +// Assist: move_const_to_impl +// +// Move a local constant item in a method to impl's associated constant. All the references will be +// qualified with `Self::`. +// +// ``` +// struct S; +// impl S { +// fn foo() -> usize { +// /// The answer. +// const C$0: usize = 42; +// +// C * C +// } +// } +// ``` +// -> +// ``` +// struct S; +// impl S { +// /// The answer. +// const C: usize = 42; +// +// fn foo() -> usize { +// Self::C * Self::C +// } +// } +// ``` +pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let db = ctx.db(); + let const_: ast::Const = ctx.find_node_at_offset()?; + // Don't show the assist when the cursor is at the const's body. 
+ if let Some(body) = const_.body() { + if body.syntax().text_range().contains(ctx.offset()) { + return None; + } + } + + let parent_fn = const_.syntax().ancestors().find_map(ast::Fn::cast)?; + + // NOTE: We can technically provide this assist for default methods in trait definitions, but + // it's somewhat complex to handle it correctly when the const's name conflicts with + // supertrait's item. We may want to consider implementing it in the future. + let AssocItemContainer::Impl(impl_) = ctx.sema.to_def(&parent_fn)?.as_assoc_item(db)?.container(db) else { return None; }; + if impl_.trait_(db).is_some() { + return None; + } + + let def = ctx.sema.to_def(&const_)?; + let name = def.name(db)?; + let items = impl_.source(db)?.value.assoc_item_list()?; + + let ty = impl_.self_ty(db); + // If there exists another associated item with the same name, skip the assist. + if ty + .iterate_assoc_items(db, ty.krate(db), |assoc| { + // Type aliases wouldn't conflict due to different namespaces, but we're only checking + // the items in inherent impls, so we assume `assoc` is never type alias for the sake + // of brevity (inherent associated types exist in nightly Rust, but it's *very* + // unstable and we don't support them either). + assoc.name(db).filter(|it| it == &name) + }) + .is_some() + { + return None; + } + + let usages = + Definition::Const(def).usages(&ctx.sema).in_scope(SearchScope::file_range(FileRange { + file_id: ctx.file_id(), + range: parent_fn.syntax().text_range(), + })); + + acc.add( + AssistId("move_const_to_impl", crate::AssistKind::RefactorRewrite), + "Move const to impl block", + const_.syntax().text_range(), + |builder| { + let range_to_delete = match const_.syntax().next_sibling_or_token() { + Some(s) if matches!(s.kind(), SyntaxKind::WHITESPACE) => { + // Remove following whitespaces too. + const_.syntax().text_range().cover(s.text_range()) + } + _ => const_.syntax().text_range(), + }; + builder.delete(range_to_delete); + + let const_ref = format!("Self::{name}"); + for range in usages.all().file_ranges().map(|it| it.range) { + builder.replace(range, const_ref.clone()); + } + + // Heuristically inserting the extracted const after the consecutive existing consts + // from the beginning of assoc items. We assume there are no inherent assoc type as + // above. + let last_const = + items.assoc_items().take_while(|it| matches!(it, ast::AssocItem::Const(_))).last(); + let insert_offset = match &last_const { + Some(it) => it.syntax().text_range().end(), + None => match items.l_curly_token() { + Some(l_curly) => l_curly.text_range().end(), + // Not sure if this branch is ever reachable, but it wouldn't hurt to have a + // fallback. + None => items.syntax().text_range().start(), + }, + }; + + // If the moved const will be the first item of the impl, add a new line after that. + // + // We're assuming the code is formatted according to Rust's standard style guidelines + // (i.e. no empty lines between impl's `{` token and its first assoc item). 
+ let fixup = if last_const.is_none() { "\n" } else { "" }; + let indent = IndentLevel::from_node(parent_fn.syntax()); + + let const_ = const_.clone_for_update(); + const_.reindent_to(indent); + let mut const_text = format!("\n{indent}{const_}{fixup}"); + utils::escape_non_snippet(&mut const_text); + builder.insert(insert_offset, const_text); + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn not_applicable_to_top_level_const() { + check_assist_not_applicable( + move_const_to_impl, + r#" +const C$0: () = (); +"#, + ); + } + + #[test] + fn not_applicable_to_free_fn() { + check_assist_not_applicable( + move_const_to_impl, + r#" +fn f() { + const C$0: () = (); +} +"#, + ); + } + + #[test] + fn not_applicable_when_at_const_body() { + check_assist_not_applicable( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() { + const C: () = ($0); + } +} + "#, + ); + } + + #[test] + fn not_applicable_when_inside_const_body_block() { + check_assist_not_applicable( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() { + const C: () = { + ($0) + }; + } +} + "#, + ); + } + + #[test] + fn not_applicable_to_trait_impl_fn() { + check_assist_not_applicable( + move_const_to_impl, + r#" +trait Trait { + fn f(); +} +impl Trait for () { + fn f() { + const C$0: () = (); + } +} +"#, + ); + } + + #[test] + fn not_applicable_to_non_assoc_fn_inside_impl() { + check_assist_not_applicable( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() { + fn g() { + const C$0: () = (); + } + } +} +"#, + ); + } + + #[test] + fn not_applicable_when_const_with_same_name_exists() { + check_assist_not_applicable( + move_const_to_impl, + r#" +struct S; +impl S { + const C: usize = 42; + fn f() { + const C$0: () = (); + } +"#, + ); + + check_assist_not_applicable( + move_const_to_impl, + r#" +struct S; +impl S { + const C: usize = 42; +} +impl S { + fn f() { + const C$0: () = (); + } +"#, + ); + } + + #[test] + fn move_const_simple_body() { + check_assist( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() -> usize { + /// doc comment + const C$0: usize = 42; + + C * C + } +} +"#, + r#" +struct S; +impl S { + /// doc comment + const C: usize = 42; + + fn f() -> usize { + Self::C * Self::C + } +} +"#, + ); + } + + #[test] + fn move_const_simple_body_existing_const() { + check_assist( + move_const_to_impl, + r#" +struct S; +impl S { + const X: () = (); + const Y: () = (); + + fn f() -> usize { + /// doc comment + const C$0: usize = 42; + + C * C + } +} +"#, + r#" +struct S; +impl S { + const X: () = (); + const Y: () = (); + /// doc comment + const C: usize = 42; + + fn f() -> usize { + Self::C * Self::C + } +} +"#, + ); + } + + #[test] + fn move_const_block_body() { + check_assist( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() -> usize { + /// doc comment + const C$0: usize = { + let a = 3; + let b = 4; + a * b + }; + + C * C + } +} +"#, + r#" +struct S; +impl S { + /// doc comment + const C: usize = { + let a = 3; + let b = 4; + a * b + }; + + fn f() -> usize { + Self::C * Self::C + } +} +"#, + ); + } + + #[test] + fn correct_indent_when_nested() { + check_assist( + move_const_to_impl, + r#" +fn main() { + struct S; + impl S { + fn f() -> usize { + /// doc comment + const C$0: usize = 42; + + C * C + } + } +} +"#, + r#" +fn main() { + struct S; + impl S { + /// doc comment + const C: usize = 42; + + fn f() -> usize { + Self::C * Self::C + } + } +} +"#, + ) + } + + #[test] + fn 
move_const_in_nested_scope_with_same_name_in_other_scope() { + check_assist( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() -> usize { + const C: &str = "outer"; + + let n = { + /// doc comment + const C$0: usize = 42; + + let m = { + const C: &str = "inner"; + C.len() + }; + + C * m + }; + + n + C.len() + } +} +"#, + r#" +struct S; +impl S { + /// doc comment + const C: usize = 42; + + fn f() -> usize { + const C: &str = "outer"; + + let n = { + let m = { + const C: &str = "inner"; + C.len() + }; + + Self::C * m + }; + + n + C.len() + } +} +"#, + ); + } + + #[test] + fn moved_const_body_is_escaped() { + // Note that the last argument is what *lsp clients would see* rather than + // what users would see. Unescaping happens thereafter. + check_assist( + move_const_to_impl, + r#" +struct S; +impl S { + fn f() -> usize { + /// doc comment + /// \\ + /// ${snippet} + const C$0: &str = "\ and $1"; + + C.len() + } +} +"#, + r#" +struct S; +impl S { + /// doc comment + /// \\\\ + /// \${snippet} + const C: &str = "\\ and \$1"; + + fn f() -> usize { + Self::C.len() + } +} +"#, + ) + } +} diff --git a/crates/ide-assists/src/handlers/qualify_method_call.rs b/crates/ide-assists/src/handlers/qualify_method_call.rs index 1ea87429c5092..e7014597a1d8b 100644 --- a/crates/ide-assists/src/handlers/qualify_method_call.rs +++ b/crates/ide-assists/src/handlers/qualify_method_call.rs @@ -53,7 +53,7 @@ pub(crate) fn qualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> let qualify_candidate = QualifyCandidate::ImplMethod(ctx.sema.db, call, resolved_call); acc.add( - AssistId("qualify_method_call", AssistKind::RefactorInline), + AssistId("qualify_method_call", AssistKind::RefactorRewrite), format!("Qualify `{ident}` method call"), range, |builder| { diff --git a/crates/ide-assists/src/handlers/remove_dbg.rs b/crates/ide-assists/src/handlers/remove_dbg.rs index 99ae60e07bcfa..52dd670ec2a46 100644 --- a/crates/ide-assists/src/handlers/remove_dbg.rs +++ b/crates/ide-assists/src/handlers/remove_dbg.rs @@ -64,7 +64,7 @@ fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, Str let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]); let input_expressions = input_expressions .into_iter() - .filter_map(|(is_sep, group)| (!is_sep).then(|| group)) + .filter_map(|(is_sep, group)| (!is_sep).then_some(group)) .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join(""))) .collect::>>()?; diff --git a/crates/ide-assists/src/handlers/remove_parentheses.rs b/crates/ide-assists/src/handlers/remove_parentheses.rs new file mode 100644 index 0000000000000..e9c7c6bae9cee --- /dev/null +++ b/crates/ide-assists/src/handlers/remove_parentheses.rs @@ -0,0 +1,221 @@ +use syntax::{ast, AstNode}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: remove_parentheses +// +// Removes redundant parentheses. 
+// +// ``` +// fn main() { +// _ = $0(2) + 2; +// } +// ``` +// -> +// ``` +// fn main() { +// _ = 2 + 2; +// } +// ``` +pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let parens = ctx.find_node_at_offset::()?; + + let cursor_in_range = + parens.l_paren_token()?.text_range().contains_range(ctx.selection_trimmed()) + || parens.r_paren_token()?.text_range().contains_range(ctx.selection_trimmed()); + if !cursor_in_range { + return None; + } + + let expr = parens.expr()?; + + let parent = parens.syntax().parent()?; + if expr.needs_parens_in(parent) { + return None; + } + + let target = parens.syntax().text_range(); + acc.add( + AssistId("remove_parentheses", AssistKind::Refactor), + "Remove redundant parentheses", + target, + |builder| builder.replace_ast(parens.into(), expr), + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn remove_parens_simple() { + check_assist(remove_parentheses, r#"fn f() { $0(2) + 2; }"#, r#"fn f() { 2 + 2; }"#); + check_assist(remove_parentheses, r#"fn f() { ($02) + 2; }"#, r#"fn f() { 2 + 2; }"#); + check_assist(remove_parentheses, r#"fn f() { (2)$0 + 2; }"#, r#"fn f() { 2 + 2; }"#); + check_assist(remove_parentheses, r#"fn f() { (2$0) + 2; }"#, r#"fn f() { 2 + 2; }"#); + } + + #[test] + fn remove_parens_closure() { + check_assist(remove_parentheses, r#"fn f() { &$0(|| 42) }"#, r#"fn f() { &|| 42 }"#); + + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(|| 42).f() }"#); + } + + #[test] + fn remove_parens_if_let_chains() { + check_assist_not_applicable( + remove_parentheses, + r#"fn f() { if let true = $0(true && true) {} }"#, + ); + } + + #[test] + fn remove_parens_associativity() { + check_assist( + remove_parentheses, + r#"fn f() { $0(2 + 2) + 2; }"#, + r#"fn f() { 2 + 2 + 2; }"#, + ); + check_assist_not_applicable(remove_parentheses, r#"fn f() { 2 + $0(2 + 2); }"#); + } + + #[test] + fn remove_parens_precedence() { + check_assist( + remove_parentheses, + r#"fn f() { $0(2 * 3) + 1; }"#, + r#"fn f() { 2 * 3 + 1; }"#, + ); + check_assist(remove_parentheses, r#"fn f() { ( $0(2) ); }"#, r#"fn f() { ( 2 ); }"#); + check_assist(remove_parentheses, r#"fn f() { $0(2?)?; }"#, r#"fn f() { 2??; }"#); + check_assist(remove_parentheses, r#"fn f() { f(($02 + 2)); }"#, r#"fn f() { f(2 + 2); }"#); + check_assist( + remove_parentheses, + r#"fn f() { (1<2)&&$0(3>4); }"#, + r#"fn f() { (1<2)&&3>4; }"#, + ); + } + + #[test] + fn remove_parens_doesnt_apply_precedence() { + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(2 + 2) * 8; }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(2 + 2).f(); }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(2 + 2).await; }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0!(2..2); }"#); + } + + #[test] + fn remove_parens_doesnt_apply_with_cursor_not_on_paren() { + check_assist_not_applicable(remove_parentheses, r#"fn f() { (2 +$0 2) }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() {$0 (2 + 2) }"#); + } + + #[test] + fn remove_parens_doesnt_apply_when_expr_would_be_turned_into_a_statement() { + check_assist_not_applicable(remove_parentheses, r#"fn x() -> u8 { $0({ 0 } + 1) }"#); + check_assist_not_applicable( + remove_parentheses, + r#"fn x() -> u8 { $0(if true { 0 } else { 1 } + 1) }"#, + ); + check_assist_not_applicable(remove_parentheses, r#"fn x() -> u8 { $0(loop {} + 1) }"#); + } + + #[test] + fn 
remove_parens_doesnt_apply_weird_syntax_and_adge_cases() { + // removing `()` would break code because {} would be counted as the loop/if body + check_assist_not_applicable(remove_parentheses, r#"fn f() { for _ in $0(0..{3}) {} }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { for _ in $0(S {}) {} }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { if $0(S {} == 2) {} }"#); + check_assist_not_applicable(remove_parentheses, r#"fn f() { if $0(return) {} }"#); + } + + #[test] + fn remove_parens_return_with_value_followed_by_block() { + check_assist( + remove_parentheses, + r#"fn f() { if $0(return ()) {} }"#, + r#"fn f() { if return () {} }"#, + ); + } + + #[test] + fn remove_exprs_let_else_restrictions() { + // `}` is not allowed before `else` here + check_assist_not_applicable( + remove_parentheses, + r#"fn f() { let _ = $0(S{}) else { return }; }"#, + ); + + // logic operators can't directly appear in the let-else + check_assist_not_applicable( + remove_parentheses, + r#"fn f() { let _ = $0(false || false) else { return }; }"#, + ); + check_assist_not_applicable( + remove_parentheses, + r#"fn f() { let _ = $0(true && true) else { return }; }"#, + ); + } + + #[test] + fn remove_parens_weird_places() { + check_assist( + remove_parentheses, + r#"fn f() { match () { _=>$0(()) } }"#, + r#"fn f() { match () { _=>() } }"#, + ); + + check_assist( + remove_parentheses, + r#"fn x() -> u8 { { [$0({ 0 } + 1)] } }"#, + r#"fn x() -> u8 { { [{ 0 } + 1] } }"#, + ); + } + + #[test] + fn remove_parens_return_dot_f() { + check_assist( + remove_parentheses, + r#"fn f() { $0(return).f() }"#, + r#"fn f() { return.f() }"#, + ); + } + + #[test] + fn remove_parens_prefix_then_return_something() { + check_assist( + remove_parentheses, + r#"fn f() { &$0(return ()) }"#, + r#"fn f() { &return () }"#, + ); + } + + #[test] + fn remove_parens_double_paren_stmt() { + check_assist( + remove_parentheses, + r#"fn x() -> u8 { $0(({ 0 } + 1)) }"#, + r#"fn x() -> u8 { ({ 0 } + 1) }"#, + ); + + check_assist( + remove_parentheses, + r#"fn x() -> u8 { (($0{ 0 } + 1)) }"#, + r#"fn x() -> u8 { ({ 0 } + 1) }"#, + ); + } + + #[test] + fn remove_parens_im_tired_of_naming_tests() { + check_assist( + remove_parentheses, + r#"fn f() { 2 + $0(return 2) }"#, + r#"fn f() { 2 + return 2 }"#, + ); + + check_assist_not_applicable(remove_parentheses, r#"fn f() { $0(return 2) + 2 }"#); + } +} diff --git a/crates/ide-assists/src/handlers/replace_arith_op.rs b/crates/ide-assists/src/handlers/replace_arith_op.rs new file mode 100644 index 0000000000000..f1ca35cafc3a4 --- /dev/null +++ b/crates/ide-assists/src/handlers/replace_arith_op.rs @@ -0,0 +1,226 @@ +use ide_db::assists::{AssistId, AssistKind, GroupLabel}; +use syntax::{ + ast::{self, ArithOp, BinaryOp}, + AstNode, TextRange, +}; + +use crate::assist_context::{AssistContext, Assists}; + +// Assist: replace_arith_with_checked +// +// Replaces arithmetic on integers with the `checked_*` equivalent. +// +// ``` +// fn main() { +// let x = 1 $0+ 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let x = 1.checked_add(2); +// } +// ``` +pub(crate) fn replace_arith_with_checked(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + replace_arith(acc, ctx, ArithKind::Checked) +} + +// Assist: replace_arith_with_saturating +// +// Replaces arithmetic on integers with the `saturating_*` equivalent. 
+// +// ``` +// fn main() { +// let x = 1 $0+ 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let x = 1.saturating_add(2); +// } +// ``` +pub(crate) fn replace_arith_with_saturating( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + replace_arith(acc, ctx, ArithKind::Saturating) +} + +// Assist: replace_arith_with_wrapping +// +// Replaces arithmetic on integers with the `wrapping_*` equivalent. +// +// ``` +// fn main() { +// let x = 1 $0+ 2; +// } +// ``` +// -> +// ``` +// fn main() { +// let x = 1.wrapping_add(2); +// } +// ``` +pub(crate) fn replace_arith_with_wrapping( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + replace_arith(acc, ctx, ArithKind::Wrapping) +} + +fn replace_arith(acc: &mut Assists, ctx: &AssistContext<'_>, kind: ArithKind) -> Option<()> { + let (lhs, op, rhs) = parse_binary_op(ctx)?; + + if !is_primitive_int(ctx, &lhs) || !is_primitive_int(ctx, &rhs) { + return None; + } + + let start = lhs.syntax().text_range().start(); + let end = rhs.syntax().text_range().end(); + let range = TextRange::new(start, end); + + acc.add_group( + &GroupLabel("replace_arith".into()), + kind.assist_id(), + kind.label(), + range, + |builder| { + let method_name = kind.method_name(op); + + builder.replace(range, format!("{lhs}.{method_name}({rhs})")) + }, + ) +} + +fn is_primitive_int(ctx: &AssistContext<'_>, expr: &ast::Expr) -> bool { + match ctx.sema.type_of_expr(expr) { + Some(ty) => ty.adjusted().is_int_or_uint(), + _ => false, + } +} + +/// Extract the operands of an arithmetic expression (e.g. `1 + 2` or `1.checked_add(2)`) +fn parse_binary_op(ctx: &AssistContext<'_>) -> Option<(ast::Expr, ArithOp, ast::Expr)> { + let expr = ctx.find_node_at_offset::()?; + + let op = match expr.op_kind() { + Some(BinaryOp::ArithOp(ArithOp::Add)) => ArithOp::Add, + Some(BinaryOp::ArithOp(ArithOp::Sub)) => ArithOp::Sub, + Some(BinaryOp::ArithOp(ArithOp::Mul)) => ArithOp::Mul, + Some(BinaryOp::ArithOp(ArithOp::Div)) => ArithOp::Div, + _ => return None, + }; + + let lhs = expr.lhs()?; + let rhs = expr.rhs()?; + + Some((lhs, op, rhs)) +} + +pub(crate) enum ArithKind { + Saturating, + Wrapping, + Checked, +} + +impl ArithKind { + fn assist_id(&self) -> AssistId { + let s = match self { + ArithKind::Saturating => "replace_arith_with_saturating", + ArithKind::Checked => "replace_arith_with_checked", + ArithKind::Wrapping => "replace_arith_with_wrapping", + }; + + AssistId(s, AssistKind::RefactorRewrite) + } + + fn label(&self) -> &'static str { + match self { + ArithKind::Saturating => "Replace arithmetic with call to saturating_*", + ArithKind::Checked => "Replace arithmetic with call to checked_*", + ArithKind::Wrapping => "Replace arithmetic with call to wrapping_*", + } + } + + fn method_name(&self, op: ArithOp) -> String { + let prefix = match self { + ArithKind::Checked => "checked_", + ArithKind::Wrapping => "wrapping_", + ArithKind::Saturating => "saturating_", + }; + + let suffix = match op { + ArithOp::Add => "add", + ArithOp::Sub => "sub", + ArithOp::Mul => "mul", + ArithOp::Div => "div", + _ => unreachable!("this function should only be called with +, -, / or *"), + }; + format!("{prefix}{suffix}") + } +} + +#[cfg(test)] +mod tests { + use crate::tests::check_assist; + + use super::*; + + #[test] + fn arith_kind_method_name() { + assert_eq!(ArithKind::Saturating.method_name(ArithOp::Add), "saturating_add"); + assert_eq!(ArithKind::Checked.method_name(ArithOp::Sub), "checked_sub"); + } + + #[test] + fn replace_arith_with_checked_add() { + 
check_assist( + replace_arith_with_checked, + r#" +fn main() { + let x = 1 $0+ 2; +} +"#, + r#" +fn main() { + let x = 1.checked_add(2); +} +"#, + ) + } + + #[test] + fn replace_arith_with_saturating_add() { + check_assist( + replace_arith_with_saturating, + r#" +fn main() { + let x = 1 $0+ 2; +} +"#, + r#" +fn main() { + let x = 1.saturating_add(2); +} +"#, + ) + } + + #[test] + fn replace_arith_with_wrapping_add() { + check_assist( + replace_arith_with_wrapping, + r#" +fn main() { + let x = 1 $0+ 2; +} +"#, + r#" +fn main() { + let x = 1.wrapping_add(2); +} +"#, + ) + } +} diff --git a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 6fa15b28e4efc..a6693d7d790cc 100644 --- a/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -907,7 +907,34 @@ impl PartialEq for Foo { } #[test] - fn add_custom_impl_partial_eq_tuple_enum() { + fn add_custom_impl_partial_eq_single_variant_tuple_enum() { + check_assist( + replace_derive_with_manual_impl, + r#" +//- minicore: eq, derive +#[derive(Partial$0Eq)] +enum Foo { + Bar(String), +} +"#, + r#" +enum Foo { + Bar(String), +} + +impl PartialEq for Foo { + $0fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Bar(l0), Self::Bar(r0)) => l0 == r0, + } + } +} +"#, + ) + } + + #[test] + fn add_custom_impl_partial_eq_partial_tuple_enum() { check_assist( replace_derive_with_manual_impl, r#" @@ -936,6 +963,99 @@ impl PartialEq for Foo { ) } + #[test] + fn add_custom_impl_partial_eq_tuple_enum() { + check_assist( + replace_derive_with_manual_impl, + r#" +//- minicore: eq, derive +#[derive(Partial$0Eq)] +enum Foo { + Bar(String), + Baz(i32), +} +"#, + r#" +enum Foo { + Bar(String), + Baz(i32), +} + +impl PartialEq for Foo { + $0fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Bar(l0), Self::Bar(r0)) => l0 == r0, + (Self::Baz(l0), Self::Baz(r0)) => l0 == r0, + _ => false, + } + } +} +"#, + ) + } + + #[test] + fn add_custom_impl_partial_eq_tuple_enum_generic() { + check_assist( + replace_derive_with_manual_impl, + r#" +//- minicore: eq, derive +#[derive(Partial$0Eq)] +enum Either { + Left(T), + Right(U), +} +"#, + r#" +enum Either { + Left(T), + Right(U), +} + +impl PartialEq for Either { + $0fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Left(l0), Self::Left(r0)) => l0 == r0, + (Self::Right(l0), Self::Right(r0)) => l0 == r0, + _ => false, + } + } +} +"#, + ) + } + + #[test] + fn add_custom_impl_partial_eq_tuple_enum_generic_existing_bounds() { + check_assist( + replace_derive_with_manual_impl, + r#" +//- minicore: eq, derive +#[derive(Partial$0Eq)] +enum Either { + Left(T), + Right(U), +} +"#, + r#" +enum Either { + Left(T), + Right(U), +} + +impl PartialEq for Either { + $0fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Left(l0), Self::Left(r0)) => l0 == r0, + (Self::Right(l0), Self::Right(r0)) => l0 == r0, + _ => false, + } + } +} +"#, + ) + } + #[test] fn add_custom_impl_partial_eq_record_enum() { check_assist( @@ -1112,7 +1232,7 @@ struct Foo { bar: U, } -impl Default for Foo { +impl Default for Foo { $0fn default() -> Self { Self { foo: Default::default(), bar: Default::default() } } diff --git a/crates/ide-assists/src/handlers/replace_or_with_or_else.rs b/crates/ide-assists/src/handlers/replace_or_with_or_else.rs index 77382056c1833..f0ed3c4fe6f50 100644 --- 
a/crates/ide-assists/src/handlers/replace_or_with_or_else.rs +++ b/crates/ide-assists/src/handlers/replace_or_with_or_else.rs @@ -75,7 +75,7 @@ fn into_closure(param: &Expr) -> Expr { (|| { if let ast::Expr::CallExpr(call) = param { if call.arg_list()?.args().count() == 0 { - Some(call.expr()?.clone()) + Some(call.expr()?) } else { None } @@ -151,7 +151,7 @@ fn into_call(param: &Expr) -> Expr { (|| { if let ast::Expr::ClosureExpr(closure) = param { if closure.param_list()?.params().count() == 0 { - Some(closure.body()?.clone()) + Some(closure.body()?) } else { None } diff --git a/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs b/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs index c177adc7a10d7..6626ce0795997 100644 --- a/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs +++ b/crates/ide-assists/src/handlers/replace_turbofish_with_explicit_type.rs @@ -42,7 +42,7 @@ pub(crate) fn replace_turbofish_with_explicit_type( let r_angle = generic_args.r_angle_token()?; let turbofish_range = TextRange::new(colon2.text_range().start(), r_angle.text_range().end()); - let turbofish_args: Vec = generic_args.generic_args().into_iter().collect(); + let turbofish_args: Vec = generic_args.generic_args().collect(); // Find type of ::<_> if turbofish_args.len() != 1 { diff --git a/crates/ide-assists/src/handlers/unnecessary_async.rs b/crates/ide-assists/src/handlers/unnecessary_async.rs index 0439883225330..7f612c2a142c7 100644 --- a/crates/ide-assists/src/handlers/unnecessary_async.rs +++ b/crates/ide-assists/src/handlers/unnecessary_async.rs @@ -107,7 +107,7 @@ fn find_all_references( /// If no await expression is found, returns None. fn find_await_expression(ctx: &AssistContext<'_>, nameref: &NameRef) -> Option { // From the nameref, walk up the tree to the await expression. - let await_expr = if let Some(path) = full_path_of_name_ref(&nameref) { + let await_expr = if let Some(path) = full_path_of_name_ref(nameref) { // Function calls. path.syntax() .parent() diff --git a/crates/ide-assists/src/handlers/unqualify_method_call.rs b/crates/ide-assists/src/handlers/unqualify_method_call.rs new file mode 100644 index 0000000000000..e9d4e270cdcfb --- /dev/null +++ b/crates/ide-assists/src/handlers/unqualify_method_call.rs @@ -0,0 +1,211 @@ +use syntax::{ + ast::{self, make, AstNode, HasArgList}, + TextRange, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +// Assist: unqualify_method_call +// +// Transforms universal function call syntax into a method call. +// +// ``` +// fn main() { +// std::ops::Add::add$0(1, 2); +// } +// # mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +// ``` +// -> +// ``` +// fn main() { +// 1.add(2); +// } +// # mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +// ``` +pub(crate) fn unqualify_method_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let call = ctx.find_node_at_offset::()?; + let ast::Expr::PathExpr(path_expr) = call.expr()? 
else { return None }; + let path = path_expr.path()?; + + let cursor_in_range = path.syntax().text_range().contains_range(ctx.selection_trimmed()); + if !cursor_in_range { + return None; + } + + let args = call.arg_list()?; + let l_paren = args.l_paren_token()?; + let mut args_iter = args.args(); + let first_arg = args_iter.next()?; + let second_arg = args_iter.next(); + + _ = path.qualifier()?; + let method_name = path.segment()?.name_ref()?; + + let res = ctx.sema.resolve_path(&path)?; + let hir::PathResolution::Def(hir::ModuleDef::Function(fun)) = res else { return None }; + if !fun.has_self_param(ctx.sema.db) { + return None; + } + + // `core::ops::Add::add(` -> `` + let delete_path = + TextRange::new(path.syntax().text_range().start(), l_paren.text_range().end()); + + // Parens around `expr` if needed + let parens = needs_parens_as_receiver(&first_arg).then(|| { + let range = first_arg.syntax().text_range(); + (range.start(), range.end()) + }); + + // `, ` -> `.add(` + let replace_comma = TextRange::new( + first_arg.syntax().text_range().end(), + second_arg + .map(|a| a.syntax().text_range().start()) + .unwrap_or_else(|| first_arg.syntax().text_range().end()), + ); + + acc.add( + AssistId("unqualify_method_call", AssistKind::RefactorRewrite), + "Unqualify method call", + call.syntax().text_range(), + |edit| { + edit.delete(delete_path); + if let Some((open, close)) = parens { + edit.insert(open, "("); + edit.insert(close, ")"); + } + edit.replace(replace_comma, format!(".{method_name}(")); + }, + ) +} + +fn needs_parens_as_receiver(expr: &ast::Expr) -> bool { + // Make `(expr).dummy()` + let dummy_call = make::expr_method_call( + make::expr_paren(expr.clone()), + make::name_ref("dummy"), + make::arg_list([]), + ); + + // Get the `expr` clone with the right parent back + // (unreachable!s are fine since we've just constructed the expression) + let ast::Expr::MethodCallExpr(call) = &dummy_call else { unreachable!() }; + let Some(receiver) = call.receiver() else { unreachable!() }; + let ast::Expr::ParenExpr(parens) = receiver else { unreachable!() }; + let Some(expr) = parens.expr() else { unreachable!() }; + + expr.needs_parens_in(dummy_call.syntax().clone()) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn unqualify_method_call_simple() { + check_assist( + unqualify_method_call, + r#" +struct S; +impl S { fn f(self, S: S) {} } +fn f() { S::$0f(S, S); }"#, + r#" +struct S; +impl S { fn f(self, S: S) {} } +fn f() { S.f(S); }"#, + ); + } + + #[test] + fn unqualify_method_call_trait() { + check_assist( + unqualify_method_call, + r#" +//- minicore: add +fn f() { ::$0add(2, 2); }"#, + r#" +fn f() { 2.add(2); }"#, + ); + + check_assist( + unqualify_method_call, + r#" +//- minicore: add +fn f() { core::ops::Add::$0add(2, 2); }"#, + r#" +fn f() { 2.add(2); }"#, + ); + + check_assist( + unqualify_method_call, + r#" +//- minicore: add +use core::ops::Add; +fn f() { <_>::$0add(2, 2); }"#, + r#" +use core::ops::Add; +fn f() { 2.add(2); }"#, + ); + } + + #[test] + fn unqualify_method_call_single_arg() { + check_assist( + unqualify_method_call, + r#" + struct S; + impl S { fn f(self) {} } + fn f() { S::$0f(S); }"#, + r#" + struct S; + impl S { fn f(self) {} } + fn f() { S.f(); }"#, + ); + } + + #[test] + fn unqualify_method_call_parens() { + check_assist( + unqualify_method_call, + r#" +//- minicore: deref +struct S; +impl core::ops::Deref for S { + type Target = S; + fn deref(&self) -> &S { self } +} +fn f() { 
core::ops::Deref::$0deref(&S); }"#, + r#" +struct S; +impl core::ops::Deref for S { + type Target = S; + fn deref(&self) -> &S { self } +} +fn f() { (&S).deref(); }"#, + ); + } + + #[test] + fn unqualify_method_call_doesnt_apply_with_cursor_not_on_path() { + check_assist_not_applicable( + unqualify_method_call, + r#" +//- minicore: add +fn f() { core::ops::Add::add(2,$0 2); }"#, + ); + } + + #[test] + fn unqualify_method_call_doesnt_apply_with_no_self() { + check_assist_not_applicable( + unqualify_method_call, + r#" +struct S; +impl S { fn assoc(S: S, S: S) {} } +fn f() { S::assoc$0(S, S); }"#, + ); + } +} diff --git a/crates/ide-assists/src/handlers/unwrap_block.rs b/crates/ide-assists/src/handlers/unwrap_block.rs index 7969a491822d1..53cdac03a3358 100644 --- a/crates/ide-assists/src/handlers/unwrap_block.rs +++ b/crates/ide-assists/src/handlers/unwrap_block.rs @@ -37,7 +37,8 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))? } - if matches!(parent.kind(), SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT) { + if matches!(parent.kind(), SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT | SyntaxKind::LET_STMT) + { return acc.add(assist_id, assist_label, target, |builder| { builder.replace(block.syntax().text_range(), update_expr_string(block.to_string())); }); @@ -713,6 +714,50 @@ fn main() -> i32 { return 3; 5 } +"#, + ); + } + + #[test] + fn unwrap_block_in_let_initializers() { + // https://github.com/rust-lang/rust-analyzer/issues/13679 + check_assist( + unwrap_block, + r#" +fn main() { + let x = {$0 + bar + }; +} +"#, + r#" +fn main() { + let x = bar; +} +"#, + ); + } + + #[test] + fn unwrap_if_in_let_initializers() { + // https://github.com/rust-lang/rust-analyzer/issues/13679 + check_assist( + unwrap_block, + r#" +fn main() { + let a = 1; + let x = if a - 1 == 0 {$0 + foo + } else { + bar + }; +} +"#, + r#" +fn main() { + let a = 1; + let x = foo; +} "#, ); } diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 387cc63142825..7813c9f9cbe80 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -128,6 +128,7 @@ mod handlers { mod convert_while_to_loop; mod destructure_tuple_binding; mod expand_glob_import; + mod extract_expressions_from_format_string; mod extract_function; mod extract_module; mod extract_struct_from_enum_variant; @@ -138,7 +139,6 @@ mod handlers { mod flip_binexpr; mod flip_comma; mod flip_trait_bound; - mod move_format_string_arg; mod generate_constant; mod generate_default_from_enum_variant; mod generate_default_from_new; @@ -159,12 +159,14 @@ mod handlers { mod add_return_type; mod inline_call; mod inline_local_variable; + mod inline_macro; mod inline_type_alias; mod introduce_named_lifetime; mod invert_if; mod merge_imports; mod merge_match_arms; mod move_bounds; + mod move_const_to_impl; mod move_guard; mod move_module_to_file; mod move_to_mod_rs; @@ -178,12 +180,14 @@ mod handlers { mod remove_dbg; mod remove_mut; mod remove_unused_param; + mod remove_parentheses; mod reorder_fields; mod reorder_impl_items; mod replace_try_expr_with_match; mod replace_derive_with_manual_impl; mod replace_if_let_with_match; mod replace_or_with_or_else; + mod replace_arith_op; mod introduce_named_generic; mod replace_let_with_if_let; mod replace_qualified_name_with_use; @@ -198,6 +202,7 @@ mod handlers { mod unnecessary_async; mod unwrap_block; mod unwrap_result_return_type; + mod unqualify_method_call; mod 
wrap_return_type_in_result; pub(crate) fn all() -> &'static [Handler] { @@ -228,6 +233,7 @@ mod handlers { convert_while_to_loop::convert_while_to_loop, destructure_tuple_binding::destructure_tuple_binding, expand_glob_import::expand_glob_import, + extract_expressions_from_format_string::extract_expressions_from_format_string, extract_struct_from_enum_variant::extract_struct_from_enum_variant, extract_type_alias::extract_type_alias, fix_visibility::fix_visibility, @@ -247,6 +253,7 @@ mod handlers { generate_from_impl_for_enum::generate_from_impl_for_enum, generate_function::generate_function, generate_impl::generate_impl, + generate_impl::generate_trait_impl, generate_is_empty_from_len::generate_is_empty_from_len, generate_new::generate_new, inline_call::inline_call, @@ -254,13 +261,14 @@ mod handlers { inline_local_variable::inline_local_variable, inline_type_alias::inline_type_alias, inline_type_alias::inline_type_alias_uses, + inline_macro::inline_macro, introduce_named_generic::introduce_named_generic, introduce_named_lifetime::introduce_named_lifetime, invert_if::invert_if, merge_imports::merge_imports, merge_match_arms::merge_match_arms, move_bounds::move_bounds_to_where_clause, - move_format_string_arg::move_format_string_arg, + move_const_to_impl::move_const_to_impl, move_guard::move_arm_cond_to_match_guard, move_guard::move_guard_to_arm_body, move_module_to_file::move_module_to_file, @@ -277,6 +285,7 @@ mod handlers { remove_dbg::remove_dbg, remove_mut::remove_mut, remove_unused_param::remove_unused_param, + remove_parentheses::remove_parentheses, reorder_fields::reorder_fields, reorder_impl_items::reorder_impl_items, replace_try_expr_with_match::replace_try_expr_with_match, @@ -288,6 +297,9 @@ mod handlers { replace_or_with_or_else::replace_or_with_or_else, replace_turbofish_with_explicit_type::replace_turbofish_with_explicit_type, replace_qualified_name_with_use::replace_qualified_name_with_use, + replace_arith_op::replace_arith_with_wrapping, + replace_arith_op::replace_arith_with_checked, + replace_arith_op::replace_arith_with_saturating, sort_items::sort_items, split_import::split_import, toggle_ignore::toggle_ignore, @@ -297,6 +309,7 @@ mod handlers { unwrap_block::unwrap_block, unwrap_result_return_type::unwrap_result_return_type, unwrap_tuple::unwrap_tuple, + unqualify_method_call::unqualify_method_call, wrap_return_type_in_result::wrap_return_type_in_result, // These are manually sorted for better priorities. By default, // priority is determined by the size of the target range (smaller diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs index 92ced27c78aed..fca268a1f0b2d 100644 --- a/crates/ide-assists/src/tests.rs +++ b/crates/ide-assists/src/tests.rs @@ -171,7 +171,7 @@ fn check(handler: Handler, before: &str, expected: ExpectedResult<'_>, assist_la } FileSystemEdit::MoveDir { src, src_id, dst } => { // temporary placeholder for MoveDir since we are not using MoveDir in ide assists yet. 
- (dst, format!("{:?}\n{:?}", src_id, src)) + (dst, format!("{src_id:?}\n{src:?}")) } }; let sr = db.file_source_root(dst.anchor); diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index c09317572acf2..006ae4b303418 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -624,6 +624,37 @@ fn qux(bar: Bar, baz: Baz) {} ) } +#[test] +fn doctest_extract_expressions_from_format_string() { + check_doc_test( + "extract_expressions_from_format_string", + r#####" +macro_rules! format_args { + ($lit:literal $(tt:tt)*) => { 0 }, +} +macro_rules! print { + ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); +} + +fn main() { + print!("{var} {x + 1}$0"); +} +"#####, + r#####" +macro_rules! format_args { + ($lit:literal $(tt:tt)*) => { 0 }, +} +macro_rules! print { + ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); +} + +fn main() { + print!("{var} {}"$0, x + 1); +} +"#####, + ) +} + #[test] fn doctest_extract_function() { check_doc_test( @@ -1249,8 +1280,8 @@ fn doctest_generate_impl() { check_doc_test( "generate_impl", r#####" -struct Ctx { - data: T,$0 +struct Ctx$0 { + data: T, } "#####, r#####" @@ -1341,6 +1372,27 @@ impl Person { ) } +#[test] +fn doctest_generate_trait_impl() { + check_doc_test( + "generate_trait_impl", + r#####" +struct $0Ctx { + data: T, +} +"#####, + r#####" +struct Ctx { + data: T, +} + +impl $0 for Ctx { + +} +"#####, + ) +} + #[test] fn doctest_inline_call() { check_doc_test( @@ -1417,6 +1469,39 @@ fn main() { ) } +#[test] +fn doctest_inline_macro() { + check_doc_test( + "inline_macro", + r#####" +macro_rules! num { + (+$($t:tt)+) => (1 + num!($($t )+)); + (-$($t:tt)+) => (-1 + num!($($t )+)); + (+) => (1); + (-) => (-1); +} + +fn main() { + let number = num$0!(+ + + - + +); + println!("{number}"); +} +"#####, + r#####" +macro_rules! num { + (+$($t:tt)+) => (1 + num!($($t )+)); + (-$($t:tt)+) => (-1 + num!($($t )+)); + (+) => (1); + (-) => (-1); +} + +fn main() { + let number = 1+num!(+ + - + +); + println!("{number}"); +} +"#####, + ) +} + #[test] fn doctest_inline_type_alias() { check_doc_test( @@ -1654,31 +1739,29 @@ fn apply(f: F, x: T) -> U where F: FnOnce(T) -> U { } #[test] -fn doctest_move_format_string_arg() { +fn doctest_move_const_to_impl() { check_doc_test( - "move_format_string_arg", + "move_const_to_impl", r#####" -macro_rules! format_args { - ($lit:literal $(tt:tt)*) => { 0 }, -} -macro_rules! print { - ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); -} +struct S; +impl S { + fn foo() -> usize { + /// The answer. + const C$0: usize = 42; -fn main() { - print!("{x + 1}$0"); + C * C + } } "#####, r#####" -macro_rules! format_args { - ($lit:literal $(tt:tt)*) => { 0 }, -} -macro_rules! print { - ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*))); -} +struct S; +impl S { + /// The answer. 
+ const C: usize = 42; -fn main() { - print!("{}"$0, x + 1); + fn foo() -> usize { + Self::C * Self::C + } } "#####, ) @@ -1928,6 +2011,23 @@ impl Walrus { ) } +#[test] +fn doctest_remove_parentheses() { + check_doc_test( + "remove_parentheses", + r#####" +fn main() { + _ = $0(2) + 2; +} +"#####, + r#####" +fn main() { + _ = 2 + 2; +} +"#####, + ) +} + #[test] fn doctest_remove_unused_param() { check_doc_test( @@ -1999,6 +2099,57 @@ impl Foo for Bar { ) } +#[test] +fn doctest_replace_arith_with_checked() { + check_doc_test( + "replace_arith_with_checked", + r#####" +fn main() { + let x = 1 $0+ 2; +} +"#####, + r#####" +fn main() { + let x = 1.checked_add(2); +} +"#####, + ) +} + +#[test] +fn doctest_replace_arith_with_saturating() { + check_doc_test( + "replace_arith_with_saturating", + r#####" +fn main() { + let x = 1 $0+ 2; +} +"#####, + r#####" +fn main() { + let x = 1.saturating_add(2); +} +"#####, + ) +} + +#[test] +fn doctest_replace_arith_with_wrapping() { + check_doc_test( + "replace_arith_with_wrapping", + r#####" +fn main() { + let x = 1 $0+ 2; +} +"#####, + r#####" +fn main() { + let x = 1.wrapping_add(2); +} +"#####, + ) +} + #[test] fn doctest_replace_char_with_string() { check_doc_test( @@ -2415,6 +2566,25 @@ pub async fn bar() { foo() } ) } +#[test] +fn doctest_unqualify_method_call() { + check_doc_test( + "unqualify_method_call", + r#####" +fn main() { + std::ops::Add::add$0(1, 2); +} +mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +"#####, + r#####" +fn main() { + 1.add(2); +} +mod std { pub mod ops { pub trait Add { fn add(self, _: Self) {} } impl Add for i32 {} } } +"#####, + ) +} + #[test] fn doctest_unwrap_block() { check_doc_test( diff --git a/crates/ide-assists/src/tests/sourcegen.rs b/crates/ide-assists/src/tests/sourcegen.rs index 070b83d3c16b1..b4f50c7fb26a4 100644 --- a/crates/ide-assists/src/tests/sourcegen.rs +++ b/crates/ide-assists/src/tests/sourcegen.rs @@ -18,7 +18,7 @@ use super::check_doc_test; for assist in assists.iter() { for (idx, section) in assist.sections.iter().enumerate() { let test_id = - if idx == 0 { assist.id.clone() } else { format!("{}_{}", &assist.id, idx) }; + if idx == 0 { assist.id.clone() } else { format!("{}_{idx}", &assist.id) }; let test = format!( r######" #[test] @@ -95,8 +95,7 @@ impl Assist { let id = block.id; assert!( id.chars().all(|it| it.is_ascii_lowercase() || it == '_'), - "invalid assist id: {:?}", - id + "invalid assist id: {id:?}" ); let mut lines = block.contents.iter().peekable(); let location = sourcegen::Location { file: path.to_path_buf(), line: block.line }; @@ -175,7 +174,7 @@ impl fmt::Display for Assist { fn hide_hash_comments(text: &str) -> String { text.split('\n') // want final newline .filter(|&it| !(it.starts_with("# ") || it == "#")) - .map(|it| format!("{}\n", it)) + .map(|it| format!("{it}\n")) .collect() } @@ -190,6 +189,6 @@ fn reveal_hash_comments(text: &str) -> String { it } }) - .map(|it| format!("{}\n", it)) + .map(|it| format!("{it}\n")) .collect() } diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index 68c31b4f8e922..7add660649211 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -208,6 +208,23 @@ pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor } } +/// Escapes text that should be rendered as-is, typically those that we're copy-pasting what the +/// users wrote. 
+/// +/// This function should only be used when the text doesn't contain snippet **AND** the text +/// wouldn't be included in a snippet. +pub(crate) fn escape_non_snippet(text: &mut String) { + // While we *can* escape `}`, we don't really have to in this specific case. We only need to + // escape it inside `${}` to disambiguate it from the ending token of the syntax, but after we + // escape every occurrence of `$`, we wouldn't have `${}` in the first place. + // + // This will break if the text contains snippet or it will be included in a snippet (hence doc + // comment). Compare `fn escape(buf)` in `render_snippet()` above, where the escaped text is + // included in a snippet. + stdx::replace(text, '\\', r"\\"); + stdx::replace(text, '$', r"\$"); +} + pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize { node.children_with_tokens() .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) @@ -417,35 +434,67 @@ pub(crate) fn find_impl_block_end(impl_def: ast::Impl, buf: &mut String) -> Opti Some(end) } -// Generates the surrounding `impl Type { }` including type and lifetime -// parameters +/// Generates the surrounding `impl Type { }` including type and lifetime +/// parameters. pub(crate) fn generate_impl_text(adt: &ast::Adt, code: &str) -> String { - generate_impl_text_inner(adt, None, code) + generate_impl_text_inner(adt, None, true, code) } -// Generates the surrounding `impl for Type { }` including type -// and lifetime parameters +/// Generates the surrounding `impl for Type { }` including type +/// and lifetime parameters, with `` appended to `impl`'s generic parameters' bounds. +/// +/// This is useful for traits like `PartialEq`, since `impl PartialEq for U` often requires `T: PartialEq`. pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: &str) -> String { - generate_impl_text_inner(adt, Some(trait_text), code) + generate_impl_text_inner(adt, Some(trait_text), true, code) } -fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str) -> String { +/// Generates the surrounding `impl for Type { }` including type +/// and lifetime parameters, with `impl`'s generic parameters' bounds kept as-is. +/// +/// This is useful for traits like `From`, since `impl From for U` doesn't require `T: From`. +pub(crate) fn generate_trait_impl_text_intransitive( + adt: &ast::Adt, + trait_text: &str, + code: &str, +) -> String { + generate_impl_text_inner(adt, Some(trait_text), false, code) +} + +fn generate_impl_text_inner( + adt: &ast::Adt, + trait_text: Option<&str>, + trait_is_transitive: bool, + code: &str, +) -> String { // Ensure lifetime params are before type & const params let generic_params = adt.generic_param_list().map(|generic_params| { let lifetime_params = generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam); - let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| { - // remove defaults since they can't be specified in impls + let ty_or_const_params = generic_params.type_or_const_params().map(|param| { match param { ast::TypeOrConstParam::Type(param) => { let param = param.clone_for_update(); + // remove defaults since they can't be specified in impls param.remove_default(); - Some(ast::GenericParam::TypeParam(param)) + let mut bounds = + param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect()); + if let Some(trait_) = trait_text { + // Add the current trait to `bounds` if the trait is transitive, + // meaning `impl Trait for U` requires `T: Trait`. 
+ if trait_is_transitive { + bounds.push(make::type_bound(trait_)); + } + }; + // `{ty_param}: {bounds}` + let param = + make::type_param(param.name().unwrap(), make::type_bound_list(bounds)); + ast::GenericParam::TypeParam(param) } ast::TypeOrConstParam::Const(param) => { let param = param.clone_for_update(); + // remove defaults since they can't be specified in impls param.remove_default(); - Some(ast::GenericParam::ConstParam(param)) + ast::GenericParam::ConstParam(param) } } }); @@ -596,7 +645,7 @@ pub(crate) fn convert_reference_type( } fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option { - ty.is_copy(db).then(|| ReferenceConversionType::Copy) + ty.is_copy(db).then_some(ReferenceConversionType::Copy) } fn handle_as_ref_str( @@ -607,7 +656,7 @@ fn handle_as_ref_str( let str_type = hir::BuiltinType::str().ty(db); ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type]) - .then(|| ReferenceConversionType::AsRefStr) + .then_some(ReferenceConversionType::AsRefStr) } fn handle_as_ref_slice( @@ -619,7 +668,7 @@ fn handle_as_ref_slice( let slice_type = hir::Type::new_slice(type_argument); ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type]) - .then(|| ReferenceConversionType::AsRefSlice) + .then_some(ReferenceConversionType::AsRefSlice) } fn handle_dereferenced( @@ -630,7 +679,7 @@ fn handle_dereferenced( let type_argument = ty.type_arguments().next()?; ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument]) - .then(|| ReferenceConversionType::Dereferenced) + .then_some(ReferenceConversionType::Dereferenced) } fn handle_option_as_ref( diff --git a/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/crates/ide-assists/src/utils/gen_trait_fn_body.rs index 6c87e66c134d7..d4abb51259e99 100644 --- a/crates/ide-assists/src/utils/gen_trait_fn_body.rs +++ b/crates/ide-assists/src/utils/gen_trait_fn_body.rs @@ -419,7 +419,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { make::ext::path_from_idents(["Self", &variant.name()?.to_string()]) } - fn gen_tuple_field(field_name: &String) -> ast::Pat { + fn gen_tuple_field(field_name: &str) -> ast::Pat { ast::Pat::IdentPat(make::ident_pat(false, false, make::name(field_name))) } @@ -516,10 +516,18 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let expr = match arms.len() { 0 => eq_check, - _ => { - if n_cases > arms.len() { + arms_len => { + // Generate the fallback arm when this enum has >1 variants. + // The fallback arm will be `_ => false,` if we've already gone through every case where the variants of self and other match, + // and `_ => std::mem::discriminant(self) == std::mem::discriminant(other),` otherwise. 
+ if n_cases > 1 { let lhs = make::wildcard_pat().into(); - arms.push(make::match_arm(Some(lhs), None, eq_check)); + let rhs = if arms_len == n_cases { + make::expr_literal("false").into() + } else { + eq_check + }; + arms.push(make::match_arm(Some(lhs), None, rhs)); } let match_target = make::expr_tuple(vec![lhs_name, rhs_name]); diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index 296dfc14250f7..eb87d6c58262d 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -133,7 +133,7 @@ impl Completions { if incomplete_let && snippet.ends_with('}') { // complete block expression snippets with a trailing semicolon, if inside an incomplete let cov_mark::hit!(let_semi); - item.insert_snippet(cap, format!("{};", snippet)); + item.insert_snippet(cap, format!("{snippet};")); } else { item.insert_snippet(cap, snippet); } @@ -494,7 +494,7 @@ impl Completions { pattern_ctx, path_ctx, variant, - local_name.clone(), + local_name, None, )); } diff --git a/crates/ide-completion/src/completions/attribute.rs b/crates/ide-completion/src/completions/attribute.rs index d9fe94cb44ee1..bb950c76f8831 100644 --- a/crates/ide-completion/src/completions/attribute.rs +++ b/crates/ide-completion/src/completions/attribute.rs @@ -357,7 +357,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option> { Some( input_expressions .into_iter() - .filter_map(|(is_sep, group)| (!is_sep).then(|| group)) + .filter_map(|(is_sep, group)| (!is_sep).then_some(group)) .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join(""))) .collect::>(), ) @@ -371,9 +371,7 @@ fn attributes_are_sorted() { attrs.for_each(|next| { assert!( prev < next, - r#"ATTRIBUTES array is not sorted, "{}" should come after "{}""#, - prev, - next + r#"ATTRIBUTES array is not sorted, "{prev}" should come after "{next}""# ); prev = next; }); diff --git a/crates/ide-completion/src/completions/attribute/cfg.rs b/crates/ide-completion/src/completions/attribute/cfg.rs index 311060143b06a..7ef4ff30b56c7 100644 --- a/crates/ide-completion/src/completions/attribute/cfg.rs +++ b/crates/ide-completion/src/completions/attribute/cfg.rs @@ -11,7 +11,7 @@ use crate::{completions::Completions, context::CompletionContext, CompletionItem pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) { let add_completion = |item: &str| { let mut completion = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), item); - completion.insert_text(format!(r#""{}""#, item)); + completion.insert_text(format!(r#""{item}""#)); acc.add(completion.build()); }; @@ -29,7 +29,7 @@ pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) { Some("target_vendor") => KNOWN_VENDOR.iter().copied().for_each(add_completion), Some("target_endian") => ["little", "big"].into_iter().for_each(add_completion), Some(name) => ctx.krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| { - let insert_text = format!(r#""{}""#, s); + let insert_text = format!(r#""{s}""#); let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s); item.insert_text(insert_text); diff --git a/crates/ide-completion/src/completions/attribute/lint.rs b/crates/ide-completion/src/completions/attribute/lint.rs index 967f6ddd9a83b..818c3cfd5fe7f 100644 --- a/crates/ide-completion/src/completions/attribute/lint.rs +++ b/crates/ide-completion/src/completions/attribute/lint.rs @@ -51,7 +51,7 @@ pub(super) fn complete_lint( continue; } let 
label = match qual { - Some(qual) if !is_qualified => format!("{}::{}", qual, name), + Some(qual) if !is_qualified => format!("{qual}::{name}"), _ => name.to_owned(), }; let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label); diff --git a/crates/ide-completion/src/completions/dot.rs b/crates/ide-completion/src/completions/dot.rs index 02004ff7b6868..7c6e5e100f63f 100644 --- a/crates/ide-completion/src/completions/dot.rs +++ b/crates/ide-completion/src/completions/dot.rs @@ -32,12 +32,12 @@ pub(crate) fn complete_dot( complete_fields( acc, ctx, - &receiver_ty, + receiver_ty, |acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty), |acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty), ); } - complete_methods(ctx, &receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None)); + complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None)); } pub(crate) fn complete_undotted_self( diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index 09e95e53de63c..1002be21131d9 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -68,28 +68,26 @@ mod tests { &format!( r#" #[rustc_builtin_macro] - macro_rules! {} {{ + macro_rules! {macro_name} {{ ($var:literal) => {{ 0 }} }} fn main() {{ - let foo = {}!("CAR$0"); + let foo = {macro_name}!("CAR$0"); }} - "#, - macro_name, macro_name + "# ), &format!( r#" #[rustc_builtin_macro] - macro_rules! {} {{ + macro_rules! {macro_name} {{ ($var:literal) => {{ 0 }} }} fn main() {{ - let foo = {}!("CARGO_BIN_NAME"); + let foo = {macro_name}!("CARGO_BIN_NAME"); }} - "#, - macro_name, macro_name + "# ), ); } @@ -112,7 +110,7 @@ mod tests { "#; let completions = completion_list(fixture); - assert!(completions.is_empty(), "Completions weren't empty: {}", completions); + assert!(completions.is_empty(), "Completions weren't empty: {completions}"); } #[test] @@ -129,7 +127,7 @@ mod tests { "#; let completions = completion_list(fixture); - assert!(completions.is_empty(), "Completions weren't empty: {}", completions); + assert!(completions.is_empty(), "Completions weren't empty: {completions}"); } #[test] @@ -145,6 +143,6 @@ mod tests { "#; let completions = completion_list(fixture); - assert!(completions.is_empty(), "Completions weren't empty: {}", completions) + assert!(completions.is_empty(), "Completions weren't empty: {completions}") } } diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index 3192b21cfb2e2..cfe4787f734d7 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -64,7 +64,7 @@ pub(crate) fn complete_expr_path( acc.add_enum_variants(ctx, path_ctx, e); } - ctx.iterate_path_candidates(&ty, |item| { + ctx.iterate_path_candidates(ty, |item| { add_assoc_item(acc, item); }); diff --git a/crates/ide-completion/src/completions/fn_param.rs b/crates/ide-completion/src/completions/fn_param.rs index f0ecc595af33e..d8b8a190eb840 100644 --- a/crates/ide-completion/src/completions/fn_param.rs +++ b/crates/ide-completion/src/completions/fn_param.rs @@ -192,5 +192,5 @@ fn comma_wrapper(ctx: &CompletionContext<'_>) -> Option<(impl Fn(&str) -> String matches!(prev_token_kind, SyntaxKind::COMMA | SyntaxKind::L_PAREN | SyntaxKind::PIPE); let leading = if has_leading_comma { "" } else { ", " }; - Some((move |label: &_| (format!("{}{}{}", leading, label, 
trailing)), param.text_range())) + Some((move |label: &_| (format!("{leading}{label}{trailing}")), param.text_range())) } diff --git a/crates/ide-completion/src/completions/format_string.rs b/crates/ide-completion/src/completions/format_string.rs index 038bdb4279e07..5c46c5806e65e 100644 --- a/crates/ide-completion/src/completions/format_string.rs +++ b/crates/ide-completion/src/completions/format_string.rs @@ -13,7 +13,7 @@ pub(crate) fn format_string( original: &ast::String, expanded: &ast::String, ) { - if !is_format_string(&expanded) { + if !is_format_string(expanded) { return; } let cursor = ctx.position.offset; diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index 7384a3f2d80b4..9a060857e9e4c 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -37,7 +37,7 @@ use ide_db::{ traits::get_missing_assoc_items, SymbolKind, }; use syntax::{ - ast::{self, edit_in_place::AttrsOwnerEdit}, + ast::{self, edit_in_place::AttrsOwnerEdit, HasTypeBounds}, AstNode, SyntaxElement, SyntaxKind, TextRange, T, }; use text_edit::TextEdit; @@ -190,7 +190,7 @@ fn add_function_impl( }; let mut item = CompletionItem::new(completion_kind, replacement_range, label); - item.lookup_by(format!("fn {}", fn_name)) + item.lookup_by(format!("fn {fn_name}")) .set_documentation(func.docs(ctx.db)) .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() }); @@ -205,11 +205,11 @@ fn add_function_impl( let function_decl = function_declaration(&transformed_fn, source.file_id.is_macro()); match ctx.config.snippet_cap { Some(cap) => { - let snippet = format!("{} {{\n $0\n}}", function_decl); + let snippet = format!("{function_decl} {{\n $0\n}}"); item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet)); } None => { - let header = format!("{} {{", function_decl); + let header = format!("{function_decl} {{"); item.text_edit(TextEdit::replace(replacement_range, header)); } }; @@ -249,10 +249,10 @@ fn add_type_alias_impl( ) { let alias_name = type_alias.name(ctx.db).unescaped().to_smol_str(); - let label = format!("type {} =", alias_name); + let label = format!("type {alias_name} ="); let mut item = CompletionItem::new(SymbolKind::TypeAlias, replacement_range, label); - item.lookup_by(format!("type {}", alias_name)) + item.lookup_by(format!("type {alias_name}")) .set_documentation(type_alias.docs(ctx.db)) .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() }); @@ -265,10 +265,21 @@ fn add_type_alias_impl( }; let start = transformed_ty.syntax().text_range().start(); - let Some(end) = transformed_ty - .eq_token() - .map(|tok| tok.text_range().start()) - .or(transformed_ty.semicolon_token().map(|tok| tok.text_range().start())) else { return }; + + let end = if let Some(end) = + transformed_ty.colon_token().map(|tok| tok.text_range().start()) + { + end + } else if let Some(end) = transformed_ty.eq_token().map(|tok| tok.text_range().start()) + { + end + } else if let Some(end) = + transformed_ty.semicolon_token().map(|tok| tok.text_range().start()) + { + end + } else { + return; + }; let len = end - start; let mut decl = transformed_ty.syntax().text().slice(..len).to_string(); @@ -279,7 +290,7 @@ fn add_type_alias_impl( match ctx.config.snippet_cap { Some(cap) => { - let snippet = format!("{}$0;", decl); + let snippet = format!("{decl}$0;"); item.snippet_edit(cap, 
TextEdit::replace(replacement_range, snippet)); } None => { @@ -310,10 +321,10 @@ fn add_const_impl( }; let label = make_const_compl_syntax(&transformed_const, source.file_id.is_macro()); - let replacement = format!("{} ", label); + let replacement = format!("{label} "); let mut item = CompletionItem::new(SymbolKind::Const, replacement_range, label); - item.lookup_by(format!("const {}", const_name)) + item.lookup_by(format!("const {const_name}")) .set_documentation(const_.docs(ctx.db)) .set_relevance(CompletionRelevance { is_item_from_trait: true, @@ -322,7 +333,7 @@ fn add_const_impl( match ctx.config.snippet_cap { Some(cap) => item.snippet_edit( cap, - TextEdit::replace(replacement_range, format!("{}$0;", replacement)), + TextEdit::replace(replacement_range, format!("{replacement}$0;")), ), None => item.text_edit(TextEdit::replace(replacement_range, replacement)), }; @@ -834,11 +845,10 @@ trait Test {{ struct T; impl Test for T {{ - {} - {} + {hint} + {next_sibling} }} -"#, - hint, next_sibling +"# ), &format!( r#" @@ -850,11 +860,10 @@ trait Test {{ struct T; impl Test for T {{ - {} - {} + {completed} + {next_sibling} }} -"#, - completed, next_sibling +"# ), ) }; @@ -894,10 +903,9 @@ struct T; impl Foo for T {{ // Comment #[bar] - {} + {hint} }} -"#, - hint +"# ), &format!( r#" @@ -911,10 +919,9 @@ struct T; impl Foo for T {{ // Comment #[bar] - {} + {completed} }} -"#, - completed +"# ), ) }; diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index b9bd47f7da504..f4f37d77d81f3 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -5,7 +5,7 @@ mod format_like; use hir::{Documentation, HasAttrs}; use ide_db::{imports::insert_use::ImportScope, ty_filter::TryEnum, SnippetCap}; use syntax::{ - ast::{self, AstNode, AstToken}, + ast::{self, make, AstNode, AstToken}, SyntaxKind::{EXPR_STMT, STMT_LIST}, TextRange, TextSize, }; @@ -61,7 +61,7 @@ pub(crate) fn complete_postfix( let mut item = postfix_snippet( "drop", "fn drop(&mut self)", - &format!("drop($0{})", receiver_text), + &format!("drop($0{receiver_text})"), ); item.set_documentation(drop_fn.docs(ctx.db)); item.add_to(acc); @@ -76,14 +76,14 @@ pub(crate) fn complete_postfix( postfix_snippet( "ifl", "if let Ok {}", - &format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text), + &format!("if let Ok($1) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc); postfix_snippet( "while", "while let Ok {}", - &format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text), + &format!("while let Ok($1) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc); } @@ -91,46 +91,44 @@ pub(crate) fn complete_postfix( postfix_snippet( "ifl", "if let Some {}", - &format!("if let Some($1) = {} {{\n $0\n}}", receiver_text), + &format!("if let Some($1) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc); postfix_snippet( "while", "while let Some {}", - &format!("while let Some($1) = {} {{\n $0\n}}", receiver_text), + &format!("while let Some($1) = {receiver_text} {{\n $0\n}}"), ) .add_to(acc); } } } else if receiver_ty.is_bool() || receiver_ty.is_unknown() { - postfix_snippet("if", "if expr {}", &format!("if {} {{\n $0\n}}", receiver_text)) + postfix_snippet("if", "if expr {}", &format!("if {receiver_text} {{\n $0\n}}")) .add_to(acc); - postfix_snippet( - "while", - "while expr {}", - &format!("while {} {{\n $0\n}}", receiver_text), - ) - .add_to(acc); - postfix_snippet("not", "!expr", &format!("!{}", receiver_text)).add_to(acc); + 
postfix_snippet("while", "while expr {}", &format!("while {receiver_text} {{\n $0\n}}")) + .add_to(acc); + postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc); } else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() { if receiver_ty.impls_trait(ctx.db, trait_, &[]) { postfix_snippet( "for", "for ele in expr {}", - &format!("for ele in {} {{\n $0\n}}", receiver_text), + &format!("for ele in {receiver_text} {{\n $0\n}}"), ) .add_to(acc); } } - postfix_snippet("ref", "&expr", &format!("&{}", receiver_text)).add_to(acc); - postfix_snippet("refm", "&mut expr", &format!("&mut {}", receiver_text)).add_to(acc); + postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc); + postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc); // The rest of the postfix completions create an expression that moves an argument, // so it's better to consider references now to avoid breaking the compilation - let dot_receiver = include_references(dot_receiver); - let receiver_text = get_receiver_text(&dot_receiver, receiver_is_ambiguous_float_literal); + + let (dot_receiver, node_to_replace_with) = include_references(dot_receiver); + let receiver_text = + get_receiver_text(&node_to_replace_with, receiver_is_ambiguous_float_literal); let postfix_snippet = match build_postfix_snippet_builder(ctx, cap, &dot_receiver) { Some(it) => it, None => return, @@ -146,7 +144,7 @@ pub(crate) fn complete_postfix( postfix_snippet( "match", "match expr {}", - &format!("match {} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}", receiver_text), + &format!("match {receiver_text} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}"), ) .add_to(acc); } @@ -155,8 +153,7 @@ pub(crate) fn complete_postfix( "match", "match expr {}", &format!( - "match {} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}", - receiver_text + "match {receiver_text} {{\n Some(${{1:_}}) => {{$2}},\n None => {{$0}},\n}}" ), ) .add_to(acc); @@ -166,21 +163,21 @@ pub(crate) fn complete_postfix( postfix_snippet( "match", "match expr {}", - &format!("match {} {{\n ${{1:_}} => {{$0}},\n}}", receiver_text), + &format!("match {receiver_text} {{\n ${{1:_}} => {{$0}},\n}}"), ) .add_to(acc); } } - postfix_snippet("box", "Box::new(expr)", &format!("Box::new({})", receiver_text)).add_to(acc); - postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({})", receiver_text)).add_to(acc); // fixme - postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{})", receiver_text)).add_to(acc); - postfix_snippet("call", "function(expr)", &format!("${{1}}({})", receiver_text)).add_to(acc); + postfix_snippet("box", "Box::new(expr)", &format!("Box::new({receiver_text})")).add_to(acc); + postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({receiver_text})")).add_to(acc); // fixme + postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{receiver_text})")).add_to(acc); + postfix_snippet("call", "function(expr)", &format!("${{1}}({receiver_text})")).add_to(acc); if let Some(parent) = dot_receiver.syntax().parent().and_then(|p| p.parent()) { if matches!(parent.kind(), STMT_LIST | EXPR_STMT) { - postfix_snippet("let", "let", &format!("let $0 = {};", receiver_text)).add_to(acc); - postfix_snippet("letm", "let mut", &format!("let mut $0 = {};", receiver_text)) + postfix_snippet("let", "let", &format!("let $0 = {receiver_text};")).add_to(acc); + postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};")) .add_to(acc); } } @@ -210,14 +207,35 @@ fn get_receiver_text(receiver: 
&ast::Expr, receiver_is_ambiguous_float_literal: text.replace('\\', "\\\\").replace('$', "\\$") } -fn include_references(initial_element: &ast::Expr) -> ast::Expr { +fn include_references(initial_element: &ast::Expr) -> (ast::Expr, ast::Expr) { let mut resulting_element = initial_element.clone(); - while let Some(parent_ref_element) = - resulting_element.syntax().parent().and_then(ast::RefExpr::cast) + + while let Some(field_expr) = resulting_element.syntax().parent().and_then(ast::FieldExpr::cast) { - resulting_element = ast::Expr::from(parent_ref_element); + resulting_element = ast::Expr::from(field_expr); } - resulting_element + + let mut new_element_opt = initial_element.clone(); + + if let Some(first_ref_expr) = resulting_element.syntax().parent().and_then(ast::RefExpr::cast) { + if let Some(expr) = first_ref_expr.expr() { + resulting_element = expr; + } + + while let Some(parent_ref_element) = + resulting_element.syntax().parent().and_then(ast::RefExpr::cast) + { + resulting_element = ast::Expr::from(parent_ref_element); + + new_element_opt = make::expr_ref(new_element_opt, false); + } + } else { + // If we do not find any ref expressions, restore + // all the progress of tree climbing + resulting_element = initial_element.clone(); + } + + (resulting_element, new_element_opt) } fn build_postfix_snippet_builder<'ctx>( @@ -225,8 +243,7 @@ fn build_postfix_snippet_builder<'ctx>( cap: SnippetCap, receiver: &'ctx ast::Expr, ) -> Option Builder + 'ctx> { - let receiver_syntax = receiver.syntax(); - let receiver_range = ctx.sema.original_range_opt(receiver_syntax)?.range; + let receiver_range = ctx.sema.original_range_opt(receiver.syntax())?.range; if ctx.source_range().end() < receiver_range.start() { // This shouldn't happen, yet it does. I assume this might be due to an incorrect token mapping. 
return None; @@ -278,7 +295,7 @@ fn add_custom_postfix_completions( let body = snippet.postfix_snippet(receiver_text); let mut builder = postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body); - builder.documentation(Documentation::new(format!("```rust\n{}\n```", body))); + builder.documentation(Documentation::new(format!("```rust\n{body}\n```"))); for import in imports.into_iter() { builder.add_import(import); } @@ -549,7 +566,7 @@ fn main() { ControlFlow::Break('\\\\') } ); check_edit_with_config( - config.clone(), + config, "break", r#" //- minicore: try @@ -578,12 +595,12 @@ fn main() { check_edit( "format", r#"fn main() { "{some_var:?}".$0 }"#, - r#"fn main() { format!("{:?}", some_var) }"#, + r#"fn main() { format!("{some_var:?}") }"#, ); check_edit( "panic", r#"fn main() { "Panic with {a}".$0 }"#, - r#"fn main() { panic!("Panic with {}", a) }"#, + r#"fn main() { panic!("Panic with {a}") }"#, ); check_edit( "println", @@ -616,22 +633,55 @@ fn main() { #[test] fn postfix_custom_snippets_completion_for_references() { + // https://github.com/rust-lang/rust-analyzer/issues/7929 + + let snippet = Snippet::new( + &[], + &["ok".into()], + &["Ok(${receiver})".into()], + "", + &[], + crate::SnippetScope::Expr, + ) + .unwrap(); + check_edit_with_config( - CompletionConfig { - snippets: vec![Snippet::new( - &[], - &["ok".into()], - &["Ok(${receiver})".into()], - "", - &[], - crate::SnippetScope::Expr, - ) - .unwrap()], - ..TEST_CONFIG - }, + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, + "ok", + r#"fn main() { &&42.o$0 }"#, + r#"fn main() { Ok(&&42) }"#, + ); + + check_edit_with_config( + CompletionConfig { snippets: vec![snippet.clone()], ..TEST_CONFIG }, "ok", r#"fn main() { &&42.$0 }"#, r#"fn main() { Ok(&&42) }"#, ); + + check_edit_with_config( + CompletionConfig { snippets: vec![snippet], ..TEST_CONFIG }, + "ok", + r#" +struct A { + a: i32, +} + +fn main() { + let a = A {a :1}; + &a.a.$0 +} + "#, + r#" +struct A { + a: i32, +} + +fn main() { + let a = A {a :1}; + Ok(&a.a) +} + "#, + ); } } diff --git a/crates/ide-completion/src/completions/postfix/format_like.rs b/crates/ide-completion/src/completions/postfix/format_like.rs index b43bdb9ab9d1a..dfcc78e92308d 100644 --- a/crates/ide-completion/src/completions/postfix/format_like.rs +++ b/crates/ide-completion/src/completions/postfix/format_like.rs @@ -54,7 +54,11 @@ pub(crate) fn add_format_like_completions( if let Ok((out, exprs)) = parse_format_exprs(receiver_text.text()) { let exprs = with_placeholders(exprs); for (label, macro_name) in KINDS { - let snippet = format!(r#"{}({}, {})"#, macro_name, out, exprs.join(", ")); + let snippet = if exprs.is_empty() { + format!(r#"{}({})"#, macro_name, out) + } else { + format!(r#"{}({}, {})"#, macro_name, out, exprs.join(", ")) + }; postfix_snippet(label, macro_name, &snippet).add_to(acc); } @@ -72,16 +76,29 @@ mod tests { ("eprintln!", "{}", r#"eprintln!("{}", $1)"#), ( "log::info!", - "{} {expr} {} {2 + 2}", - r#"log::info!("{} {} {} {}", $1, expr, $2, 2 + 2)"#, + "{} {ident} {} {2 + 2}", + r#"log::info!("{} {ident} {} {}", $1, $2, 2 + 2)"#, ), - ("format!", "{expr:?}", r#"format!("{:?}", expr)"#), ]; for (kind, input, output) in test_vector { let (parsed_string, exprs) = parse_format_exprs(input).unwrap(); let exprs = with_placeholders(exprs); - let snippet = format!(r#"{}("{}", {})"#, kind, parsed_string, exprs.join(", ")); + let snippet = format!(r#"{kind}("{parsed_string}", {})"#, exprs.join(", ")); + assert_eq!(&snippet, output); + } + } 
+ + #[test] + fn test_into_suggestion_no_epxrs() { + let test_vector = &[ + ("println!", "{ident}", r#"println!("{ident}")"#), + ("format!", "{ident:?}", r#"format!("{ident:?}")"#), + ]; + + for (kind, input, output) in test_vector { + let (parsed_string, _exprs) = parse_format_exprs(input).unwrap(); + let snippet = format!(r#"{}("{}")"#, kind, parsed_string); assert_eq!(&snippet, output); } } diff --git a/crates/ide-completion/src/completions/record.rs b/crates/ide-completion/src/completions/record.rs index 5d96fbd30a81d..0521e735dedf1 100644 --- a/crates/ide-completion/src/completions/record.rs +++ b/crates/ide-completion/src/completions/record.rs @@ -124,7 +124,12 @@ fn complete_fields( #[cfg(test)] mod tests { - use crate::tests::check_edit; + use ide_db::SnippetCap; + + use crate::{ + tests::{check_edit, check_edit_with_config, TEST_CONFIG}, + CompletionConfig, + }; #[test] fn literal_struct_completion_edit() { @@ -151,6 +156,66 @@ fn baz() { ) } + #[test] + fn enum_variant_no_snippets() { + let conf = CompletionConfig { snippet_cap: SnippetCap::new(false), ..TEST_CONFIG }; + // tuple variant + check_edit_with_config( + conf.clone(), + "Variant()", + r#" +enum Enum { + Variant(usize), +} + +impl Enum { + fn new(u: usize) -> Self { + Self::Va$0 + } +} +"#, + r#" +enum Enum { + Variant(usize), +} + +impl Enum { + fn new(u: usize) -> Self { + Self::Variant + } +} +"#, + ); + + // record variant + check_edit_with_config( + conf, + "Variant{}", + r#" +enum Enum { + Variant{u: usize}, +} + +impl Enum { + fn new(u: usize) -> Self { + Self::Va$0 + } +} +"#, + r#" +enum Enum { + Variant{u: usize}, +} + +impl Enum { + fn new(u: usize) -> Self { + Self::Variant + } +} +"#, + ) + } + #[test] fn literal_struct_impl_self_completion() { check_edit( diff --git a/crates/ide-completion/src/completions/snippet.rs b/crates/ide-completion/src/completions/snippet.rs index 66adb4286373a..da1f0542d286f 100644 --- a/crates/ide-completion/src/completions/snippet.rs +++ b/crates/ide-completion/src/completions/snippet.rs @@ -141,7 +141,7 @@ fn add_custom_completions( }; let body = snip.snippet(); let mut builder = snippet(ctx, cap, trigger, &body); - builder.documentation(Documentation::new(format!("```rust\n{}\n```", body))); + builder.documentation(Documentation::new(format!("```rust\n{body}\n```"))); for import in imports.into_iter() { builder.add_import(import); } diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs index 8f9db2f94c204..37849c251a480 100644 --- a/crates/ide-completion/src/completions/type.rs +++ b/crates/ide-completion/src/completions/type.rs @@ -58,7 +58,7 @@ pub(crate) fn complete_type_path( trait_.items(ctx.sema.db).into_iter().for_each(|item| add_assoc_item(acc, item)) } Qualified::TypeAnchor { ty: Some(ty), trait_: None } => { - ctx.iterate_path_candidates(&ty, |item| { + ctx.iterate_path_candidates(ty, |item| { add_assoc_item(acc, item); }); diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 9850813a0ce1c..aa77f449530e5 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -19,7 +19,7 @@ use syntax::{ ast::{self, AttrKind, NameOrNameRef}, AstNode, SyntaxKind::{self, *}, - SyntaxToken, TextRange, TextSize, + SyntaxToken, TextRange, TextSize, T, }; use text_edit::Indel; @@ -569,6 +569,32 @@ impl<'a> CompletionContext<'a> { // completing on let original_token = original_file.syntax().token_at_offset(offset).left_biased()?; + // try to skip completions 
on path with invalid colons + // this approach works in normal path and inside token tree + match original_token.kind() { + T![:] => { + // return if no prev token before colon + let prev_token = original_token.prev_token()?; + + // only has a single colon + if prev_token.kind() != T![:] { + return None; + } + + // has 3 colon or 2 coloncolon in a row + // special casing this as per discussion in https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1031845205 + // and https://github.com/rust-lang/rust-analyzer/pull/13611#discussion_r1032812751 + if prev_token + .prev_token() + .map(|t| t.kind() == T![:] || t.kind() == T![::]) + .unwrap_or(false) + { + return None; + } + } + _ => {} + } + let AnalysisResult { analysis, expected: (expected_type, expected_name), diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index c142a7305f9e9..e34824e22eac1 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -226,7 +226,7 @@ fn analyze( find_node_at_offset(&file_with_fake_ident, offset) { let parent = name_ref.syntax().parent()?; - let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?; + let (mut nameref_ctx, _) = classify_name_ref(sema, &original_file, name_ref, parent)?; if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind { path_ctx.kind = PathKind::Derive { existing_derives: sema @@ -277,7 +277,7 @@ fn analyze( return Some((analysis, (None, None), QualifierCtx::default())); } }; - let expected = expected_type_and_name(sema, &self_token, &name_like); + let expected = expected_type_and_name(sema, self_token, &name_like); let mut qual_ctx = QualifierCtx::default(); let analysis = match name_like { ast::NameLike::Lifetime(lifetime) => { @@ -286,7 +286,7 @@ fn analyze( ast::NameLike::NameRef(name_ref) => { let parent = name_ref.syntax().parent()?; let (nameref_ctx, qualifier_ctx) = - classify_name_ref(sema, &original_file, name_ref, parent.clone())?; + classify_name_ref(sema, &original_file, name_ref, parent)?; qual_ctx = qualifier_ctx; CompletionAnalysis::NameRef(nameref_ctx) } @@ -374,7 +374,7 @@ fn expected_type_and_name( ast::ArgList(_) => { cov_mark::hit!(expected_type_fn_param); ActiveParameter::at_token( - &sema, + sema, token.clone(), ).map(|ap| { let name = ap.ident().map(NameOrNameRef::Name); @@ -507,7 +507,7 @@ fn classify_lifetime( _ => LifetimeKind::Lifetime, } }; - let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start()); + let lifetime = find_node_at_offset(original_file, lifetime.syntax().text_range().start()); Some(LifetimeContext { lifetime, kind }) } @@ -548,7 +548,7 @@ fn classify_name( _ => return None, } }; - let name = find_node_at_offset(&original_file, name.syntax().text_range().start()); + let name = find_node_at_offset(original_file, name.syntax().text_range().start()); Some(NameContext { name, kind }) } @@ -558,7 +558,7 @@ fn classify_name_ref( name_ref: ast::NameRef, parent: SyntaxNode, ) -> Option<(NameRefContext, QualifierCtx)> { - let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); + let nameref = find_node_at_offset(original_file, name_ref.syntax().text_range().start()); let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default()); @@ -585,11 +585,7 @@ fn classify_name_ref( original_file, &record_field.parent_record_pat(), ), - ..pattern_context_for( - sema, - original_file, - 
record_field.parent_record_pat().clone().into(), - ) + ..pattern_context_for(sema, original_file, record_field.parent_record_pat().into()) }); return Some(make_res(kind)); } diff --git a/crates/ide-completion/src/context/tests.rs b/crates/ide-completion/src/context/tests.rs index 50845b3881f43..a654a5db57445 100644 --- a/crates/ide-completion/src/context/tests.rs +++ b/crates/ide-completion/src/context/tests.rs @@ -19,7 +19,7 @@ fn check_expected_type_and_name(ra_fixture: &str, expect: Expect) { let name = completion_context.expected_name.map_or_else(|| "?".to_owned(), |name| name.to_string()); - expect.assert_eq(&format!("ty: {}, name: {}", ty, name)); + expect.assert_eq(&format!("ty: {ty}, name: {name}")); } #[test] diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index 27c3ccb35a1ea..657eab5b1b830 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -453,10 +453,10 @@ impl Builder { // snippets can have multiple imports, but normal completions only have up to one if let Some(original_path) = import_edit.original_path.as_ref() { lookup = lookup.or_else(|| Some(label.clone())); - label = SmolStr::from(format!("{} (use {})", label, original_path)); + label = SmolStr::from(format!("{label} (use {original_path})")); } } else if let Some(trait_name) = self.trait_name { - label = SmolStr::from(format!("{} (as {})", label, trait_name)); + label = SmolStr::from(format!("{label} (as {trait_name})")); } let text_edit = match self.text_edit { diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index 9d0044e55f598..4b48ec6bc3393 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -164,7 +164,6 @@ pub fn completions( completions::vis::complete_vis_path(&mut completions, ctx, path_ctx, has_in_token); } } - // prevent `(` from triggering unwanted completion noise return Some(completions.into()); } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 86302cb0678f1..e48d1aecd04fa 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -131,7 +131,7 @@ pub(crate) fn render_field( item.detail(ty.display(ctx.db()).to_string()) .set_documentation(field.docs(ctx.db())) .set_deprecated(is_deprecated) - .lookup_by(name.clone()); + .lookup_by(name); item.insert_text(field_with_receiver(receiver.as_ref(), &escaped_name)); if let Some(receiver) = &dot_access.receiver { if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) { @@ -144,8 +144,7 @@ pub(crate) fn render_field( } fn field_with_receiver(receiver: Option<&hir::Name>, field_name: &str) -> SmolStr { - receiver - .map_or_else(|| field_name.into(), |receiver| format!("{}.{}", receiver, field_name).into()) + receiver.map_or_else(|| field_name.into(), |receiver| format!("{receiver}.{field_name}").into()) } pub(crate) fn render_tuple_field( @@ -306,7 +305,7 @@ fn render_resolution_path( item.lookup_by(name.clone()) .label(SmolStr::from_iter([&name, "<…>"])) .trigger_call_info() - .insert_snippet(cap, format!("{}<$0>", local_name)); + .insert_snippet(cap, format!("{local_name}<$0>")); } } } @@ -528,13 +527,13 @@ mod tests { let tag = it.kind().tag(); let relevance = display_relevance(it.relevance()); - items.push(format!("{} {} {}\n", tag, it.label(), relevance)); + items.push(format!("{tag} {} {relevance}\n", it.label())); if let Some((mutability, _offset, relevance)) = it.ref_match() { let label = format!("&{}{}", 
mutability.as_keyword_for_ref(), it.label()); let relevance = display_relevance(relevance); - items.push(format!("{} {} {}\n", tag, label, relevance)); + items.push(format!("{tag} {label} {relevance}\n")); } items @@ -563,7 +562,7 @@ mod tests { .filter_map(|(cond, desc)| if cond { Some(desc) } else { None }) .join("+"); - format!("[{}]", relevance_factors) + format!("[{relevance_factors}]") } } diff --git a/crates/ide-completion/src/render/const_.rs b/crates/ide-completion/src/render/const_.rs index 93ea825e00427..70b19988ca733 100644 --- a/crates/ide-completion/src/render/const_.rs +++ b/crates/ide-completion/src/render/const_.rs @@ -16,7 +16,7 @@ fn render(ctx: RenderContext<'_>, const_: hir::Const) -> Option let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); let detail = const_.display(db).to_string(); - let mut item = CompletionItem::new(SymbolKind::Const, ctx.source_range(), name.clone()); + let mut item = CompletionItem::new(SymbolKind::Const, ctx.source_range(), name); item.set_documentation(ctx.docs(const_)) .set_deprecated(ctx.is_deprecated(const_) || ctx.is_deprecated_assoc_item(const_)) .detail(detail) diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 3761208460476..197592e78ce26 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -53,7 +53,7 @@ fn render( let (call, escaped_call) = match &func_kind { FuncKind::Method(_, Some(receiver)) => ( format!("{}.{}", receiver.unescaped(), name.unescaped()).into(), - format!("{}.{}", receiver, name).into(), + format!("{receiver}.{name}").into(), ), _ => (name.unescaped().to_smol_str(), name.to_smol_str()), }; @@ -162,7 +162,7 @@ pub(super) fn add_call_parens<'b>( cov_mark::hit!(inserts_parens_for_function_calls); let (snippet, label_suffix) = if self_param.is_none() && params.is_empty() { - (format!("{}()$0", escaped_name), "()") + (format!("{escaped_name}()$0"), "()") } else { builder.trigger_call_info(); let snippet = if let Some(CallableSnippets::FillArguments) = ctx.config.callable { @@ -174,7 +174,7 @@ pub(super) fn add_call_parens<'b>( let smol_str = n.to_smol_str(); let text = smol_str.as_str().trim_start_matches('_'); let ref_ = ref_of_param(ctx, text, param.ty()); - f(&format_args!("${{{}:{}{}}}", index + offset, ref_, text)) + f(&format_args!("${{{}:{ref_}{text}}}", index + offset)) } None => { let name = match param.ty().as_adt() { @@ -185,7 +185,7 @@ pub(super) fn add_call_parens<'b>( .map(|s| to_lower_snake_case(s.as_str())) .unwrap_or_else(|| "_".to_string()), }; - f(&format_args!("${{{}:{}}}", index + offset, name)) + f(&format_args!("${{{}:{name}}}", index + offset)) } } }); @@ -200,12 +200,12 @@ pub(super) fn add_call_parens<'b>( ) } None => { - format!("{}({})$0", escaped_name, function_params_snippet) + format!("{escaped_name}({function_params_snippet})$0") } } } else { cov_mark::hit!(suppress_arg_snippets); - format!("{}($0)", escaped_name) + format!("{escaped_name}($0)") }; (snippet, "(…)") diff --git a/crates/ide-completion/src/render/literal.rs b/crates/ide-completion/src/render/literal.rs index 0c791ac570c56..64dab02f7c5ce 100644 --- a/crates/ide-completion/src/render/literal.rs +++ b/crates/ide-completion/src/render/literal.rs @@ -84,7 +84,7 @@ fn render( } _ => RenderedLiteral { literal: escaped_qualified_name.clone(), - detail: escaped_qualified_name.clone(), + detail: escaped_qualified_name, }, }; @@ -96,7 +96,7 @@ fn render( if !should_add_parens { kind = 
StructKind::Unit; } - let label = format_literal_label(&qualified_name, kind); + let label = format_literal_label(&qualified_name, kind, snippet_cap); let lookup = if qualified { format_literal_lookup(&short_qualified_name.to_string(), kind) } else { diff --git a/crates/ide-completion/src/render/macro_.rs b/crates/ide-completion/src/render/macro_.rs index eabd0bd17d65f..ffcad1185aa41 100644 --- a/crates/ide-completion/src/render/macro_.rs +++ b/crates/ide-completion/src/render/macro_.rs @@ -66,7 +66,7 @@ fn render( match ctx.snippet_cap() { Some(cap) if needs_bang && !has_call_parens => { - let snippet = format!("{}!{}$0{}", escaped_name, bra, ket); + let snippet = format!("{escaped_name}!{bra}$0{ket}"); let lookup = banged_name(&name); item.insert_snippet(cap, snippet).lookup_by(lookup); } diff --git a/crates/ide-completion/src/render/pattern.rs b/crates/ide-completion/src/render/pattern.rs index c845ff21aaba4..21b4bc2174bee 100644 --- a/crates/ide-completion/src/render/pattern.rs +++ b/crates/ide-completion/src/render/pattern.rs @@ -33,7 +33,7 @@ pub(crate) fn render_struct_pat( let name = local_name.unwrap_or_else(|| strukt.name(ctx.db())); let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str()); let kind = strukt.kind(ctx.db()); - let label = format_literal_label(name.as_str(), kind); + let label = format_literal_label(name.as_str(), kind, ctx.snippet_cap()); let lookup = format_literal_lookup(name.as_str(), kind); let pat = render_pat(&ctx, pattern_ctx, &escaped_name, kind, &visible_fields, fields_omitted)?; @@ -67,7 +67,7 @@ pub(crate) fn render_variant_pat( } _ => { let kind = variant.kind(ctx.db()); - let label = format_literal_label(name.as_str(), kind); + let label = format_literal_label(name.as_str(), kind, ctx.snippet_cap()); let lookup = format_literal_lookup(name.as_str(), kind); let pat = render_pat( &ctx, diff --git a/crates/ide-completion/src/render/type_alias.rs b/crates/ide-completion/src/render/type_alias.rs index de919429f2f95..fbe120d2ac949 100644 --- a/crates/ide-completion/src/render/type_alias.rs +++ b/crates/ide-completion/src/render/type_alias.rs @@ -40,7 +40,7 @@ fn render( }; let detail = type_alias.display(db).to_string(); - let mut item = CompletionItem::new(SymbolKind::TypeAlias, ctx.source_range(), name.clone()); + let mut item = CompletionItem::new(SymbolKind::TypeAlias, ctx.source_range(), name); item.set_documentation(ctx.docs(type_alias)) .set_deprecated(ctx.is_deprecated(type_alias) || ctx.is_deprecated_assoc_item(type_alias)) .detail(detail) diff --git a/crates/ide-completion/src/render/union_literal.rs b/crates/ide-completion/src/render/union_literal.rs index 54e97dd57ba82..1b09ad1731f9d 100644 --- a/crates/ide-completion/src/render/union_literal.rs +++ b/crates/ide-completion/src/render/union_literal.rs @@ -24,7 +24,7 @@ pub(crate) fn render_union_literal( Some(p) => (p.unescaped().to_string(), p.to_string()), None => (name.unescaped().to_string(), name.to_string()), }; - let label = format_literal_label(&name.to_smol_str(), StructKind::Record); + let label = format_literal_label(&name.to_smol_str(), StructKind::Record, ctx.snippet_cap()); let lookup = format_literal_lookup(&name.to_smol_str(), StructKind::Record); let mut item = CompletionItem::new( CompletionItemKind::SymbolKind(SymbolKind::Union), @@ -68,7 +68,7 @@ pub(crate) fn render_union_literal( item.set_documentation(ctx.docs(un)) .set_deprecated(ctx.is_deprecated(un)) - .detail(&detail) + .detail(detail) .set_relevance(ctx.completion_relevance()); match 
ctx.snippet_cap() { diff --git a/crates/ide-completion/src/render/variant.rs b/crates/ide-completion/src/render/variant.rs index 24e6abdc9ad60..55c55725be4f9 100644 --- a/crates/ide-completion/src/render/variant.rs +++ b/crates/ide-completion/src/render/variant.rs @@ -22,6 +22,9 @@ pub(crate) fn render_record_lit( fields: &[hir::Field], path: &str, ) -> RenderedLiteral { + if snippet_cap.is_none() { + return RenderedLiteral { literal: path.to_string(), detail: path.to_string() }; + } let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| { if snippet_cap.is_some() { f(&format_args!("{}: ${{{}:()}}", field.name(db), idx + 1)) @@ -35,8 +38,8 @@ pub(crate) fn render_record_lit( }); RenderedLiteral { - literal: format!("{} {{ {} }}", path, completions), - detail: format!("{} {{ {} }}", path, types), + literal: format!("{path} {{ {completions} }}"), + detail: format!("{path} {{ {types} }}"), } } @@ -48,6 +51,9 @@ pub(crate) fn render_tuple_lit( fields: &[hir::Field], path: &str, ) -> RenderedLiteral { + if snippet_cap.is_none() { + return RenderedLiteral { literal: path.to_string(), detail: path.to_string() }; + } let completions = fields.iter().enumerate().format_with(", ", |(idx, _), f| { if snippet_cap.is_some() { f(&format_args!("${{{}:()}}", idx + 1)) @@ -59,8 +65,8 @@ pub(crate) fn render_tuple_lit( let types = fields.iter().format_with(", ", |field, f| f(&field.ty(db).display(db))); RenderedLiteral { - literal: format!("{}({})", path, completions), - detail: format!("{}({})", path, types), + literal: format!("{path}({completions})"), + detail: format!("{path}({types})"), } } @@ -87,7 +93,14 @@ pub(crate) fn visible_fields( } /// Format a struct, etc. literal option for display in the completions menu. -pub(crate) fn format_literal_label(name: &str, kind: StructKind) -> SmolStr { +pub(crate) fn format_literal_label( + name: &str, + kind: StructKind, + snippet_cap: Option, +) -> SmolStr { + if snippet_cap.is_none() { + return name.into(); + } match kind { StructKind::Tuple => SmolStr::from_iter([name, "(…)"]), StructKind::Record => SmolStr::from_iter([name, " {…}"]), diff --git a/crates/ide-completion/src/snippet.rs b/crates/ide-completion/src/snippet.rs index f3b8eae4fe8cd..343719c53694e 100644 --- a/crates/ide-completion/src/snippet.rs +++ b/crates/ide-completion/src/snippet.rs @@ -199,7 +199,7 @@ fn validate_snippet( ) -> Option<(Box<[GreenNode]>, String, Option>)> { let mut imports = Vec::with_capacity(requires.len()); for path in requires.iter() { - let use_path = ast::SourceFile::parse(&format!("use {};", path)) + let use_path = ast::SourceFile::parse(&format!("use {path};")) .syntax_node() .descendants() .find_map(ast::Path::cast)?; diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs index 9e2beb9ee3288..abe14e48e2236 100644 --- a/crates/ide-completion/src/tests.rs +++ b/crates/ide-completion/src/tests.rs @@ -86,7 +86,7 @@ pub(crate) fn completion_list_no_kw(ra_fixture: &str) -> String { } pub(crate) fn completion_list_no_kw_with_private_editable(ra_fixture: &str) -> String { - let mut config = TEST_CONFIG.clone(); + let mut config = TEST_CONFIG; config.enable_private_editable = true; completion_list_with_config(config, ra_fixture, false, None) } @@ -153,7 +153,7 @@ fn render_completion_list(completions: Vec) -> String { .into_iter() .map(|it| { let tag = it.kind().tag(); - let var_name = format!("{} {}", tag, it.label()); + let var_name = format!("{tag} {}", it.label()); let mut buf = var_name; if let Some(detail) = 
it.detail() { let width = label_width.saturating_sub(monospace_width(it.label())); @@ -183,12 +183,12 @@ pub(crate) fn check_edit_with_config( let ra_fixture_after = trim_indent(ra_fixture_after); let (db, position) = position(ra_fixture_before); let completions: Vec = - crate::completions(&db, &config, position, None).unwrap().into(); + crate::completions(&db, &config, position, None).unwrap(); let (completion,) = completions .iter() .filter(|it| it.lookup() == what) .collect_tuple() - .unwrap_or_else(|| panic!("can't find {:?} completion in {:#?}", what, completions)); + .unwrap_or_else(|| panic!("can't find {what:?} completion in {completions:#?}")); let mut actual = db.file_text(position.file_id).to_string(); let mut combined_edit = completion.text_edit().to_owned(); diff --git a/crates/ide-completion/src/tests/attribute.rs b/crates/ide-completion/src/tests/attribute.rs index 1578ba2c37712..4e60820dd6d60 100644 --- a/crates/ide-completion/src/tests/attribute.rs +++ b/crates/ide-completion/src/tests/attribute.rs @@ -607,6 +607,30 @@ fn attr_in_source_file_end() { ); } +#[test] +fn invalid_path() { + check( + r#" +//- proc_macros: identity +#[proc_macros:::$0] +struct Foo; +"#, + expect![[r#""#]], + ); + + check( + r#" +//- minicore: derive, copy +mod foo { + pub use Copy as Bar; +} +#[derive(foo:::::$0)] +struct Foo; +"#, + expect![""], + ); +} + mod cfg { use super::*; diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index 8e26d889f9b63..043f552bd8a4c 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -4,7 +4,7 @@ use expect_test::{expect, Expect}; use crate::tests::{check_edit, completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}")); expect.assert_eq(&actual) } diff --git a/crates/ide-completion/src/tests/item.rs b/crates/ide-completion/src/tests/item.rs index 409413c1dcdb1..3ef2a7c942bce 100644 --- a/crates/ide-completion/src/tests/item.rs +++ b/crates/ide-completion/src/tests/item.rs @@ -7,7 +7,7 @@ use expect_test::{expect, Expect}; use crate::tests::{completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}")); expect.assert_eq(&actual) } diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs index 8ed6cb3cf867e..b62b988885d08 100644 --- a/crates/ide-completion/src/tests/item_list.rs +++ b/crates/ide-completion/src/tests/item_list.rs @@ -1,10 +1,10 @@ //! Completion tests for item list position. 
use expect_test::{expect, Expect}; -use crate::tests::{completion_list, BASE_ITEMS_FIXTURE}; +use crate::tests::{check_edit, completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}")); expect.assert_eq(&actual) } @@ -277,3 +277,91 @@ fn after_unit_struct() { "#]], ); } + +#[test] +fn type_in_impl_trait() { + check_edit( + "type O", + r" +struct A; +trait B { +type O: ?Sized; +} +impl B for A { +$0 +} +", + r#" +struct A; +trait B { +type O: ?Sized; +} +impl B for A { +type O = $0; +} +"#, + ); + check_edit( + "type O", + r" +struct A; +trait B { +type O; +} +impl B for A { +$0 +} +", + r#" +struct A; +trait B { +type O; +} +impl B for A { +type O = $0; +} +"#, + ); + check_edit( + "type O", + r" +struct A; +trait B { +type O: ?Sized = u32; +} +impl B for A { +$0 +} +", + r#" +struct A; +trait B { +type O: ?Sized = u32; +} +impl B for A { +type O = $0; +} +"#, + ); + check_edit( + "type O", + r" +struct A; +trait B { +type O = u32; +} +impl B for A { +$0 +} +", + r" +struct A; +trait B { +type O = u32; +} +impl B for A { +type O = $0; +} +", + ) +} diff --git a/crates/ide-completion/src/tests/pattern.rs b/crates/ide-completion/src/tests/pattern.rs index db8bef66405ef..ad9254e7f2ecf 100644 --- a/crates/ide-completion/src/tests/pattern.rs +++ b/crates/ide-completion/src/tests/pattern.rs @@ -9,7 +9,7 @@ fn check_empty(ra_fixture: &str, expect: Expect) { } fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}\n{ra_fixture}")); expect.assert_eq(&actual) } diff --git a/crates/ide-completion/src/tests/predicate.rs b/crates/ide-completion/src/tests/predicate.rs index a8676e2f24787..2656a4d545e66 100644 --- a/crates/ide-completion/src/tests/predicate.rs +++ b/crates/ide-completion/src/tests/predicate.rs @@ -4,7 +4,7 @@ use expect_test::{expect, Expect}; use crate::tests::{completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}\n{ra_fixture}")); expect.assert_eq(&actual) } diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index 033dc99c26cf0..cad4af4937de5 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -2,13 +2,22 @@ use expect_test::{expect, Expect}; -use crate::tests::{check_edit, completion_list_no_kw}; +use crate::tests::{check_edit, completion_list_no_kw, completion_list_with_trigger_character}; fn check(ra_fixture: &str, expect: Expect) { let actual = completion_list_no_kw(ra_fixture); expect.assert_eq(&actual) } +pub(crate) fn check_with_trigger_character( + ra_fixture: &str, + trigger_character: Option, + expect: Expect, +) { + let actual = completion_list_with_trigger_character(ra_fixture, trigger_character); + expect.assert_eq(&actual) +} + #[test] fn completes_if_prefix_is_keyword() { check_edit( @@ -893,3 +902,82 @@ fn f() { "#]], ); } + +#[test] +fn completes_after_colon_with_trigger() { + check_with_trigger_character( + r#" +//- minicore: option +fn foo { ::$0 } +"#, + Some(':'), + expect![[r#" + md core + "#]], + ); + check_with_trigger_character( + r#" +//- minicore: option +fn foo { /* test 
*/::$0 } +"#, + Some(':'), + expect![[r#" + md core + "#]], + ); + + check_with_trigger_character( + r#" +fn foo { crate::$0 } +"#, + Some(':'), + expect![[r#" + fn foo() fn() + "#]], + ); + + check_with_trigger_character( + r#" +fn foo { crate:$0 } +"#, + Some(':'), + expect![""], + ); +} + +#[test] +fn completes_after_colon_without_trigger() { + check_with_trigger_character( + r#" +fn foo { crate::$0 } +"#, + None, + expect![[r#" + fn foo() fn() + "#]], + ); + + check_with_trigger_character( + r#" +fn foo { crate:$0 } +"#, + None, + expect![""], + ); +} + +#[test] +fn no_completions_in_invalid_path() { + check( + r#" +fn foo { crate:::$0 } +"#, + expect![""], + ); + check( + r#" +fn foo { crate::::$0 } +"#, + expect![""], + ) +} diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs index f0b7726c51d9b..c3f4fb4d1817f 100644 --- a/crates/ide-completion/src/tests/type_pos.rs +++ b/crates/ide-completion/src/tests/type_pos.rs @@ -4,7 +4,7 @@ use expect_test::{expect, Expect}; use crate::tests::{completion_list, BASE_ITEMS_FIXTURE}; fn check(ra_fixture: &str, expect: Expect) { - let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture)); + let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}\n{ra_fixture}")); expect.assert_eq(&actual) } diff --git a/crates/ide-db/src/assists.rs b/crates/ide-db/src/assists.rs index da23763dc2924..8c6c1c44aa706 100644 --- a/crates/ide-db/src/assists.rs +++ b/crates/ide-db/src/assists.rs @@ -88,7 +88,7 @@ impl FromStr for AssistKind { "RefactorExtract" => Ok(AssistKind::RefactorExtract), "RefactorInline" => Ok(AssistKind::RefactorInline), "RefactorRewrite" => Ok(AssistKind::RefactorRewrite), - unknown => Err(format!("Unknown AssistKind: '{}'", unknown)), + unknown => Err(format!("Unknown AssistKind: '{unknown}'")), } } } diff --git a/crates/ide-db/src/imports/import_assets.rs b/crates/ide-db/src/imports/import_assets.rs index 40a6a3e8970fb..994d48385a0f7 100644 --- a/crates/ide-db/src/imports/import_assets.rs +++ b/crates/ide-db/src/imports/import_assets.rs @@ -367,7 +367,7 @@ fn import_for_item( let expected_import_end = if item_as_assoc(db, original_item).is_some() { unresolved_qualifier.to_string() } else { - format!("{}::{}", unresolved_qualifier, item_name(db, original_item)?) + format!("{unresolved_qualifier}::{}", item_name(db, original_item)?) 
}; if !import_path_string.contains(unresolved_first_segment) || !import_path_string.ends_with(&expected_import_end) diff --git a/crates/ide-db/src/imports/insert_use/tests.rs b/crates/ide-db/src/imports/insert_use/tests.rs index 59673af3204e6..b92e367f7e12a 100644 --- a/crates/ide-db/src/imports/insert_use/tests.rs +++ b/crates/ide-db/src/imports/insert_use/tests.rs @@ -1014,7 +1014,7 @@ fn check_with_config( .and_then(|it| ImportScope::find_insert_use_container(&it, sema)) .or_else(|| ImportScope::from(syntax)) .unwrap(); - let path = ast::SourceFile::parse(&format!("use {};", path)) + let path = ast::SourceFile::parse(&format!("use {path};")) .tree() .syntax() .descendants() diff --git a/crates/ide-db/src/imports/merge_imports.rs b/crates/ide-db/src/imports/merge_imports.rs index 371d642c15d1d..27b6321f3a7a5 100644 --- a/crates/ide-db/src/imports/merge_imports.rs +++ b/crates/ide-db/src/imports/merge_imports.rs @@ -91,7 +91,7 @@ fn recursive_merge(lhs: &ast::UseTree, rhs: &ast::UseTree, merge: MergeBehavior) .flat_map(|list| list.use_trees()) // We use Option here to early return from this function(this is not the // same as a `filter` op). - .map(|tree| merge.is_tree_allowed(&tree).then(|| tree)) + .map(|tree| merge.is_tree_allowed(&tree).then_some(tree)) .collect::>()?; use_trees.sort_unstable_by(|a, b| path_cmp_for_sort(a.path(), b.path())); for rhs_t in rhs.use_tree_list().into_iter().flat_map(|list| list.use_trees()) { diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index e0bc0f89f0a1d..156bbb634e4d5 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -165,7 +165,7 @@ pub trait LineIndexDatabase: base_db::SourceDatabase { fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc { let text = db.file_text(file_id); - Arc::new(LineIndex::new(&*text)) + Arc::new(LineIndex::new(&text)) } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 49b81265ea5be..cd4a7e1554cd7 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -197,7 +197,7 @@ fn rename_mod( // Module exists in a named file if !is_mod_rs { - let path = format!("{}.rs", new_name); + let path = format!("{new_name}.rs"); let dst = AnchoredPathBuf { anchor, path }; source_change.push_file_system_edit(FileSystemEdit::MoveFile { src: anchor, dst }) } @@ -207,9 +207,7 @@ fn rename_mod( // - Module has submodules defined in separate files let dir_paths = match (is_mod_rs, has_detached_child, module.name(sema.db)) { // Go up one level since the anchor is inside the dir we're trying to rename - (true, _, Some(mod_name)) => { - Some((format!("../{}", mod_name), format!("../{}", new_name))) - } + (true, _, Some(mod_name)) => Some((format!("../{mod_name}"), format!("../{new_name}"))), // The anchor is on the same level as target dir (false, true, Some(mod_name)) => Some((mod_name.to_string(), new_name.to_string())), _ => None, @@ -356,7 +354,7 @@ fn source_edit_from_name(edit: &mut TextEditBuilder, name: &ast::Name, new_name: // FIXME: instead of splitting the shorthand, recursively trigger a rename of the // other name https://github.com/rust-lang/rust-analyzer/issues/6547 - edit.insert(ident_pat.syntax().text_range().start(), format!("{}: ", new_name)); + edit.insert(ident_pat.syntax().text_range().start(), format!("{new_name}: ")); return true; } } @@ -414,7 +412,7 @@ fn source_edit_from_name_ref( // Foo { field } -> Foo { new_name: field } // ^ insert `new_name: ` let offset = 
name_ref.syntax().text_range().start(); - edit.insert(offset, format!("{}: ", new_name)); + edit.insert(offset, format!("{new_name}: ")); return true; } (None, Some(_)) if matches!(def, Definition::Local(_)) => { @@ -422,7 +420,7 @@ fn source_edit_from_name_ref( // Foo { field } -> Foo { field: new_name } // ^ insert `: new_name` let offset = name_ref.syntax().text_range().end(); - edit.insert(offset, format!(": {}", new_name)); + edit.insert(offset, format!(": {new_name}")); return true; } _ => (), diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index aa5d7e9beb54f..b2b0e49085c8c 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -608,7 +608,7 @@ impl<'a> FindUsages<'a> { let reference = FileReference { range, name: ast::NameLike::NameRef(name_ref.clone()), - category: is_name_ref_in_import(name_ref).then(|| ReferenceCategory::Import), + category: is_name_ref_in_import(name_ref).then_some(ReferenceCategory::Import), }; sink(file_id, reference) } @@ -787,7 +787,7 @@ impl ReferenceCategory { fn new(def: &Definition, r: &ast::NameRef) -> Option { // Only Locals and Fields have accesses for now. if !matches!(def, Definition::Local(_) | Definition::Field(_)) { - return is_name_ref_in_import(r).then(|| ReferenceCategory::Import); + return is_name_ref_in_import(r).then_some(ReferenceCategory::Import); } let mode = r.syntax().ancestors().find_map(|node| { diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs index bfb0031277105..c054cc1597968 100644 --- a/crates/ide-db/src/symbol_index.rs +++ b/crates/ide-db/src/symbol_index.rs @@ -206,7 +206,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec { } pub fn crate_symbols(db: &RootDatabase, krate: Crate, query: Query) -> Vec { - let _p = profile::span("crate_symbols").detail(|| format!("{:?}", query)); + let _p = profile::span("crate_symbols").detail(|| format!("{query:?}")); let modules = krate.modules(db); let indices: Vec<_> = modules diff --git a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs index 313346ee13153..fcef71fb74e7b 100644 --- a/crates/ide-db/src/syntax_helpers/format_string_exprs.rs +++ b/crates/ide-db/src/syntax_helpers/format_string_exprs.rs @@ -140,8 +140,8 @@ pub fn parse_format_exprs(input: &str) -> Result<(String, Vec), ()> { output.push_str(trimmed); } else if matches!(state, State::Expr) { extracted_expressions.push(Arg::Expr(trimmed.into())); - } else { - extracted_expressions.push(Arg::Ident(trimmed.into())); + } else if matches!(state, State::Ident) { + output.push_str(trimmed); } output.push(chr); @@ -205,7 +205,7 @@ mod tests { fn check(input: &str, expect: &Expect) { let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_string(), vec![])); let outcome_repr = if !exprs.is_empty() { - format!("{}; {}", output, with_placeholders(exprs).join(", ")) + format!("{output}; {}", with_placeholders(exprs).join(", ")) } else { output }; @@ -218,9 +218,9 @@ mod tests { let test_vector = &[ ("no expressions", expect![["no expressions"]]), (r"no expressions with \$0$1", expect![r"no expressions with \\\$0\$1"]), - ("{expr} is {2 + 2}", expect![["{} is {}; expr, 2 + 2"]]), - ("{expr:?}", expect![["{:?}; expr"]]), - ("{expr:1$}", expect![[r"{:1\$}; expr"]]), + ("{expr} is {2 + 2}", expect![["{expr} is {}; 2 + 2"]]), + ("{expr:?}", expect![["{expr:?}"]]), + ("{expr:1$}", expect![[r"{expr:1\$}"]]), ("{:1$}", expect![[r"{:1\$}; $1"]]), ("{:>padding$}", 
expect![[r"{:>padding\$}; $1"]]), ("{}, {}, {0}", expect![[r"{}, {}, {0}; $1, $2"]]), @@ -230,8 +230,8 @@ mod tests { ("malformed}", expect![["-"]]), ("{{correct", expect![["{{correct"]]), ("correct}}", expect![["correct}}"]]), - ("{correct}}}", expect![["{}}}; correct"]]), - ("{correct}}}}}", expect![["{}}}}}; correct"]]), + ("{correct}}}", expect![["{correct}}}"]]), + ("{correct}}}}}", expect![["{correct}}}}}"]]), ("{incorrect}}", expect![["-"]]), ("placeholders {} {}", expect![["placeholders {} {}; $1, $2"]]), ("mixed {} {2 + 2} {}", expect![["mixed {} {} {}; $1, 2 + 2, $2"]]), @@ -239,7 +239,7 @@ mod tests { "{SomeStruct { val_a: 0, val_b: 1 }}", expect![["{}; SomeStruct { val_a: 0, val_b: 1 }"]], ), - ("{expr:?} is {2.32f64:.5}", expect![["{:?} is {:.5}; expr, 2.32f64"]]), + ("{expr:?} is {2.32f64:.5}", expect![["{expr:?} is {:.5}; 2.32f64"]]), ( "{SomeStruct { val_a: 0, val_b: 1 }:?}", expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]], @@ -262,8 +262,6 @@ mod tests { .unwrap() .1, vec![ - Arg::Ident("_ident".to_owned()), - Arg::Ident("r#raw_ident".to_owned()), Arg::Expr("expr.obj".to_owned()), Arg::Expr("name {thing: 42}".to_owned()), Arg::Placeholder diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index 39710b8f13eb5..a34dc1b69507e 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -173,7 +173,8 @@ pub fn walk_pat(pat: &ast::Pat, cb: &mut dyn FnMut(ast::Pat)) { } /// Preorder walk all the type's sub types. -pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type)) { +// FIXME: Make the control flow more proper +pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type) -> bool) { let mut preorder = ty.syntax().preorder(); while let Some(event) = preorder.next() { let node = match event { @@ -184,10 +185,12 @@ pub fn walk_ty(ty: &ast::Type, cb: &mut dyn FnMut(ast::Type)) { match ast::Type::cast(node) { Some(ty @ ast::Type::MacroType(_)) => { preorder.skip_subtree(); - cb(ty) + cb(ty); } Some(ty) => { - cb(ty); + if cb(ty) { + preorder.skip_subtree(); + } } // skip const args None if ast::ConstArg::can_cast(kind) => { @@ -252,6 +255,11 @@ pub fn is_pattern_cond(expr: ast::Expr) -> bool { /// Note that modifying the tree while iterating it will cause undefined iteration which might /// potentially results in an out of bounds panic. 
pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { + let walk_loop = |cb: &mut dyn FnMut(&ast::Expr), label, body: Option| { + for_each_break_expr(label, body.and_then(|it| it.stmt_list()), &mut |b| { + cb(&ast::Expr::BreakExpr(b)) + }) + }; match expr { ast::Expr::BlockExpr(b) => { match b.modifier() { @@ -291,11 +299,9 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { } } } - ast::Expr::LoopExpr(l) => { - for_each_break_expr(l.label(), l.loop_body().and_then(|it| it.stmt_list()), &mut |b| { - cb(&ast::Expr::BreakExpr(b)) - }) - } + ast::Expr::LoopExpr(l) => walk_loop(cb, l.label(), l.loop_body()), + ast::Expr::WhileExpr(w) => walk_loop(cb, w.label(), w.loop_body()), + ast::Expr::ForExpr(f) => walk_loop(cb, f.label(), f.loop_body()), ast::Expr::MatchExpr(m) => { if let Some(arms) = m.match_arm_list() { arms.arms().filter_map(|arm| arm.expr()).for_each(|e| for_each_tail_expr(&e, cb)); @@ -311,7 +317,6 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { | ast::Expr::ClosureExpr(_) | ast::Expr::ContinueExpr(_) | ast::Expr::FieldExpr(_) - | ast::Expr::ForExpr(_) | ast::Expr::IndexExpr(_) | ast::Expr::Literal(_) | ast::Expr::MacroExpr(_) @@ -325,10 +330,10 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { | ast::Expr::ReturnExpr(_) | ast::Expr::TryExpr(_) | ast::Expr::TupleExpr(_) - | ast::Expr::WhileExpr(_) | ast::Expr::LetExpr(_) | ast::Expr::UnderscoreExpr(_) - | ast::Expr::YieldExpr(_) => cb(expr), + | ast::Expr::YieldExpr(_) + | ast::Expr::YeetExpr(_) => cb(expr), } } @@ -447,7 +452,7 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option it.path(), diff --git a/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/crates/ide-db/src/test_data/test_symbol_index_collection.txt index 2f531ca0c709c..8c11408dec5d7 100644 --- a/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -14,11 +14,7 @@ name: "Alias", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: TYPE_ALIAS, @@ -36,11 +32,7 @@ name: "CONST", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, @@ -58,11 +50,7 @@ name: "CONST_WITH_INNER", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: CONST, @@ -80,11 +68,7 @@ name: "Enum", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: ENUM, @@ -102,11 +86,7 @@ name: "Macro", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_DEF, @@ -124,11 +104,7 @@ name: "STATIC", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STATIC, @@ -146,11 +122,7 @@ name: "Struct", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -168,13 +140,7 @@ name: "StructFromMacro", loc: DeclarationLocation { hir_file_id: HirFileId( - MacroFile( - MacroFile { - macro_call_id: MacroCallId( - 0, - ), - }, - ), + 2147483648, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -192,11 +158,7 @@ name: "StructInFn", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: 
SyntaxNodePtr { kind: STRUCT, @@ -216,11 +178,7 @@ name: "StructInNamedConst", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -240,11 +198,7 @@ name: "StructInUnnamedConst", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -262,11 +216,7 @@ name: "Trait", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: TRAIT, @@ -284,11 +234,7 @@ name: "Union", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: UNION, @@ -306,11 +252,7 @@ name: "a_mod", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -328,11 +270,7 @@ name: "b_mod", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MODULE, @@ -350,11 +288,7 @@ name: "define_struct", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -372,11 +306,7 @@ name: "impl_fn", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -394,11 +324,7 @@ name: "macro_rules_macro", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: MACRO_RULES, @@ -416,11 +342,7 @@ name: "main", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -438,11 +360,7 @@ name: "trait_fn", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: FN, @@ -475,11 +393,7 @@ name: "StructInModA", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 0, - ), - ), + 0, ), ptr: SyntaxNodePtr { kind: STRUCT, @@ -510,11 +424,7 @@ name: "StructInModB", loc: DeclarationLocation { hir_file_id: HirFileId( - FileId( - FileId( - 1, - ), - ), + 1, ), ptr: SyntaxNodePtr { kind: STRUCT, diff --git a/crates/ide-db/src/tests/sourcegen_lints.rs b/crates/ide-db/src/tests/sourcegen_lints.rs index 5042f6d815a1e..c7d5f3613d4bc 100644 --- a/crates/ide-db/src/tests/sourcegen_lints.rs +++ b/crates/ide-db/src/tests/sourcegen_lints.rs @@ -241,9 +241,9 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) { buf.push_str(r#"pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &["#); for (id, children) in clippy_groups { - let children = children.iter().map(|id| format!("clippy::{}", id)).collect::>(); + let children = children.iter().map(|id| format!("clippy::{id}")).collect::>(); if !children.is_empty() { - let lint_ident = format!("clippy::{}", id); + let lint_ident = format!("clippy::{id}"); let description = format!("lint group for: {}", children.iter().join(", ")); push_lint_group(buf, &lint_ident, &description, &children); } @@ -273,7 +273,7 @@ fn push_lint_group(buf: &mut String, label: &str, description: &str, children: & push_lint_completion(buf, label, description); - let children = format!("&[{}]", children.iter().map(|it| format!("\"{}\"", it)).join(", ")); + let children = format!("&[{}]", children.iter().map(|it| format!("\"{it}\"")).join(", ")); format_to!( buf, r###" diff --git a/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs 
b/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs index 0c92e706b3916..10e637979f2cd 100644 --- a/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs +++ b/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs @@ -38,12 +38,12 @@ fn foo() { } #[test] - fn try_blocks_are_borders() { + fn async_blocks_are_borders() { check_diagnostics( r#" fn foo() { 'a: loop { - try { + async { break; //^^^^^ error: break outside of loop break 'a; @@ -60,12 +60,12 @@ fn foo() { } #[test] - fn async_blocks_are_borders() { + fn closures_are_borders() { check_diagnostics( r#" fn foo() { 'a: loop { - try { + || { break; //^^^^^ error: break outside of loop break 'a; @@ -82,21 +82,17 @@ fn foo() { } #[test] - fn closures_are_borders() { + fn blocks_pass_through() { check_diagnostics( r#" fn foo() { 'a: loop { - try { - break; - //^^^^^ error: break outside of loop - break 'a; - //^^^^^^^^ error: break outside of loop - continue; - //^^^^^^^^ error: continue outside of loop - continue 'a; - //^^^^^^^^^^^ error: continue outside of loop - }; + { + break; + break 'a; + continue; + continue 'a; + } } } "#, @@ -104,17 +100,17 @@ fn foo() { } #[test] - fn blocks_pass_through() { + fn try_blocks_pass_through() { check_diagnostics( r#" fn foo() { 'a: loop { - { - break; - break 'a; - continue; - continue 'a; - } + try { + break; + break 'a; + continue; + continue 'a; + }; } } "#, diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index 3034295196b42..e8df6dcf285d0 100644 --- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -125,7 +125,7 @@ pub(crate) fn json_in_items( .severity(Severity::WeakWarning) .with_fixes(Some(vec![{ let mut scb = SourceChangeBuilder::new(file_id); - let scope = match import_scope.clone() { + let scope = match import_scope { ImportScope::File(it) => ImportScope::File(scb.make_mut(it)), ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)), ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)), diff --git a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index 5f8b3e543b944..c5db8c3741b81 100644 --- a/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -13,7 +13,7 @@ pub(crate) fn mismatched_arg_count( d: &hir::MismatchedArgCount, ) -> Diagnostic { let s = if d.expected == 1 { "" } else { "s" }; - let message = format!("expected {} argument{}, found {}", d.expected, s, d.found); + let message = format!("expected {} argument{s}, found {}", d.expected, d.found); Diagnostic::new("mismatched-arg-count", message, invalid_args_range(ctx, d)) } diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index 7f140eb6a74a6..43af4d4f16aab 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -128,9 +128,9 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::MissingFields) -> Option u32; // Safe intrinsic pub fn floorf32(x: f32) -> f32; // Unsafe intrinsic } diff --git a/crates/ide-diagnostics/src/handlers/no_such_field.rs b/crates/ide-diagnostics/src/handlers/no_such_field.rs index d8f2a9de9818f..8da04e628d670 100644 --- a/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ 
b/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -68,7 +68,7 @@ fn missing_record_expr_field_fixes( } let new_field = make::record_field( None, - make::name(&record_expr_field.field_name()?.ident_token()?.text()), + make::name(record_expr_field.field_name()?.ident_token()?.text()), make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?), ); @@ -78,13 +78,13 @@ fn missing_record_expr_field_fixes( let mut new_field = new_field.to_string(); if usage_file_id != def_file_id { - new_field = format!("pub(crate) {}", new_field); + new_field = format!("pub(crate) {new_field}"); } - new_field = format!("\n{}{}", indent, new_field); + new_field = format!("\n{indent}{new_field}"); let needs_comma = !last_field_syntax.to_string().ends_with(','); if needs_comma { - new_field = format!(",{}", new_field); + new_field = format!(",{new_field}"); } let source_change = SourceChange::from_text_edit( diff --git a/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs new file mode 100644 index 0000000000000..b363a516dd1c0 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs @@ -0,0 +1,124 @@ +use either::Either; + +use crate::{Diagnostic, DiagnosticsContext}; + +// Diagnostic: private-assoc-item +// +// This diagnostic is triggered if the referenced associated item is not visible from the current +// module. +pub(crate) fn private_assoc_item( + ctx: &DiagnosticsContext<'_>, + d: &hir::PrivateAssocItem, +) -> Diagnostic { + // FIXME: add quickfix + let name = match d.item.name(ctx.sema.db) { + Some(name) => format!("`{}` ", name), + None => String::new(), + }; + Diagnostic::new( + "private-assoc-item", + format!( + "{} {}is private", + match d.item { + hir::AssocItem::Function(_) => "function", + hir::AssocItem::Const(_) => "const", + hir::AssocItem::TypeAlias(_) => "type alias", + }, + name, + ), + ctx.sema + .diagnostics_display_range(d.expr_or_pat.clone().map(|it| match it { + Either::Left(it) => it.into(), + Either::Right(it) => match it { + Either::Left(it) => it.into(), + Either::Right(it) => it.into(), + }, + })) + .range, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn private_method() { + check_diagnostics( + r#" +mod module { + pub struct Struct; + impl Struct { + fn method(&self) {} + } +} +fn main(s: module::Struct) { + s.method(); + //^^^^^^^^^^ error: function `method` is private +} +"#, + ); + } + + #[test] + fn private_func() { + check_diagnostics( + r#" +mod module { + pub struct Struct; + impl Struct { + fn func() {} + } +} +fn main() { + module::Struct::func(); + //^^^^^^^^^^^^^^^^^^^^ error: function `func` is private +} +"#, + ); + } + + #[test] + fn private_const() { + check_diagnostics( + r#" +mod module { + pub struct Struct; + impl Struct { + const CONST: u32 = 0; + } +} +fn main() { + module::Struct::CONST; + //^^^^^^^^^^^^^^^^^^^^^ error: const `CONST` is private +} +"#, + ); + } + + #[test] + fn private_but_shadowed_in_deref() { + check_diagnostics( + r#" +//- minicore: deref +mod module { + pub struct Struct { field: Inner } + pub struct Inner; + impl core::ops::Deref for Struct { + type Target = Inner; + fn deref(&self) -> &Inner { &self.field } + } + impl Struct { + fn method(&self) {} + } + impl Inner { + pub fn method(&self) {} + } +} +fn main(s: module::Struct) { + s.method(); +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/handlers/private_field.rs b/crates/ide-diagnostics/src/handlers/private_field.rs new 
file mode 100644 index 0000000000000..e630ae36866d3 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/private_field.rs @@ -0,0 +1,68 @@ +use crate::{Diagnostic, DiagnosticsContext}; + +// Diagnostic: private-field +// +// This diagnostic is triggered if the accessed field is not visible from the current module. +pub(crate) fn private_field(ctx: &DiagnosticsContext<'_>, d: &hir::PrivateField) -> Diagnostic { + // FIXME: add quickfix + Diagnostic::new( + "private-field", + format!( + "field `{}` of `{}` is private", + d.field.name(ctx.sema.db), + d.field.parent_def(ctx.sema.db).name(ctx.sema.db) + ), + ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn private_field() { + check_diagnostics( + r#" +mod module { pub struct Struct { field: u32 } } +fn main(s: module::Struct) { + s.field; + //^^^^^^^ error: field `field` of `Struct` is private +} +"#, + ); + } + + #[test] + fn private_tuple_field() { + check_diagnostics( + r#" +mod module { pub struct Struct(u32); } +fn main(s: module::Struct) { + s.0; + //^^^ error: field `0` of `Struct` is private +} +"#, + ); + } + + #[test] + fn private_but_shadowed_in_deref() { + check_diagnostics( + r#" +//- minicore: deref +mod module { + pub struct Struct { field: Inner } + pub struct Inner { pub field: u32 } + impl core::ops::Deref for Struct { + type Target = Inner; + fn deref(&self) -> &Inner { &self.field } + } +} +fn main(s: module::Struct) { + s.field; +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 62c69f90baa4f..2adae165e4d25 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -106,11 +106,11 @@ fn add_missing_ok_or_some( } let mut builder = TextEdit::builder(); - builder.insert(expr.syntax().text_range().start(), format!("{}(", variant_name)); + builder.insert(expr.syntax().text_range().start(), format!("{variant_name}(")); builder.insert(expr.syntax().text_range().end(), ")".to_string()); let source_change = SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), builder.finish()); - let name = format!("Wrap in {}", variant_name); + let name = format!("Wrap in {variant_name}"); acc.push(fix("wrap_in_constructor", &name, source_change, expr_range)); Some(()) } diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index c626932f196ba..be70f0ac4f797 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -64,7 +64,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option> { // `submod/bla.rs` -> `submod.rs` let parent_mod = (|| { let (name, _) = parent.name_and_extension()?; - parent.parent()?.join(&format!("{}.rs", name)) + parent.parent()?.join(&format!("{name}.rs")) })(); paths.extend(parent_mod); paths @@ -99,8 +99,8 @@ fn make_fixes( matches!(item, ast::Item::Module(m) if m.item_list().is_none()) } - let mod_decl = format!("mod {};", new_mod_name); - let pub_mod_decl = format!("pub mod {};", new_mod_name); + let mod_decl = format!("mod {new_mod_name};"); + let pub_mod_decl = format!("pub mod {new_mod_name};"); let ast: ast::SourceFile = db.parse(parent_file_id).tree(); @@ -125,8 +125,8 @@ fn make_fixes( Some(last) => { cov_mark::hit!(unlinked_file_append_to_existing_mods); let offset 
= last.syntax().text_range().end(); - mod_decl_builder.insert(offset, format!("\n{}", mod_decl)); - pub_mod_decl_builder.insert(offset, format!("\n{}", pub_mod_decl)); + mod_decl_builder.insert(offset, format!("\n{mod_decl}")); + pub_mod_decl_builder.insert(offset, format!("\n{pub_mod_decl}")); } None => { // Prepend before the first item in the file. @@ -134,15 +134,15 @@ fn make_fixes( Some(item) => { cov_mark::hit!(unlinked_file_prepend_before_first_item); let offset = item.syntax().text_range().start(); - mod_decl_builder.insert(offset, format!("{}\n\n", mod_decl)); - pub_mod_decl_builder.insert(offset, format!("{}\n\n", pub_mod_decl)); + mod_decl_builder.insert(offset, format!("{mod_decl}\n\n")); + pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n")); } None => { // No items in the file, so just append at the end. cov_mark::hit!(unlinked_file_empty_file); let offset = ast.syntax().text_range().end(); - mod_decl_builder.insert(offset, format!("{}\n", mod_decl)); - pub_mod_decl_builder.insert(offset, format!("{}\n", pub_mod_decl)); + mod_decl_builder.insert(offset, format!("{mod_decl}\n")); + pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n")); } } } @@ -152,13 +152,13 @@ fn make_fixes( Some(vec![ fix( "add_mod_declaration", - &format!("Insert `{}`", mod_decl), + &format!("Insert `{mod_decl}`"), SourceChange::from_text_edit(parent_file_id, mod_decl_builder.finish()), trigger_range, ), fix( "add_pub_mod_declaration", - &format!("Insert `{}`", pub_mod_decl), + &format!("Insert `{pub_mod_decl}`"), SourceChange::from_text_edit(parent_file_id, pub_mod_decl_builder.finish()), trigger_range, ), diff --git a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index 87531f4acfb75..1a5efff2c0c60 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -13,7 +13,7 @@ pub(crate) fn unresolved_macro_call( let bang = if d.is_bang { "!" 
} else { "" }; Diagnostic::new( "unresolved-macro-call", - format!("unresolved macro `{}{}`", d.path, bang), + format!("unresolved macro `{}{bang}`", d.path), display_range, ) .experimental() diff --git a/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/crates/ide-diagnostics/src/handlers/unresolved_module.rs index b8f2a9e94a40e..91395f1d841ad 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -16,7 +16,7 @@ pub(crate) fn unresolved_module( "unresolved-module", match &*d.candidates { [] => "unresolved module".to_string(), - [candidate] => format!("unresolved module, can't find module file: {}", candidate), + [candidate] => format!("unresolved module, can't find module file: {candidate}"), [candidates @ .., last] => { format!( "unresolved module, can't find module file: {}, or {}", diff --git a/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs index 23818d883f731..b2ed19104e278 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs @@ -26,7 +26,7 @@ pub(crate) fn unresolved_proc_macro( }; let message = match &d.macro_name { - Some(name) => format!("proc macro `{}` not expanded", name), + Some(name) => format!("proc macro `{name}` not expanded"), None => "proc macro not expanded".to_string(), }; let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning }; diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index d81e36a1f8632..64ba08ac883ba 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -37,6 +37,8 @@ mod handlers { pub(crate) mod missing_match_arms; pub(crate) mod missing_unsafe; pub(crate) mod no_such_field; + pub(crate) mod private_assoc_item; + pub(crate) mod private_field; pub(crate) mod replace_filter_map_next_with_find_map; pub(crate) mod type_mismatch; pub(crate) mod unimplemented_builtin_macro; @@ -218,7 +220,7 @@ pub fn diagnostics( // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. 
res.extend( parse.errors().iter().take(128).map(|err| { - Diagnostic::new("syntax-error", format!("Syntax Error: {}", err), err.range()) + Diagnostic::new("syntax-error", format!("Syntax Error: {err}"), err.range()) }), ); @@ -227,7 +229,7 @@ pub fn diagnostics( for node in parse.syntax().descendants() { handlers::useless_braces::useless_braces(&mut res, file_id, &node); handlers::field_shorthand::field_shorthand(&mut res, file_id, &node); - handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, &config); + handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, config); } let module = sema.to_module_def(file_id); @@ -254,6 +256,8 @@ pub fn diagnostics( AnyDiagnostic::MissingMatchArms(d) => handlers::missing_match_arms::missing_match_arms(&ctx, &d), AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d), AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d), + AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d), + AnyDiagnostic::PrivateField(d) => handlers::private_field::private_field(&ctx, &d), AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d), AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d), AnyDiagnostic::UnimplementedBuiltinMacro(d) => handlers::unimplemented_builtin_macro::unimplemented_builtin_macro(&ctx, &d), diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index 729619cfde03f..afa641c733ebb 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -75,7 +75,7 @@ pub(crate) fn check_no_fix(ra_fixture: &str) { ) .pop() .unwrap(); - assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic); + assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {diagnostic:?}"); } pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) { @@ -102,7 +102,7 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur for file_id in files { let diagnostics = super::diagnostics(&db, &config, &AssistResolveStrategy::All, file_id); - let expected = extract_annotations(&*db.file_text(file_id)); + let expected = extract_annotations(&db.file_text(file_id)); let mut actual = diagnostics .into_iter() .map(|d| { diff --git a/crates/ide-diagnostics/src/tests/sourcegen.rs b/crates/ide-diagnostics/src/tests/sourcegen.rs index ec6558a46efb4..9e7fcfc590b70 100644 --- a/crates/ide-diagnostics/src/tests/sourcegen.rs +++ b/crates/ide-diagnostics/src/tests/sourcegen.rs @@ -11,7 +11,7 @@ fn sourcegen_diagnostic_docs() { diagnostics.into_iter().map(|it| it.to_string()).collect::>().join("\n\n"); let contents = sourcegen::add_preamble("sourcegen_diagnostic_docs", contents); let dst = project_root().join("docs/user/generated_diagnostic.adoc"); - fs::write(&dst, &contents).unwrap(); + fs::write(dst, contents).unwrap(); } #[derive(Debug)] @@ -39,7 +39,7 @@ impl Diagnostic { for block in comment_blocks { let id = block.id; if let Err(msg) = is_valid_diagnostic_name(&id) { - panic!("invalid diagnostic name: {:?}:\n {}", id, msg) + panic!("invalid diagnostic name: {id:?}:\n {msg}") } let doc = block.contents.join("\n"); let location = sourcegen::Location { file: path.clone(), line: block.line }; diff --git a/crates/ide-ssr/src/parsing.rs b/crates/ide-ssr/src/parsing.rs index f6220b928a4c6..d78d009681a60 
100644 --- a/crates/ide-ssr/src/parsing.rs +++ b/crates/ide-ssr/src/parsing.rs @@ -352,7 +352,7 @@ impl NodeKind { impl Placeholder { fn new(name: SmolStr, constraints: Vec) -> Self { Self { - stand_in_name: format!("__placeholder_{}", name), + stand_in_name: format!("__placeholder_{name}"), constraints, ident: Var(name.to_string()), } diff --git a/crates/ide-ssr/src/tests.rs b/crates/ide-ssr/src/tests.rs index 1ecb7aa9aa701..61698fca80fee 100644 --- a/crates/ide-ssr/src/tests.rs +++ b/crates/ide-ssr/src/tests.rs @@ -121,7 +121,7 @@ fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snipp snippet ); for (index, d) in debug_info.iter().enumerate() { - println!("Node #{}\n{:#?}\n", index, d); + println!("Node #{index}\n{d:#?}\n"); } } @@ -144,7 +144,7 @@ fn assert_no_match(pattern: &str, code: &str) { let matches = match_finder.matches().flattened().matches; if !matches.is_empty() { print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text()); - panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches); + panic!("Got {} matches when we expected none: {matches:#?}", matches.len()); } } diff --git a/crates/ide/src/call_hierarchy.rs b/crates/ide/src/call_hierarchy.rs index 5a8cda8fb3dda..48bcd37b62c61 100644 --- a/crates/ide/src/call_hierarchy.rs +++ b/crates/ide/src/call_hierarchy.rs @@ -57,7 +57,8 @@ pub(crate) fn incoming_calls( .flat_map(|func| func.usages(sema).all()); for (_, references) in references { - let references = references.into_iter().map(|FileReference { name, .. }| name); + let references = + references.iter().filter_map(|FileReference { name, .. }| name.as_name_ref()); for name in references { // This target is the containing function let nav = sema.ancestors_with_macros(name.syntax().clone()).find_map(|node| { @@ -457,4 +458,28 @@ fn caller$0() { expect![[]], ); } + + #[test] + fn test_trait_method_call_hierarchy() { + check_hierarchy( + r#" +trait T1 { + fn call$0ee(); +} + +struct S1; + +impl T1 for S1 { + fn callee() {} +} + +fn caller() { + S1::callee(); +} +"#, + expect![["callee Function FileId(0) 15..27 18..24"]], + expect![["caller Function FileId(0) 82..115 85..91 : [104..110]"]], + expect![[]], + ); + } } diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index d96827326cfd8..b4a7f2b918a46 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -273,7 +273,7 @@ impl DocCommentToken { let (in_expansion_range, link, ns) = extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| { let mapped = doc_mapping.map(range)?; - (mapped.value.contains(abs_in_expansion_offset)).then(|| (mapped.value, link, ns)) + (mapped.value.contains(abs_in_expansion_offset)).then_some((mapped.value, link, ns)) })?; // get the relative range to the doc/attribute in the expansion let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start; @@ -285,7 +285,7 @@ impl DocCommentToken { } } -fn broken_link_clone_cb<'a>(link: BrokenLink<'a>) -> Option<(CowStr<'a>, CowStr<'a>)> { +fn broken_link_clone_cb(link: BrokenLink<'_>) -> Option<(CowStr<'_>, CowStr<'_>)> { Some((/*url*/ link.reference.clone(), /*title*/ link.reference)) } @@ -453,7 +453,7 @@ fn get_doc_base_url(db: &RootDatabase, def: Definition) -> Option { })? } }; - Url::parse(&base).ok()?.join(&format!("{}/", display_name)).ok() + Url::parse(&base).ok()?.join(&format!("{display_name}/")).ok() } /// Get the filename and extension generated for a symbol by rustdoc. 
@@ -488,7 +488,7 @@ fn filename_and_frag_for_def( Some(kw) => { format!("keyword.{}.html", kw.trim_matches('"')) } - None => format!("{}/index.html", name), + None => format!("{name}/index.html"), }, None => String::from("index.html"), }, diff --git a/crates/ide/src/doc_links/intra_doc_links.rs b/crates/ide/src/doc_links/intra_doc_links.rs index 1df9aaae281ee..13088bdc3b30f 100644 --- a/crates/ide/src/doc_links/intra_doc_links.rs +++ b/crates/ide/src/doc_links/intra_doc_links.rs @@ -63,8 +63,8 @@ mod tests { fn check(link: &str, expected: Expect) { let (l, a) = parse_intra_doc_link(link); - let a = a.map_or_else(String::new, |a| format!(" ({:?})", a)); - expected.assert_eq(&format!("{}{}", l, a)); + let a = a.map_or_else(String::new, |a| format!(" ({a:?})")); + expected.assert_eq(&format!("{l}{a}")); } #[test] diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs index c6bfb6b9d0975..104181a33e68d 100644 --- a/crates/ide/src/doc_links/tests.rs +++ b/crates/ide/src/doc_links/tests.rs @@ -40,7 +40,7 @@ fn check_doc_links(ra_fixture: &str) { .into_iter() .map(|(_, link, ns)| { let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns) - .unwrap_or_else(|| panic!("Failed to resolve {}", link)); + .unwrap_or_else(|| panic!("Failed to resolve {link}")); let nav_target = def.try_to_nav(sema.db).unwrap(); let range = FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() }; diff --git a/crates/ide/src/expand_macro.rs b/crates/ide/src/expand_macro.rs index 93252339cd4a8..418043d679811 100644 --- a/crates/ide/src/expand_macro.rs +++ b/crates/ide/src/expand_macro.rs @@ -163,7 +163,7 @@ fn _format( ) -> Option { use ide_db::base_db::{FileLoader, SourceDatabase}; // hack until we get hygiene working (same character amount to preserve formatting as much as possible) - const DOLLAR_CRATE_REPLACE: &str = &"__r_a_"; + const DOLLAR_CRATE_REPLACE: &str = "__r_a_"; let expansion = expansion.replace("$crate", DOLLAR_CRATE_REPLACE); let (prefix, suffix) = match kind { SyntaxKind::MACRO_PAT => ("fn __(", ": u32);"), diff --git a/crates/ide/src/extend_selection.rs b/crates/ide/src/extend_selection.rs index 45f1fd74841c6..9f78c75e90aa2 100644 --- a/crates/ide/src/extend_selection.rs +++ b/crates/ide/src/extend_selection.rs @@ -205,7 +205,7 @@ fn extend_single_word_in_comment_or_string( } let start_idx = before.rfind(non_word_char)? as u32; - let end_idx = after.find(non_word_char).unwrap_or_else(|| after.len()) as u32; + let end_idx = after.find(non_word_char).unwrap_or(after.len()) as u32; let from: TextSize = (start_idx + 1).into(); let to: TextSize = (cursor_position + end_idx).into(); diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index 926292c9b3ce1..c7130a2a4bb0a 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -1,18 +1,22 @@ -use hir::Semantics; +use hir::{AsAssocItem, Semantics}; use ide_db::{ defs::{Definition, NameClass, NameRefClass}, RootDatabase, }; use syntax::{ast, match_ast, AstNode, SyntaxKind::*, T}; -use crate::{FilePosition, NavigationTarget, RangeInfo}; +use crate::{ + goto_definition::goto_definition, navigation_target::TryToNav, FilePosition, NavigationTarget, + RangeInfo, +}; // Feature: Go to Declaration // // Navigates to the declaration of an identifier. // -// This is currently the same as `Go to Definition` with the exception of outline modules where it -// will navigate to the `mod name;` item declaration. 
+// This is the same as `Go to Definition` with the following exceptions: +// - outline modules will navigate to the `mod name;` item declaration +// - trait assoc items will navigate to the assoc item of the trait declaration opposed to the trait impl pub(crate) fn goto_declaration( db: &RootDatabase, position: FilePosition, @@ -32,25 +36,37 @@ pub(crate) fn goto_declaration( match parent { ast::NameRef(name_ref) => match NameRefClass::classify(&sema, &name_ref)? { NameRefClass::Definition(it) => Some(it), - _ => None + NameRefClass::FieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db), }, ast::Name(name) => match NameClass::classify(&sema, &name)? { - NameClass::Definition(it) => Some(it), - _ => None + NameClass::Definition(it) | NameClass::ConstReference(it) => Some(it), + NameClass::PatFieldShorthand { field_ref, .. } => return field_ref.try_to_nav(db), }, _ => None } }; - match def? { + let assoc = match def? { Definition::Module(module) => { - Some(NavigationTarget::from_module_to_decl(db, module)) + return Some(NavigationTarget::from_module_to_decl(db, module)) } + Definition::Const(c) => c.as_assoc_item(db), + Definition::TypeAlias(ta) => ta.as_assoc_item(db), + Definition::Function(f) => f.as_assoc_item(db), _ => None, - } + }?; + + let trait_ = assoc.containing_trait_impl(db)?; + let name = Some(assoc.name(db)?); + let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; + item.try_to_nav(db) }) .collect(); - Some(RangeInfo::new(range, info)) + if info.is_empty() { + goto_definition(db, position) + } else { + Some(RangeInfo::new(range, info)) + } } #[cfg(test)] @@ -109,4 +125,89 @@ mod foo { "#, ) } + + #[test] + fn goto_decl_goto_def_fallback() { + check( + r#" +struct Foo; + // ^^^ +impl Foo$0 {} +"#, + ); + } + + #[test] + fn goto_decl_assoc_item_no_impl_item() { + check( + r#" +trait Trait { + const C: () = (); + // ^ +} +impl Trait for () {} + +fn main() { + <()>::C$0; +} +"#, + ); + } + + #[test] + fn goto_decl_assoc_item() { + check( + r#" +trait Trait { + const C: () = (); + // ^ +} +impl Trait for () { + const C: () = (); +} + +fn main() { + <()>::C$0; +} +"#, + ); + check( + r#" +trait Trait { + const C: () = (); + // ^ +} +impl Trait for () { + const C$0: () = (); +} +"#, + ); + } + + #[test] + fn goto_decl_field_pat_shorthand() { + check( + r#" +struct Foo { field: u32 } + //^^^^^ +fn main() { + let Foo { field$0 }; +} +"#, + ); + } + + #[test] + fn goto_decl_constructor_shorthand() { + check( + r#" +struct Foo { field: u32 } + //^^^^^ +fn main() { + let field = 0; + Foo { field$0 }; +} +"#, + ); + } } diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 43f7a529bc297..73fd518a9ef08 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -187,7 +187,7 @@ mod tests { let (analysis, position) = fixture::position(ra_fixture); let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info; - assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {:?}", navs) + assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {navs:?}") } #[test] diff --git a/crates/ide/src/goto_implementation.rs b/crates/ide/src/goto_implementation.rs index b3f711b6b88c4..190ab80ba0ff3 100644 --- a/crates/ide/src/goto_implementation.rs +++ b/crates/ide/src/goto_implementation.rs @@ -110,7 +110,7 @@ fn impls_for_trait_item( .filter_map(|imp| { let item = imp.items(sema.db).iter().find_map(|itm| { let itm_name = itm.name(sema.db)?; - (itm_name == 
fun_name).then(|| *itm) + (itm_name == fun_name).then_some(*itm) })?; item.try_to_nav(sema.db) }) diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index 540a115832d3e..55f8779eed7d0 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -110,7 +110,7 @@ fn highlight_references( .and_then(|decl| decl.focus_range) .map(|range| { let category = - references::decl_mutability(&def, node, range).then(|| ReferenceCategory::Write); + references::decl_mutability(&def, node, range).then_some(ReferenceCategory::Write); HighlightedRange { range, category } }); if let Some(hl_range) = hl_range { @@ -365,7 +365,7 @@ mod tests { let mut expected = annotations .into_iter() - .map(|(r, access)| (r.range, (!access.is_empty()).then(|| access))) + .map(|(r, access)| (r.range, (!access.is_empty()).then_some(access))) .collect::>(); let mut actual = hls @@ -765,6 +765,23 @@ fn foo() ->$0 u32 { ); } + #[test] + fn test_hl_inner_tail_exit_points_loops() { + check( + r#" +fn foo() ->$0 u32 { + 'foo: while { return 0; true } { + // ^^^^^^ + break 'foo 0; + // ^^^^^ + return 0; + // ^^^^^^ + } +} +"#, + ); + } + #[test] fn test_hl_break_loop() { check( diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 838fb18c3d590..b214fa12a4fec 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -127,6 +127,7 @@ pub(crate) fn hover( original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())) ); + // prefer descending the same token kind in attribute expansions, in normal macros text // equivalency is more important let descended = if in_attr { @@ -135,54 +136,67 @@ pub(crate) fn hover( sema.descend_into_macros_with_same_text(original_token.clone()) }; - // FIXME: Definition should include known lints and the like instead of having this special case here - let hovered_lint = descended.iter().find_map(|token| { - let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; - render::try_for_lint(&attr, token) - }); - if let Some(res) = hovered_lint { - return Some(RangeInfo::new(original_token.text_range(), res)); - } - + // try lint hover let result = descended .iter() - .filter_map(|token| { - let node = token.parent()?; - let class = IdentClass::classify_token(sema, token)?; - if let IdentClass::Operator(OperatorClass::Await(_)) = class { - // It's better for us to fall back to the keyword hover here, - // rendering poll is very confusing - return None; - } - Some(class.definitions().into_iter().zip(iter::once(node).cycle())) + .find_map(|token| { + // FIXME: Definition should include known lints and the like instead of having this special case here + let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; + render::try_for_lint(&attr, token) }) - .flatten() - .unique_by(|&(def, _)| def) - .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) - .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { - acc.actions.extend(actions); - acc.markup = Markup::from(format!("{}\n---\n{}", acc.markup, markup)); - acc - }); + // try item definitions + .or_else(|| { + descended + .iter() + .filter_map(|token| { + let node = token.parent()?; + let class = IdentClass::classify_token(sema, token)?; + if let IdentClass::Operator(OperatorClass::Await(_)) = class { + // It's better for us to fall back to the keyword hover here, + // rendering poll is very confusing + return None; + } + 
Some(class.definitions().into_iter().zip(iter::once(node).cycle())) + }) + .flatten() + .unique_by(|&(def, _)| def) + .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) + .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { + acc.actions.extend(actions); + acc.markup = Markup::from(format!("{}\n---\n{markup}", acc.markup)); + acc + }) + }) + // try keywords + .or_else(|| descended.iter().find_map(|token| render::keyword(sema, config, token))) + // try rest item hover + .or_else(|| { + descended.iter().find_map(|token| { + if token.kind() != DOT2 { + return None; + } - if result.is_none() { - // fallbacks, show keywords or types + let rest_pat = token.parent().and_then(ast::RestPat::cast)?; + let record_pat_field_list = + rest_pat.syntax().parent().and_then(ast::RecordPatFieldList::cast)?; - let res = descended.iter().find_map(|token| render::keyword(sema, config, token)); - if let Some(res) = res { - return Some(RangeInfo::new(original_token.text_range(), res)); - } - let res = descended - .iter() - .find_map(|token| hover_type_fallback(sema, config, token, &original_token)); - if let Some(_) = res { - return res; - } - } - result.map(|mut res: HoverResult| { - res.actions = dedupe_or_merge_hover_actions(res.actions); - RangeInfo::new(original_token.text_range(), res) - }) + let record_pat = + record_pat_field_list.syntax().parent().and_then(ast::RecordPat::cast)?; + + Some(render::struct_rest_pat(sema, config, &record_pat)) + }) + }); + + result + .map(|mut res: HoverResult| { + res.actions = dedupe_or_merge_hover_actions(res.actions); + RangeInfo::new(original_token.text_range(), res) + }) + // fallback to type hover if there aren't any other suggestions + // this finds its own range instead of using the closest token's range + .or_else(|| { + descended.iter().find_map(|token| hover_type_fallback(sema, config, token, token)) + }) } pub(crate) fn hover_for_definition( @@ -269,6 +283,7 @@ fn hover_type_fallback( }; let res = render::type_info(sema, config, &expr_or_pat)?; + let range = sema .original_range_opt(&node) .map(|frange| frange.range) diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index d109c0769194f..47257f0bfad05 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -2,7 +2,9 @@ use std::fmt::Display; use either::Either; -use hir::{AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay, Semantics, TypeInfo}; +use hir::{ + Adt, AsAssocItem, AttributeTemplate, HasAttrs, HasSource, HirDisplay, Semantics, TypeInfo, +}; use ide_db::{ base_db::SourceDatabase, defs::Definition, @@ -14,7 +16,9 @@ use ide_db::{ use itertools::Itertools; use stdx::format_to; use syntax::{ - algo, ast, match_ast, AstNode, Direction, + algo, + ast::{self, RecordPat}, + match_ast, AstNode, Direction, SyntaxKind::{LET_EXPR, LET_STMT}, SyntaxToken, T, }; @@ -250,6 +254,50 @@ pub(super) fn keyword( Some(HoverResult { markup, actions }) } +/// Returns missing types in a record pattern. +/// Only makes sense when there's a rest pattern in the record pattern. +/// i.e. `let S {a, ..} = S {a: 1, b: 2}` +pub(super) fn struct_rest_pat( + sema: &Semantics<'_, RootDatabase>, + config: &HoverConfig, + pattern: &RecordPat, +) -> HoverResult { + let missing_fields = sema.record_pattern_missing_fields(pattern); + + // if there are no missing fields, the end result is a hover that shows ".." 
+ // should be left in to indicate that there are no more fields in the pattern + // example, S {a: 1, b: 2, ..} when struct S {a: u32, b: u32} + + let mut res = HoverResult::default(); + let mut targets: Vec = Vec::new(); + let mut push_new_def = |item: hir::ModuleDef| { + if !targets.contains(&item) { + targets.push(item); + } + }; + for (_, t) in &missing_fields { + walk_and_push_ty(sema.db, t, &mut push_new_def); + } + + res.markup = { + let mut s = String::from(".., "); + for (f, _) in &missing_fields { + s += f.display(sema.db).to_string().as_ref(); + s += ", "; + } + // get rid of trailing comma + s.truncate(s.len() - 2); + + if config.markdown() { + Markup::fenced_block(&s) + } else { + s.into() + } + }; + res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets)); + res +} + pub(super) fn try_for_lint(attr: &ast::Attr, token: &SyntaxToken) -> Option { let (path, tt) = attr.as_simple_call()?; if !tt.syntax().text_range().contains(token.text_range().start()) { @@ -342,15 +390,35 @@ pub(super) fn definition( let mod_path = definition_mod_path(db, &def); let (label, docs) = match def { Definition::Macro(it) => label_and_docs(db, it), - Definition::Field(it) => label_and_docs(db, it), + Definition::Field(it) => label_and_layout_info_and_docs(db, it, |&it| { + let var_def = it.parent_def(db); + let id = it.index(); + let layout = it.layout(db).ok()?; + let offset = match var_def { + hir::VariantDef::Struct(s) => Adt::from(s) + .layout(db) + .ok() + .map(|layout| format!(", offset = {}", layout.fields.offset(id).bytes())), + _ => None, + }; + Some(format!( + "size = {}, align = {}{}", + layout.size.bytes(), + layout.align.abi.bytes(), + offset.as_deref().unwrap_or_default() + )) + }), Definition::Module(it) => label_and_docs(db, it), Definition::Function(it) => label_and_docs(db, it), - Definition::Adt(it) => label_and_docs(db, it), + Definition::Adt(it) => label_and_layout_info_and_docs(db, it, |&it| { + let layout = it.layout(db).ok()?; + Some(format!("size = {}, align = {}", layout.size.bytes(), layout.align.abi.bytes())) + }), Definition::Variant(it) => label_value_and_docs(db, it, |&it| { if !it.parent_enum(db).is_data_carrying(db) { match it.eval(db) { - Ok(x) => Some(format!("{}", x)), - Err(_) => it.value(db).map(|x| format!("{:?}", x)), + Ok(x) => Some(format!("{x}")), + Err(_) => it.value(db).map(|x| format!("{x:?}")), } } else { None @@ -359,7 +427,7 @@ pub(super) fn definition( Definition::Const(it) => label_value_and_docs(db, it, |it| { let body = it.eval(db); match body { - Ok(x) => Some(format!("{}", x)), + Ok(x) => Some(format!("{x}")), Err(_) => { let source = it.source(db)?; let mut body = source.value.body()?.syntax().clone(); @@ -415,7 +483,7 @@ pub(super) fn definition( fn render_builtin_attr(db: &RootDatabase, attr: hir::BuiltinAttr) -> Option { let name = attr.name(db); - let desc = format!("#[{}]", name); + let desc = format!("#[{name}]"); let AttributeTemplate { word, list, name_value_str } = match attr.template(db) { Some(template) => template, @@ -443,6 +511,25 @@ where (label, docs) } +fn label_and_layout_info_and_docs( + db: &RootDatabase, + def: D, + value_extractor: E, +) -> (String, Option) +where + D: HasAttrs + HirDisplay, + E: Fn(&D) -> Option, + V: Display, +{ + let label = if let Some(value) = value_extractor(&def) { + format!("{} // {value}", def.display(db)) + } else { + def.display(db).to_string() + }; + let docs = def.attrs(db).docs(); + (label, docs) +} + fn label_value_and_docs( db: &RootDatabase, def: D, @@ -454,7 +541,7 @@ where 
V: Display, { let label = if let Some(value) = value_extractor(&def) { - format!("{} = {}", def.display(db), value) + format!("{} = {value}", def.display(db)) } else { def.display(db).to_string() }; @@ -518,9 +605,9 @@ fn local(db: &RootDatabase, it: hir::Local) -> Option { } else { "" }; - format!("{}{}{}: {}", let_kw, is_mut, name, ty) + format!("{let_kw}{is_mut}{name}: {ty}") } - Either::Right(_) => format!("{}self: {}", is_mut, ty), + Either::Right(_) => format!("{is_mut}self: {ty}"), }; markup(None, desc, None) } diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index eb997e6fef830..c7f241f2fea64 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -37,7 +37,7 @@ fn check(ra_fixture: &str, expect: Expect) { let content = analysis.db.file_text(position.file_id); let hovered_element = &content[hover.range]; - let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup); + let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); expect.assert_eq(&actual) } @@ -58,7 +58,7 @@ fn check_hover_no_links(ra_fixture: &str, expect: Expect) { let content = analysis.db.file_text(position.file_id); let hovered_element = &content[hover.range]; - let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup); + let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); expect.assert_eq(&actual) } @@ -79,7 +79,7 @@ fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) { let content = analysis.db.file_text(position.file_id); let hovered_element = &content[hover.range]; - let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup); + let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup); expect.assert_eq(&actual) } @@ -522,6 +522,27 @@ fn main() { } ); } +#[test] +fn hover_field_offset() { + // Hovering over the field when instantiating + check( + r#" +struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 } +"#, + expect![[r#" + *field_a* + + ```rust + test::Foo + ``` + + ```rust + field_a: u8 // size = 1, align = 1, offset = 4 + ``` + "#]], + ); +} + #[test] fn hover_shows_struct_field_info() { // Hovering over the field when instantiating @@ -534,16 +555,16 @@ fn main() { } "#, expect![[r#" - *field_a* + *field_a* - ```rust - test::Foo - ``` + ```rust + test::Foo + ``` - ```rust - field_a: u32 - ``` - "#]], + ```rust + field_a: u32 // size = 4, align = 4, offset = 0 + ``` + "#]], ); // Hovering over the field in the definition @@ -556,16 +577,16 @@ fn main() { } "#, expect![[r#" - *field_a* + *field_a* - ```rust - test::Foo - ``` + ```rust + test::Foo + ``` - ```rust - field_a: u32 - ``` - "#]], + ```rust + field_a: u32 // size = 4, align = 4, offset = 0 + ``` + "#]], ); } @@ -1508,30 +1529,30 @@ struct Bar; fn foo() { let bar = Ba$0r; } "#, - expect![[r##" - *Bar* + expect![[r#" + *Bar* - ```rust - test - ``` + ```rust + test + ``` - ```rust - struct Bar - ``` + ```rust + struct Bar // size = 0, align = 1 + ``` - --- + --- - This is an example - multiline doc + This is an example + multiline doc - # Example + # Example - ``` - let five = 5; + ``` + let five = 5; - assert_eq!(6, my_crate::add_one(5)); - ``` - "##]], + assert_eq!(6, my_crate::add_one(5)); + ``` + "#]], ); } @@ -1545,20 +1566,20 @@ struct Bar; fn foo() { let bar = Ba$0r; } "#, expect![[r#" - *Bar* + *Bar* - ```rust - test - ``` + ```rust + test + ``` - ```rust - struct Bar - ``` + ```rust + struct Bar // size = 0, align = 1 + ``` - --- + --- - bar docs - "#]], + bar docs + "#]], ); } @@ -1574,22 +1595,22 @@ 
struct Bar; fn foo() { let bar = Ba$0r; } "#, expect![[r#" - *Bar* + *Bar* - ```rust - test - ``` + ```rust + test + ``` - ```rust - struct Bar - ``` + ```rust + struct Bar // size = 0, align = 1 + ``` - --- + --- - bar docs 0 - bar docs 1 - bar docs 2 - "#]], + bar docs 0 + bar docs 1 + bar docs 2 + "#]], ); } @@ -1602,20 +1623,20 @@ pub struct Foo; pub struct B$0ar "#, expect![[r#" - *Bar* + *Bar* - ```rust - test - ``` + ```rust + test + ``` - ```rust - pub struct Bar - ``` + ```rust + pub struct Bar // size = 0, align = 1 + ``` - --- + --- - [external](https://www.google.com) - "#]], + [external](https://www.google.com) + "#]], ); } @@ -1629,20 +1650,20 @@ pub struct Foo; pub struct B$0ar "#, expect![[r#" - *Bar* + *Bar* - ```rust - test - ``` + ```rust + test + ``` - ```rust - pub struct Bar - ``` + ```rust + pub struct Bar // size = 0, align = 1 + ``` - --- + --- - [baz](Baz) - "#]], + [baz](Baz) + "#]], ); } @@ -2960,7 +2981,7 @@ fn main() { ``` ```rust - f: i32 + f: i32 // size = 4, align = 4, offset = 0 ``` "#]], ); @@ -3636,6 +3657,163 @@ enum E { #[test] fn hover_const_eval() { + check( + r#" +trait T { + const B: bool = false; +} +impl T for <()> { + /// true + const B: bool = true; +} +fn main() { + <()>::B$0; +} +"#, + expect![[r#" + *B* + + ```rust + test + ``` + + ```rust + const B: bool = true + ``` + + --- + + true + "#]], + ); + + check( + r#" +struct A { + i: i32 +}; + +trait T { + const AA: A = A { + i: 1 + }; +} +impl T for i32 { + const AA: A = A { + i: 2 + } +} +fn main() { + ::AA$0; +} +"#, + expect![[r#" + *AA* + + ```rust + test + ``` + + ```rust + const AA: A = A { + i: 2 + } + ``` + "#]], + ); + + check( + r#" +trait T { + /// false + const B: bool = false; +} +impl T for () { + /// true + const B: bool = true; +} +fn main() { + T::B$0; +} +"#, + expect![[r#" + *B* + + ```rust + test + ``` + + ```rust + const B: bool = false + ``` + + --- + + false + "#]], + ); + + check( + r#" +trait T { + /// false + const B: bool = false; +} +impl T for () { +} +fn main() { + <()>::B$0; +} +"#, + expect![[r#" + *B* + + ```rust + test + ``` + + ```rust + const B: bool = false + ``` + + --- + + false + "#]], + ); + + check( + r#" +trait T { + /// false + const B: bool = false; +} +impl T for () { + /// true + const B: bool = true; +} +impl T for i32 {} +fn main() { + ::B$0; +} +"#, + expect![[r#" + *B* + + ```rust + test + ``` + + ```rust + const B: bool = false + ``` + + --- + + false + "#]], + ); + // show hex for <10 check( r#" @@ -3901,6 +4079,37 @@ const FOO$0: f64 = 1.0f64; ); } +#[test] +fn hover_const_eval_in_generic_trait() { + // Doesn't compile, but we shouldn't crash. + check( + r#" +trait Trait { + const FOO: bool = false; +} +struct S(T); +impl Trait for S { + const FOO: bool = true; +} + +fn test() { + S::FOO$0; +} +"#, + expect![[r#" + *FOO* + + ```rust + test + ``` + + ```rust + const FOO: bool = true + ``` + "#]], + ); +} + #[test] fn hover_const_pat() { check( @@ -4203,20 +4412,20 @@ pub fn gimme() -> theitem::TheItem { } "#, expect![[r#" - *[`TheItem`]* + *[`TheItem`]* - ```rust - test::theitem - ``` + ```rust + test::theitem + ``` - ```rust - pub struct TheItem - ``` + ```rust + pub struct TheItem // size = 0, align = 1 + ``` - --- + --- - This is the item. Cool! - "#]], + This is the item. Cool! + "#]], ); } @@ -4351,20 +4560,20 @@ mod string { } "#, expect![[r#" - *String* + *String* - ```rust - main - ``` + ```rust + main + ``` - ```rust - struct String - ``` + ```rust + struct String // size = 0, align = 1 + ``` - --- + --- - Custom `String` type. 
- "#]], + Custom `String` type. + "#]], ) } @@ -5025,7 +5234,7 @@ foo_macro!( ``` ```rust - pub struct Foo + pub struct Foo // size = 0, align = 1 ``` --- @@ -5040,7 +5249,7 @@ fn hover_intra_in_attr() { check( r#" #[doc = "Doc comment for [`Foo$0`]"] -pub struct Foo; +pub struct Foo(i32); "#, expect![[r#" *[`Foo`]* @@ -5050,7 +5259,7 @@ pub struct Foo; ``` ```rust - pub struct Foo + pub struct Foo // size = 4, align = 4 ``` --- @@ -5155,6 +5364,28 @@ enum Enum { ); } +#[test] +fn hover_record_variant_field() { + check( + r#" +enum Enum { + RecordV { field$0: u32 } +} +"#, + expect![[r#" + *field* + + ```rust + test::RecordV + ``` + + ```rust + field: u32 // size = 4, align = 4 + ``` + "#]], + ); +} + #[test] fn hover_trait_impl_assoc_item_def_doc_forwarding() { check( @@ -5307,3 +5538,38 @@ fn main() { $0V; } "#]], ); } + +#[test] +fn hover_rest_pat() { + check( + r#" +struct Struct {a: u32, b: u32, c: u8, d: u16}; + +fn main() { + let Struct {a, c, .$0.} = Struct {a: 1, b: 2, c: 3, d: 4}; +} +"#, + expect![[r#" + *..* + ```rust + .., b: u32, d: u16 + ``` + "#]], + ); + + check( + r#" +struct Struct {a: u32, b: u32, c: u8, d: u16}; + +fn main() { + let Struct {a, b, c, d, .$0.} = Struct {a: 1, b: 2, c: 3, d: 4}; +} +"#, + expect![[r#" + *..* + ```rust + .. + ``` + "#]], + ); +} diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 37384c4e7e075..48a7bbfecffa0 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -1,31 +1,42 @@ -use std::fmt; +use std::{ + fmt::{self, Write}, + mem::take, +}; use either::Either; -use hir::{ - known, Adjust, AutoBorrow, Callable, HasVisibility, HirDisplay, Mutability, OverloadedDeref, - PointerCast, Safety, Semantics, TypeInfo, -}; -use ide_db::{ - base_db::FileRange, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty, FxHashMap, - RootDatabase, -}; +use hir::{known, HasVisibility, HirDisplay, HirWrite, ModuleDef, ModuleDefId, Semantics}; +use ide_db::{base_db::FileRange, famous_defs::FamousDefs, RootDatabase}; use itertools::Itertools; -use stdx::to_lower_snake_case; +use stdx::never; use syntax::{ - ast::{self, AstNode, HasArgList, HasGenericParams, HasName, UnaryOp}, - match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, - TextSize, T, + ast::{self, AstNode}, + match_ast, NodeOrToken, SyntaxNode, TextRange, TextSize, }; -use crate::FileId; +use crate::{navigation_target::TryToNav, FileId}; + +mod closing_brace; +mod implicit_static; +mod fn_lifetime_fn; +mod closure_ret; +mod adjustment; +mod chaining; +mod param_name; +mod binding_mode; +mod bind_pat; +mod discriminant; #[derive(Clone, Debug, PartialEq, Eq)] pub struct InlayHintsConfig { + pub location_links: bool, pub render_colons: bool, pub type_hints: bool, + pub discriminant_hints: DiscriminantHints, pub parameter_hints: bool, pub chaining_hints: bool, pub adjustment_hints: AdjustmentHints, + pub adjustment_hints_mode: AdjustmentHintsMode, + pub adjustment_hints_hide_outside_unsafe: bool, pub closure_return_type_hints: ClosureReturnTypeHints, pub binding_mode_hints: bool, pub lifetime_elision_hints: LifetimeElisionHints, @@ -43,6 +54,13 @@ pub enum ClosureReturnTypeHints { Never, } +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum DiscriminantHints { + Always, + Never, + Fieldless, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub enum LifetimeElisionHints { Always, @@ -57,6 +75,14 @@ pub enum AdjustmentHints { Never, } +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum AdjustmentHintsMode { + 
Prefix, + Postfix, + PreferPrefix, + PreferPostfix, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub enum InlayKind { BindingModeHint, @@ -65,10 +91,13 @@ pub enum InlayKind { ClosureReturnTypeHint, GenericParamListHint, AdjustmentHint, - AdjustmentHintClosingParenthesis, + AdjustmentHintPostfix, LifetimeHint, ParameterHint, TypeHint, + DiscriminantHint, + OpeningParenthesis, + ClosingParenthesis, } #[derive(Debug)] @@ -86,6 +115,7 @@ pub enum InlayTooltip { HoverOffset(FileId, TextSize), } +#[derive(Default)] pub struct InlayHintLabel { pub parts: Vec, } @@ -169,6 +199,101 @@ impl fmt::Debug for InlayHintLabelPart { } } +#[derive(Debug)] +struct InlayHintLabelBuilder<'a> { + db: &'a RootDatabase, + result: InlayHintLabel, + last_part: String, + location_link_enabled: bool, + location: Option, +} + +impl fmt::Write for InlayHintLabelBuilder<'_> { + fn write_str(&mut self, s: &str) -> fmt::Result { + self.last_part.write_str(s) + } +} + +impl HirWrite for InlayHintLabelBuilder<'_> { + fn start_location_link(&mut self, def: ModuleDefId) { + if !self.location_link_enabled { + return; + } + if self.location.is_some() { + never!("location link is already started"); + } + self.make_new_part(); + let Some(location) = ModuleDef::from(def).try_to_nav(self.db) else { return }; + let location = + FileRange { file_id: location.file_id, range: location.focus_or_full_range() }; + self.location = Some(location); + } + + fn end_location_link(&mut self) { + if !self.location_link_enabled { + return; + } + self.make_new_part(); + } +} + +impl InlayHintLabelBuilder<'_> { + fn make_new_part(&mut self) { + self.result.parts.push(InlayHintLabelPart { + text: take(&mut self.last_part), + linked_location: self.location.take(), + }); + } + + fn finish(mut self) -> InlayHintLabel { + self.make_new_part(); + self.result + } +} + +fn label_of_ty( + famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + ty: hir::Type, +) -> Option { + fn rec( + sema: &Semantics<'_, RootDatabase>, + famous_defs: &FamousDefs<'_, '_>, + mut max_length: Option, + ty: hir::Type, + label_builder: &mut InlayHintLabelBuilder<'_>, + ) { + let iter_item_type = hint_iterator(sema, famous_defs, &ty); + match iter_item_type { + Some(ty) => { + const LABEL_START: &str = "impl Iterator { + let _ = ty.display_truncated(sema.db, max_length).write_to(label_builder); + } + }; + } + + let mut label_builder = InlayHintLabelBuilder { + db: sema.db, + last_part: String::new(), + location: None, + location_link_enabled: config.location_links, + result: InlayHintLabel::default(), + }; + rec(sema, famous_defs, config.max_length, ty, &mut label_builder); + let r = label_builder.finish(); + Some(r) +} + // Feature: Inlay Hints // // rust-analyzer shows additional information inline with the source code. @@ -200,7 +325,7 @@ pub(crate) fn inlay_hints( let mut acc = Vec::new(); - if let Some(scope) = sema.scope(&file) { + if let Some(scope) = sema.scope(file) { let famous_defs = FamousDefs(&sema, scope.krate()); let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node); @@ -226,18 +351,18 @@ fn hints( file_id: FileId, node: SyntaxNode, ) { - closing_brace_hints(hints, sema, config, file_id, node.clone()); + closing_brace::hints(hints, sema, config, file_id, node.clone()); match_ast! 
{ match node { ast::Expr(expr) => { - chaining_hints(hints, sema, &famous_defs, config, file_id, &expr); - adjustment_hints(hints, sema, config, &expr); + chaining::hints(hints, famous_defs, config, file_id, &expr); + adjustment::hints(hints, sema, config, &expr); match expr { - ast::Expr::CallExpr(it) => param_name_hints(hints, sema, config, ast::Expr::from(it)), + ast::Expr::CallExpr(it) => param_name::hints(hints, sema, config, ast::Expr::from(it)), ast::Expr::MethodCallExpr(it) => { - param_name_hints(hints, sema, config, ast::Expr::from(it)) + param_name::hints(hints, sema, config, ast::Expr::from(it)) } - ast::Expr::ClosureExpr(it) => closure_ret_hints(hints, sema, &famous_defs, config, file_id, it), + ast::Expr::ClosureExpr(it) => closure_ret::hints(hints, famous_defs, config, file_id, it), // We could show reborrows for all expressions, but usually that is just noise to the user // and the main point here is to show why "moving" a mutable reference doesn't necessarily move it // ast::Expr::PathExpr(_) => reborrow_hints(hints, sema, config, &expr), @@ -245,21 +370,24 @@ fn hints( } }, ast::Pat(it) => { - binding_mode_hints(hints, sema, config, &it); + binding_mode::hints(hints, sema, config, &it); if let ast::Pat::IdentPat(it) = it { - bind_pat_hints(hints, sema, config, file_id, &it); + bind_pat::hints(hints, famous_defs, config, file_id, &it); } Some(()) }, ast::Item(it) => match it { // FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints ast::Item::Impl(_) => None, - ast::Item::Fn(it) => fn_lifetime_fn_hints(hints, config, it), + ast::Item::Fn(it) => fn_lifetime_fn::hints(hints, config, it), // static type elisions - ast::Item::Static(it) => implicit_static_hints(hints, config, Either::Left(it)), - ast::Item::Const(it) => implicit_static_hints(hints, config, Either::Right(it)), + ast::Item::Static(it) => implicit_static::hints(hints, config, Either::Left(it)), + ast::Item::Const(it) => implicit_static::hints(hints, config, Either::Right(it)), _ => None, }, + ast::Variant(v) => { + discriminant::hints(hints, famous_defs, config, file_id, &v) + }, // FIXME: fn-ptr type, dyn fn type, and trait object type elisions ast::Type(_) => None, _ => None, @@ -267,733 +395,12 @@ fn hints( }; } -fn closing_brace_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - file_id: FileId, - node: SyntaxNode, -) -> Option<()> { - let min_lines = config.closing_brace_hints_min_lines?; - - let name = |it: ast::Name| it.syntax().text_range(); - - let mut closing_token; - let (label, name_range) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) { - closing_token = item_list.r_curly_token()?; - - let parent = item_list.syntax().parent()?; - match_ast! 
{ - match parent { - ast::Impl(imp) => { - let imp = sema.to_def(&imp)?; - let ty = imp.self_ty(sema.db); - let trait_ = imp.trait_(sema.db); - let hint_text = match trait_ { - Some(tr) => format!("impl {} for {}", tr.name(sema.db), ty.display_truncated(sema.db, config.max_length)), - None => format!("impl {}", ty.display_truncated(sema.db, config.max_length)), - }; - (hint_text, None) - }, - ast::Trait(tr) => { - (format!("trait {}", tr.name()?), tr.name().map(name)) - }, - _ => return None, - } - } - } else if let Some(list) = ast::ItemList::cast(node.clone()) { - closing_token = list.r_curly_token()?; - - let module = ast::Module::cast(list.syntax().parent()?)?; - (format!("mod {}", module.name()?), module.name().map(name)) - } else if let Some(block) = ast::BlockExpr::cast(node.clone()) { - closing_token = block.stmt_list()?.r_curly_token()?; - - let parent = block.syntax().parent()?; - match_ast! { - match parent { - ast::Fn(it) => { - // FIXME: this could include parameters, but `HirDisplay` prints too much info - // and doesn't respect the max length either, so the hints end up way too long - (format!("fn {}", it.name()?), it.name().map(name)) - }, - ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)), - ast::Const(it) => { - if it.underscore_token().is_some() { - ("const _".into(), None) - } else { - (format!("const {}", it.name()?), it.name().map(name)) - } - }, - _ => return None, - } - } - } else if let Some(mac) = ast::MacroCall::cast(node.clone()) { - let last_token = mac.syntax().last_token()?; - if last_token.kind() != T![;] && last_token.kind() != SyntaxKind::R_CURLY { - return None; - } - closing_token = last_token; - - ( - format!("{}!", mac.path()?), - mac.path().and_then(|it| it.segment()).map(|it| it.syntax().text_range()), - ) - } else { - return None; - }; - - if let Some(mut next) = closing_token.next_token() { - if next.kind() == T![;] { - if let Some(tok) = next.next_token() { - closing_token = next; - next = tok; - } - } - if !(next.kind() == SyntaxKind::WHITESPACE && next.text().contains('\n')) { - // Only display the hint if the `}` is the last token on the line - return None; - } - } - - let mut lines = 1; - node.text().for_each_chunk(|s| lines += s.matches('\n').count()); - if lines < min_lines { - return None; - } - - let linked_location = name_range.map(|range| FileRange { file_id, range }); - acc.push(InlayHint { - range: closing_token.text_range(), - kind: InlayKind::ClosingBraceHint, - label: InlayHintLabel { parts: vec![InlayHintLabelPart { text: label, linked_location }] }, - tooltip: None, // provided by label part location - }); - - None -} - -fn implicit_static_hints( - acc: &mut Vec, - config: &InlayHintsConfig, - statik_or_const: Either, -) -> Option<()> { - if config.lifetime_elision_hints != LifetimeElisionHints::Always { - return None; - } - - if let Either::Right(it) = &statik_or_const { - if ast::AssocItemList::can_cast( - it.syntax().parent().map_or(SyntaxKind::EOF, |it| it.kind()), - ) { - return None; - } - } - - if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) { - if ty.lifetime().is_none() { - let t = ty.amp_token()?; - acc.push(InlayHint { - range: t.text_range(), - kind: InlayKind::LifetimeHint, - label: "'static".to_owned().into(), - tooltip: Some(InlayTooltip::String("Elided static lifetime".into())), - }); - } - } - - Some(()) -} - -fn fn_lifetime_fn_hints( - acc: &mut Vec, - config: &InlayHintsConfig, - func: ast::Fn, -) -> Option<()> { - if 
config.lifetime_elision_hints == LifetimeElisionHints::Never { - return None; - } - - let mk_lt_hint = |t: SyntaxToken, label: String| InlayHint { - range: t.text_range(), - kind: InlayKind::LifetimeHint, - label: label.into(), - tooltip: Some(InlayTooltip::String("Elided lifetime".into())), - }; - - let param_list = func.param_list()?; - let generic_param_list = func.generic_param_list(); - let ret_type = func.ret_type(); - let self_param = param_list.self_param().filter(|it| it.amp_token().is_some()); - - let is_elided = |lt: &Option| match lt { - Some(lt) => matches!(lt.text().as_str(), "'_"), - None => true, - }; - - let potential_lt_refs = { - let mut acc: Vec<_> = vec![]; - if let Some(self_param) = &self_param { - let lifetime = self_param.lifetime(); - let is_elided = is_elided(&lifetime); - acc.push((None, self_param.amp_token(), lifetime, is_elided)); - } - param_list.params().filter_map(|it| Some((it.pat(), it.ty()?))).for_each(|(pat, ty)| { - // FIXME: check path types - walk_ty(&ty, &mut |ty| match ty { - ast::Type::RefType(r) => { - let lifetime = r.lifetime(); - let is_elided = is_elided(&lifetime); - acc.push(( - pat.as_ref().and_then(|it| match it { - ast::Pat::IdentPat(p) => p.name(), - _ => None, - }), - r.amp_token(), - lifetime, - is_elided, - )) - } - _ => (), - }) - }); - acc - }; - - // allocate names - let mut gen_idx_name = { - let mut gen = (0u8..).map(|idx| match idx { - idx if idx < 10 => SmolStr::from_iter(['\'', (idx + 48) as char]), - idx => format!("'{idx}").into(), - }); - move || gen.next().unwrap_or_default() - }; - let mut allocated_lifetimes = vec![]; - - let mut used_names: FxHashMap = - match config.param_names_for_lifetime_elision_hints { - true => generic_param_list - .iter() - .flat_map(|gpl| gpl.lifetime_params()) - .filter_map(|param| param.lifetime()) - .filter_map(|lt| Some((SmolStr::from(lt.text().as_str().get(1..)?), 0))) - .collect(), - false => Default::default(), - }; - { - let mut potential_lt_refs = potential_lt_refs.iter().filter(|&&(.., is_elided)| is_elided); - if let Some(_) = &self_param { - if let Some(_) = potential_lt_refs.next() { - allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { - // self can't be used as a lifetime, so no need to check for collisions - "'self".into() - } else { - gen_idx_name() - }); - } - } - potential_lt_refs.for_each(|(name, ..)| { - let name = match name { - Some(it) if config.param_names_for_lifetime_elision_hints => { - if let Some(c) = used_names.get_mut(it.text().as_str()) { - *c += 1; - SmolStr::from(format!("'{text}{c}", text = it.text().as_str())) - } else { - used_names.insert(it.text().as_str().into(), 0); - SmolStr::from_iter(["\'", it.text().as_str()]) - } - } - _ => gen_idx_name(), - }; - allocated_lifetimes.push(name); - }); - } - - // fetch output lifetime if elision rule applies - let output = match potential_lt_refs.as_slice() { - [(_, _, lifetime, _), ..] if self_param.is_some() || potential_lt_refs.len() == 1 => { - match lifetime { - Some(lt) => match lt.text().as_str() { - "'_" => allocated_lifetimes.get(0).cloned(), - "'static" => None, - name => Some(name.into()), - }, - None => allocated_lifetimes.get(0).cloned(), - } - } - [..] 
=> None, - }; - - if allocated_lifetimes.is_empty() && output.is_none() { - return None; - } - - // apply hints - // apply output if required - let mut is_trivial = true; - if let (Some(output_lt), Some(r)) = (&output, ret_type) { - if let Some(ty) = r.ty() { - walk_ty(&ty, &mut |ty| match ty { - ast::Type::RefType(ty) if ty.lifetime().is_none() => { - if let Some(amp) = ty.amp_token() { - is_trivial = false; - acc.push(mk_lt_hint(amp, output_lt.to_string())); - } - } - _ => (), - }) - } - } - - if config.lifetime_elision_hints == LifetimeElisionHints::SkipTrivial && is_trivial { - return None; - } - - let mut a = allocated_lifetimes.iter(); - for (_, amp_token, _, is_elided) in potential_lt_refs { - if is_elided { - let t = amp_token?; - let lt = a.next()?; - acc.push(mk_lt_hint(t, lt.to_string())); - } - } - - // generate generic param list things - match (generic_param_list, allocated_lifetimes.as_slice()) { - (_, []) => (), - (Some(gpl), allocated_lifetimes) => { - let angle_tok = gpl.l_angle_token()?; - let is_empty = gpl.generic_params().next().is_none(); - acc.push(InlayHint { - range: angle_tok.text_range(), - kind: InlayKind::LifetimeHint, - label: format!( - "{}{}", - allocated_lifetimes.iter().format(", "), - if is_empty { "" } else { ", " } - ) - .into(), - tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), - }); - } - (None, allocated_lifetimes) => acc.push(InlayHint { - range: func.name()?.syntax().text_range(), - kind: InlayKind::GenericParamListHint, - label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(), - tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), - }), - } - Some(()) -} - -fn closure_ret_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - famous_defs: &FamousDefs<'_, '_>, - config: &InlayHintsConfig, - file_id: FileId, - closure: ast::ClosureExpr, -) -> Option<()> { - if config.closure_return_type_hints == ClosureReturnTypeHints::Never { - return None; - } - - if closure.ret_type().is_some() { - return None; - } - - if !closure_has_block_body(&closure) - && config.closure_return_type_hints == ClosureReturnTypeHints::WithBlock - { - return None; - } - - let param_list = closure.param_list()?; - - let closure = sema.descend_node_into_attributes(closure.clone()).pop()?; - let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure))?.adjusted(); - let callable = ty.as_callable(sema.db)?; - let ty = callable.return_type(); - if ty.is_unit() { - return None; - } - acc.push(InlayHint { - range: param_list.syntax().text_range(), - kind: InlayKind::ClosureReturnTypeHint, - label: hint_iterator(sema, &famous_defs, config, &ty) - .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string()) - .into(), - tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())), - }); - Some(()) -} - -fn adjustment_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - expr: &ast::Expr, -) -> Option<()> { - if config.adjustment_hints == AdjustmentHints::Never { - return None; - } - - if let ast::Expr::ParenExpr(_) = expr { - // These inherit from the inner expression which would result in duplicate hints - return None; - } - - let parent = expr.syntax().parent().and_then(ast::Expr::cast); - let descended = sema.descend_node_into_attributes(expr.clone()).pop(); - let desc_expr = descended.as_ref().unwrap_or(expr); - let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?; - let needs_parens = match parent { - Some(parent) => { - 
match parent { - ast::Expr::AwaitExpr(_) - | ast::Expr::CallExpr(_) - | ast::Expr::CastExpr(_) - | ast::Expr::FieldExpr(_) - | ast::Expr::MethodCallExpr(_) - | ast::Expr::TryExpr(_) => true, - // FIXME: shorthands need special casing, though not sure if adjustments are even valid there - ast::Expr::RecordExpr(_) => false, - ast::Expr::IndexExpr(index) => index.base().as_ref() == Some(expr), - _ => false, - } - } - None => false, - }; - if needs_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::AdjustmentHint, - label: "(".into(), - tooltip: None, - }); - } - for adjustment in adjustments.into_iter().rev() { - // FIXME: Add some nicer tooltips to each of these - let text = match adjustment { - Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => { - "" - } - Adjust::Deref(None) => "*", - Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => "*", - Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => "*", - Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => "&", - Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => "&mut ", - Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => "&raw const ", - Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => "&raw mut ", - // some of these could be represented via `as` casts, but that's not too nice and - // handling everything as a prefix expr makes the `(` and `)` insertion easier - Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => { - match cast { - PointerCast::ReifyFnPointer => "", - PointerCast::UnsafeFnPointer => "", - PointerCast::ClosureFnPointer(Safety::Unsafe) => { - "" - } - PointerCast::ClosureFnPointer(Safety::Safe) => "", - PointerCast::MutToConstPointer => "", - PointerCast::ArrayToPointer => "", - PointerCast::Unsize => "", - } - } - _ => continue, - }; - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::AdjustmentHint, - label: text.into(), - tooltip: None, - }); - } - if needs_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::AdjustmentHintClosingParenthesis, - label: ")".into(), - tooltip: None, - }); - } - Some(()) -} - -fn chaining_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - famous_defs: &FamousDefs<'_, '_>, - config: &InlayHintsConfig, - file_id: FileId, - expr: &ast::Expr, -) -> Option<()> { - if !config.chaining_hints { - return None; - } - - if matches!(expr, ast::Expr::RecordExpr(_)) { - return None; - } - - let descended = sema.descend_node_into_attributes(expr.clone()).pop(); - let desc_expr = descended.as_ref().unwrap_or(expr); - - let mut tokens = expr - .syntax() - .siblings_with_tokens(Direction::Next) - .filter_map(NodeOrToken::into_token) - .filter(|t| match t.kind() { - SyntaxKind::WHITESPACE if !t.text().contains('\n') => false, - SyntaxKind::COMMENT => false, - _ => true, - }); - - // Chaining can be defined as an expression whose next sibling tokens are newline and dot - // Ignoring extra whitespace and comments - let next = tokens.next()?.kind(); - if next == SyntaxKind::WHITESPACE { - let mut next_next = tokens.next()?.kind(); - while next_next == SyntaxKind::WHITESPACE { - next_next = tokens.next()?.kind(); - } - if next_next == T![.] 
{ - let ty = sema.type_of_expr(desc_expr)?.original; - if ty.is_unknown() { - return None; - } - if matches!(expr, ast::Expr::PathExpr(_)) { - if let Some(hir::Adt::Struct(st)) = ty.as_adt() { - if st.fields(sema.db).is_empty() { - return None; - } - } - } - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::ChainingHint, - label: hint_iterator(sema, &famous_defs, config, &ty) - .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string()) - .into(), - tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())), - }); - } - } - Some(()) -} - -fn param_name_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - expr: ast::Expr, -) -> Option<()> { - if !config.parameter_hints { - return None; - } - - let (callable, arg_list) = get_callable(sema, &expr)?; - let hints = callable - .params(sema.db) - .into_iter() - .zip(arg_list.args()) - .filter_map(|((param, _ty), arg)| { - // Only annotate hints for expressions that exist in the original file - let range = sema.original_range_opt(arg.syntax())?; - let (param_name, name_syntax) = match param.as_ref()? { - Either::Left(pat) => ("self".to_string(), pat.name()), - Either::Right(pat) => match pat { - ast::Pat::IdentPat(it) => (it.name()?.to_string(), it.name()), - _ => return None, - }, - }; - Some((name_syntax, param_name, arg, range)) - }) - .filter(|(_, param_name, arg, _)| { - !should_hide_param_name_hint(sema, &callable, param_name, arg) - }) - .map(|(param, param_name, _, FileRange { range, .. })| { - let mut tooltip = None; - if let Some(name) = param { - if let hir::CallableKind::Function(f) = callable.kind() { - // assert the file is cached so we can map out of macros - if let Some(_) = sema.source(f) { - tooltip = sema.original_range_opt(name.syntax()); - } - } - } - - InlayHint { - range, - kind: InlayKind::ParameterHint, - label: param_name.into(), - tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())), - } - }); - - acc.extend(hints); - Some(()) -} - -fn binding_mode_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - pat: &ast::Pat, -) -> Option<()> { - if !config.binding_mode_hints { - return None; - } - - let range = pat.syntax().text_range(); - sema.pattern_adjustments(&pat).iter().for_each(|ty| { - let reference = ty.is_reference(); - let mut_reference = ty.is_mutable_reference(); - let r = match (reference, mut_reference) { - (true, true) => "&mut", - (true, false) => "&", - _ => return, - }; - acc.push(InlayHint { - range, - kind: InlayKind::BindingModeHint, - label: r.to_string().into(), - tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), - }); - }); - match pat { - ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => { - let bm = sema.binding_mode_of_pat(pat)?; - let bm = match bm { - hir::BindingMode::Move => return None, - hir::BindingMode::Ref(Mutability::Mut) => "ref mut", - hir::BindingMode::Ref(Mutability::Shared) => "ref", - }; - acc.push(InlayHint { - range, - kind: InlayKind::BindingModeHint, - label: bm.to_string().into(), - tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), - }); - } - _ => (), - } - - Some(()) -} - -fn bind_pat_hints( - acc: &mut Vec, - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - file_id: FileId, - pat: &ast::IdentPat, -) -> Option<()> { - if !config.type_hints { - return None; - } - - let descended = 
sema.descend_node_into_attributes(pat.clone()).pop(); - let desc_pat = descended.as_ref().unwrap_or(pat); - let ty = sema.type_of_pat(&desc_pat.clone().into())?.original; - - if should_not_display_type_hint(sema, config, pat, &ty) { - return None; - } - - let krate = sema.scope(desc_pat.syntax())?.krate(); - let famous_defs = FamousDefs(sema, krate); - let label = hint_iterator(sema, &famous_defs, config, &ty); - - let label = match label { - Some(label) => label, - None => { - let ty_name = ty.display_truncated(sema.db, config.max_length).to_string(); - if config.hide_named_constructor_hints - && is_named_constructor(sema, pat, &ty_name).is_some() - { - return None; - } - ty_name - } - }; - - acc.push(InlayHint { - range: match pat.name() { - Some(name) => name.syntax().text_range(), - None => pat.syntax().text_range(), - }, - kind: InlayKind::TypeHint, - label: label.into(), - tooltip: pat - .name() - .map(|it| it.syntax().text_range()) - .map(|it| InlayTooltip::HoverRanged(file_id, it)), - }); - - Some(()) -} - -fn is_named_constructor( - sema: &Semantics<'_, RootDatabase>, - pat: &ast::IdentPat, - ty_name: &str, -) -> Option<()> { - let let_node = pat.syntax().parent()?; - let expr = match_ast! { - match let_node { - ast::LetStmt(it) => it.initializer(), - ast::LetExpr(it) => it.expr(), - _ => None, - } - }?; - - let expr = sema.descend_node_into_attributes(expr.clone()).pop().unwrap_or(expr); - // unwrap postfix expressions - let expr = match expr { - ast::Expr::TryExpr(it) => it.expr(), - ast::Expr::AwaitExpr(it) => it.expr(), - expr => Some(expr), - }?; - let expr = match expr { - ast::Expr::CallExpr(call) => match call.expr()? { - ast::Expr::PathExpr(path) => path, - _ => return None, - }, - ast::Expr::PathExpr(path) => path, - _ => return None, - }; - let path = expr.path()?; - - let callable = sema.type_of_expr(&ast::Expr::PathExpr(expr))?.original.as_callable(sema.db); - let callable_kind = callable.map(|it| it.kind()); - let qual_seg = match callable_kind { - Some(hir::CallableKind::Function(_) | hir::CallableKind::TupleEnumVariant(_)) => { - path.qualifier()?.segment() - } - _ => path.segment(), - }?; - - let ctor_name = match qual_seg.kind()? { - ast::PathSegmentKind::Name(name_ref) => { - match qual_seg.generic_arg_list().map(|it| it.generic_args()) { - Some(generics) => format!("{}<{}>", name_ref, generics.format(", ")), - None => name_ref.to_string(), - } - } - ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(), - _ => return None, - }; - (ctor_name == ty_name).then(|| ()) -} - -/// Checks if the type is an Iterator from std::iter and replaces its hint with an `impl Iterator`. +/// Checks if the type is an Iterator from std::iter and returns its item type. 
fn hint_iterator( sema: &Semantics<'_, RootDatabase>, famous_defs: &FamousDefs<'_, '_>, - config: &InlayHintsConfig, ty: &hir::Type, -) -> Option { +) -> Option { let db = sema.db; let strukt = ty.strip_references().as_adt()?; let krate = strukt.module(db).krate(); @@ -1016,289 +423,32 @@ fn hint_iterator( _ => None, })?; if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) { - const LABEL_START: &str = "impl Iterator bool { - if let Some(hir::Adt::Enum(enum_data)) = pat_ty.as_adt() { - let pat_text = bind_pat.to_string(); - enum_data - .variants(db) - .into_iter() - .map(|variant| variant.name(db).to_smol_str()) - .any(|enum_name| enum_name == pat_text) - } else { - false - } -} - -fn should_not_display_type_hint( - sema: &Semantics<'_, RootDatabase>, - config: &InlayHintsConfig, - bind_pat: &ast::IdentPat, - pat_ty: &hir::Type, -) -> bool { - let db = sema.db; - - if pat_ty.is_unknown() { - return true; - } - - if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() { - if s.fields(db).is_empty() && s.name(db).to_smol_str() == bind_pat.to_string() { - return true; - } - } - - if config.hide_closure_initialization_hints { - if let Some(parent) = bind_pat.syntax().parent() { - if let Some(it) = ast::LetStmt::cast(parent.clone()) { - if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() { - if closure_has_block_body(&closure) { - return true; - } - } - } - } - } - - for node in bind_pat.syntax().ancestors() { - match_ast! { - match node { - ast::LetStmt(it) => return it.ty().is_some(), - // FIXME: We might wanna show type hints in parameters for non-top level patterns as well - ast::Param(it) => return it.ty().is_some(), - ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), - ast::LetExpr(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), - ast::IfExpr(_) => return false, - ast::WhileExpr(_) => return false, - ast::ForExpr(it) => { - // We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit). - // Type of expr should be iterable. 
- return it.in_token().is_none() || - it.iterable() - .and_then(|iterable_expr| sema.type_of_expr(&iterable_expr)) - .map(TypeInfo::original) - .map_or(true, |iterable_ty| iterable_ty.is_unknown() || iterable_ty.is_unit()) - }, - _ => (), - } - } - } - false -} - fn closure_has_block_body(closure: &ast::ClosureExpr) -> bool { matches!(closure.body(), Some(ast::Expr::BlockExpr(_))) } -fn should_hide_param_name_hint( - sema: &Semantics<'_, RootDatabase>, - callable: &hir::Callable, - param_name: &str, - argument: &ast::Expr, -) -> bool { - // These are to be tested in the `parameter_hint_heuristics` test - // hide when: - // - the parameter name is a suffix of the function's name - // - the argument is a qualified constructing or call expression where the qualifier is an ADT - // - exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix - // of argument with _ splitting it off - // - param starts with `ra_fixture` - // - param is a well known name in a unary function - - let param_name = param_name.trim_start_matches('_'); - if param_name.is_empty() { - return true; - } - - if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) { - return false; - } - - let fn_name = match callable.kind() { - hir::CallableKind::Function(it) => Some(it.name(sema.db).to_smol_str()), - _ => None, - }; - let fn_name = fn_name.as_deref(); - is_param_name_suffix_of_fn_name(param_name, callable, fn_name) - || is_argument_similar_to_param_name(argument, param_name) - || param_name.starts_with("ra_fixture") - || (callable.n_params() == 1 && is_obvious_param(param_name)) - || is_adt_constructor_similar_to_param_name(sema, argument, param_name) -} - -fn is_argument_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool { - // check whether param_name and argument are the same or - // whether param_name is a prefix/suffix of argument(split at `_`) - let argument = match get_string_representation(argument) { - Some(argument) => argument, - None => return false, - }; - - // std is honestly too panic happy... - let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at)); - - let param_name = param_name.trim_start_matches('_'); - let argument = argument.trim_start_matches('_'); - - match str_split_at(argument, param_name.len()) { - Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => { - return rest.is_empty() || rest.starts_with('_'); - } - _ => (), - } - match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) { - Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => { - return rest.is_empty() || rest.ends_with('_'); - } - _ => (), - } - false -} - -/// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal. -/// -/// `fn strip_suffix(suffix)` will be hidden. -/// `fn stripsuffix(suffix)` will not be hidden. 
-fn is_param_name_suffix_of_fn_name( - param_name: &str, - callable: &Callable, - fn_name: Option<&str>, -) -> bool { - match (callable.n_params(), fn_name) { - (1, Some(function)) => { - function == param_name - || function - .len() - .checked_sub(param_name.len()) - .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at))) - .map_or(false, |(prefix, suffix)| { - suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_') - }) - } - _ => false, - } -} - -fn is_adt_constructor_similar_to_param_name( - sema: &Semantics<'_, RootDatabase>, - argument: &ast::Expr, - param_name: &str, -) -> bool { - let path = match argument { - ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e { - ast::Expr::PathExpr(p) => p.path(), - _ => None, - }), - ast::Expr::PathExpr(p) => p.path(), - ast::Expr::RecordExpr(r) => r.path(), - _ => return false, - }; - let path = match path { - Some(it) => it, - None => return false, - }; - (|| match sema.resolve_path(&path)? { - hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => { - Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name) - } - hir::PathResolution::Def(hir::ModuleDef::Function(_) | hir::ModuleDef::Variant(_)) => { - if to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name { - return Some(true); - } - let qual = path.qualifier()?; - match sema.resolve_path(&qual)? { - hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => { - Some(to_lower_snake_case(&qual.segment()?.name_ref()?.text()) == param_name) - } - _ => None, - } - } - _ => None, - })() - .unwrap_or(false) -} - -fn get_string_representation(expr: &ast::Expr) -> Option { - match expr { - ast::Expr::MethodCallExpr(method_call_expr) => { - let name_ref = method_call_expr.name_ref()?; - match name_ref.text().as_str() { - "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()), - name_ref => Some(name_ref.to_owned()), - } - } - ast::Expr::MacroExpr(macro_expr) => { - Some(macro_expr.macro_call()?.path()?.segment()?.to_string()) - } - ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()), - ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()), - ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?), - ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?), - ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?), - _ => None, - } -} - -fn is_obvious_param(param_name: &str) -> bool { - // avoid displaying hints for common functions like map, filter, etc. 
- // or other obvious words used in std - let is_obvious_param_name = - matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other"); - param_name.len() == 1 || is_obvious_param_name -} - -fn get_callable( - sema: &Semantics<'_, RootDatabase>, - expr: &ast::Expr, -) -> Option<(hir::Callable, ast::ArgList)> { - match expr { - ast::Expr::CallExpr(expr) => { - let descended = sema.descend_node_into_attributes(expr.clone()).pop(); - let expr = descended.as_ref().unwrap_or(expr); - sema.type_of_expr(&expr.expr()?)?.original.as_callable(sema.db).zip(expr.arg_list()) - } - ast::Expr::MethodCallExpr(expr) => { - let descended = sema.descend_node_into_attributes(expr.clone()).pop(); - let expr = descended.as_ref().unwrap_or(expr); - sema.resolve_method_call_as_callable(expr).zip(expr.arg_list()) - } - _ => None, - } -} - #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; + use expect_test::Expect; use itertools::Itertools; - use syntax::{TextRange, TextSize}; use test_utils::extract_annotations; - use crate::inlay_hints::AdjustmentHints; + use crate::inlay_hints::{AdjustmentHints, AdjustmentHintsMode}; + use crate::DiscriminantHints; use crate::{fixture, inlay_hints::InlayHintsConfig, LifetimeElisionHints}; use super::ClosureReturnTypeHints; - const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig { + pub(super) const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig { + location_links: false, + discriminant_hints: DiscriminantHints::Never, render_colons: false, type_hints: false, parameter_hints: false, @@ -1306,6 +456,8 @@ mod tests { lifetime_elision_hints: LifetimeElisionHints::Never, closure_return_type_hints: ClosureReturnTypeHints::Never, adjustment_hints: AdjustmentHints::Never, + adjustment_hints_mode: AdjustmentHintsMode::Prefix, + adjustment_hints_hide_outside_unsafe: false, binding_mode_hints: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, @@ -1313,43 +465,27 @@ mod tests { max_length: None, closing_brace_hints_min_lines: None, }; - const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig { + pub(super) const DISABLED_CONFIG_WITH_LINKS: InlayHintsConfig = + InlayHintsConfig { location_links: true, ..DISABLED_CONFIG }; + pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig { type_hints: true, parameter_hints: true, chaining_hints: true, closure_return_type_hints: ClosureReturnTypeHints::WithBlock, binding_mode_hints: true, lifetime_elision_hints: LifetimeElisionHints::Always, - ..DISABLED_CONFIG + ..DISABLED_CONFIG_WITH_LINKS }; #[track_caller] - fn check(ra_fixture: &str) { + pub(super) fn check(ra_fixture: &str) { check_with_config(TEST_CONFIG, ra_fixture); } #[track_caller] - fn check_params(ra_fixture: &str) { - check_with_config( - InlayHintsConfig { parameter_hints: true, ..DISABLED_CONFIG }, - ra_fixture, - ); - } - - #[track_caller] - fn check_types(ra_fixture: &str) { - check_with_config(InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, ra_fixture); - } - - #[track_caller] - fn check_chains(ra_fixture: &str) { - check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture); - } - - #[track_caller] - fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) { + pub(super) fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) { let (analysis, file_id) = fixture::file(ra_fixture); - let mut expected = extract_annotations(&*analysis.file_text(file_id).unwrap()); + let mut expected = extract_annotations(&analysis.file_text(file_id).unwrap()); let 
inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); let actual = inlay_hints .into_iter() @@ -1358,11 +494,11 @@ mod tests { .collect::>(); expected.sort_by_key(|(range, _)| range.start()); - assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual); + assert_eq!(expected, actual, "\nExpected:\n{expected:#?}\n\nActual:\n{actual:#?}"); } #[track_caller] - fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) { + pub(super) fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) { let (analysis, file_id) = fixture::file(ra_fixture); let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap(); expect.assert_debug_eq(&inlay_hints) @@ -1379,1720 +515,4 @@ fn main() { }"#, ); } - - // Parameter hint tests - - #[test] - fn param_hints_only() { - check_params( - r#" -fn foo(a: i32, b: i32) -> i32 { a + b } -fn main() { - let _x = foo( - 4, - //^ a - 4, - //^ b - ); -}"#, - ); - } - - #[test] - fn param_hints_on_closure() { - check_params( - r#" -fn main() { - let clo = |a: u8, b: u8| a + b; - clo( - 1, - //^ a - 2, - //^ b - ); -} - "#, - ); - } - - #[test] - fn param_name_similar_to_fn_name_still_hints() { - check_params( - r#" -fn max(x: i32, y: i32) -> i32 { x + y } -fn main() { - let _x = max( - 4, - //^ x - 4, - //^ y - ); -}"#, - ); - } - - #[test] - fn param_name_similar_to_fn_name() { - check_params( - r#" -fn param_with_underscore(with_underscore: i32) -> i32 { with_underscore } -fn main() { - let _x = param_with_underscore( - 4, - ); -}"#, - ); - check_params( - r#" -fn param_with_underscore(underscore: i32) -> i32 { underscore } -fn main() { - let _x = param_with_underscore( - 4, - ); -}"#, - ); - } - - #[test] - fn param_name_same_as_fn_name() { - check_params( - r#" -fn foo(foo: i32) -> i32 { foo } -fn main() { - let _x = foo( - 4, - ); -}"#, - ); - } - - #[test] - fn never_hide_param_when_multiple_params() { - check_params( - r#" -fn foo(foo: i32, bar: i32) -> i32 { bar + baz } -fn main() { - let _x = foo( - 4, - //^ foo - 8, - //^ bar - ); -}"#, - ); - } - - #[test] - fn param_hints_look_through_as_ref_and_clone() { - check_params( - r#" -fn foo(bar: i32, baz: f32) {} - -fn main() { - let bar = 3; - let baz = &"baz"; - let fez = 1.0; - foo(bar.clone(), bar.clone()); - //^^^^^^^^^^^ baz - foo(bar.as_ref(), bar.as_ref()); - //^^^^^^^^^^^^ baz -} -"#, - ); - } - - #[test] - fn self_param_hints() { - check_params( - r#" -struct Foo; - -impl Foo { - fn foo(self: Self) {} - fn bar(self: &Self) {} -} - -fn main() { - Foo::foo(Foo); - //^^^ self - Foo::bar(&Foo); - //^^^^ self -} -"#, - ) - } - - #[test] - fn param_name_hints_show_for_literals() { - check_params( - r#"pub fn test(a: i32, b: i32) -> [i32; 2] { [a, b] } -fn main() { - test( - 0xa_b, - //^^^^^ a - 0xa_b, - //^^^^^ b - ); -}"#, - ) - } - - #[test] - fn function_call_parameter_hint() { - check_params( - r#" -//- minicore: option -struct FileId {} -struct SmolStr {} - -struct TextRange {} -struct SyntaxKind {} -struct NavigationTarget {} - -struct Test {} - -impl Test { - fn method(&self, mut param: i32) -> i32 { param * 2 } - - fn from_syntax( - file_id: FileId, - name: SmolStr, - focus_range: Option, - full_range: TextRange, - kind: SyntaxKind, - docs: Option, - ) -> NavigationTarget { - NavigationTarget {} - } -} - -fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 { - foo + bar -} - -fn main() { - let not_literal = 1; - let _: i32 = test_func(1, 2, "hello", 3, not_literal); - //^ foo ^ bar ^^^^^^^ msg 
^^^^^^^^^^^ last - let t: Test = Test {}; - t.method(123); - //^^^ param - Test::method(&t, 3456); - //^^ self ^^^^ param - Test::from_syntax( - FileId {}, - "impl".into(), - //^^^^^^^^^^^^^ name - None, - //^^^^ focus_range - TextRange {}, - //^^^^^^^^^^^^ full_range - SyntaxKind {}, - //^^^^^^^^^^^^^ kind - None, - //^^^^ docs - ); -}"#, - ); - } - - #[test] - fn parameter_hint_heuristics() { - check_params( - r#" -fn check(ra_fixture_thing: &str) {} - -fn map(f: i32) {} -fn filter(predicate: i32) {} - -fn strip_suffix(suffix: &str) {} -fn stripsuffix(suffix: &str) {} -fn same(same: u32) {} -fn same2(_same2: u32) {} - -fn enum_matches_param_name(completion_kind: CompletionKind) {} - -fn foo(param: u32) {} -fn bar(param_eter: u32) {} - -enum CompletionKind { - Keyword, -} - -fn non_ident_pat((a, b): (u32, u32)) {} - -fn main() { - const PARAM: u32 = 0; - foo(PARAM); - foo(!PARAM); - // ^^^^^^ param - check(""); - - map(0); - filter(0); - - strip_suffix(""); - stripsuffix(""); - //^^ suffix - same(0); - same2(0); - - enum_matches_param_name(CompletionKind::Keyword); - - let param = 0; - foo(param); - foo(param as _); - let param_end = 0; - foo(param_end); - let start_param = 0; - foo(start_param); - let param2 = 0; - foo(param2); - //^^^^^^ param - - macro_rules! param { - () => {}; - }; - foo(param!()); - - let param_eter = 0; - bar(param_eter); - let param_eter_end = 0; - bar(param_eter_end); - let start_param_eter = 0; - bar(start_param_eter); - let param_eter2 = 0; - bar(param_eter2); - //^^^^^^^^^^^ param_eter - - non_ident_pat((0, 0)); -}"#, - ); - } - - // Type-Hint tests - - #[test] - fn type_hints_only() { - check_types( - r#" -fn foo(a: i32, b: i32) -> i32 { a + b } -fn main() { - let _x = foo(4, 4); - //^^ i32 -}"#, - ); - } - - #[test] - fn type_hints_bindings_after_at() { - check_types( - r#" -//- minicore: option -fn main() { - let ref foo @ bar @ ref mut baz = 0; - //^^^ &i32 - //^^^ i32 - //^^^ &mut i32 - let [x @ ..] = [0]; - //^ [i32; 1] - if let x @ Some(_) = Some(0) {} - //^ Option - let foo @ (bar, baz) = (3, 3); - //^^^ (i32, i32) - //^^^ i32 - //^^^ i32 -}"#, - ); - } - - #[test] - fn default_generic_types_should_not_be_displayed() { - check( - r#" -struct Test { k: K, t: T } - -fn main() { - let zz = Test { t: 23u8, k: 33 }; - //^^ Test - let zz_ref = &zz; - //^^^^^^ &Test - let test = || zz; - //^^^^ || -> Test -}"#, - ); - } - - #[test] - fn shorten_iterators_in_associated_params() { - check_types( - r#" -//- minicore: iterators -use core::iter; - -pub struct SomeIter {} - -impl SomeIter { - pub fn new() -> Self { SomeIter {} } - pub fn push(&mut self, t: T) {} -} - -impl Iterator for SomeIter { - type Item = T; - fn next(&mut self) -> Option { - None - } -} - -fn main() { - let mut some_iter = SomeIter::new(); - //^^^^^^^^^ SomeIter>> - some_iter.push(iter::repeat(2).take(2)); - let iter_of_iters = some_iter.take(2); - //^^^^^^^^^^^^^ impl Iterator> -} -"#, - ); - } - - #[test] - fn iterator_hint_regression_issue_12674() { - // Ensure we don't crash while solving the projection type of iterators. 
- check_expect( - InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, - r#" -//- minicore: iterators -struct S(T); -impl S { - fn iter(&self) -> Iter<'_, T> { loop {} } -} -struct Iter<'a, T: 'a>(&'a T); -impl<'a, T> Iterator for Iter<'a, T> { - type Item = &'a T; - fn next(&mut self) -> Option { loop {} } -} -struct Container<'a> { - elements: S<&'a str>, -} -struct SliceIter<'a, T>(&'a T); -impl<'a, T> Iterator for SliceIter<'a, T> { - type Item = &'a T; - fn next(&mut self) -> Option { loop {} } -} - -fn main(a: SliceIter<'_, Container>) { - a - .filter_map(|c| Some(c.elements.iter().filter_map(|v| Some(v)))) - .map(|e| e); -} - "#, - expect![[r#" - [ - InlayHint { - range: 484..554, - kind: ChainingHint, - label: [ - "impl Iterator>", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 484..554, - ), - ), - }, - InlayHint { - range: 484..485, - kind: ChainingHint, - label: [ - "SliceIter", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 484..485, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn infer_call_method_return_associated_types_with_generic() { - check_types( - r#" - pub trait Default { - fn default() -> Self; - } - pub trait Foo { - type Bar: Default; - } - - pub fn quux() -> T::Bar { - let y = Default::default(); - //^ ::Bar - - y - } - "#, - ); - } - - #[test] - fn fn_hints() { - check_types( - r#" -//- minicore: fn, sized -fn foo() -> impl Fn() { loop {} } -fn foo1() -> impl Fn(f64) { loop {} } -fn foo2() -> impl Fn(f64, f64) { loop {} } -fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} } -fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} } -fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} } -fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} } -fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} } - -fn main() { - let foo = foo(); - // ^^^ impl Fn() - let foo = foo1(); - // ^^^ impl Fn(f64) - let foo = foo2(); - // ^^^ impl Fn(f64, f64) - let foo = foo3(); - // ^^^ impl Fn(f64, f64) -> u32 - let foo = foo4(); - // ^^^ &dyn Fn(f64, f64) -> u32 - let foo = foo5(); - // ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32 - let foo = foo6(); - // ^^^ impl Fn(f64, f64) -> u32 - let foo = foo7(); - // ^^^ *const impl Fn(f64, f64) -> u32 -} -"#, - ) - } - - #[test] - fn check_hint_range_limit() { - let fixture = r#" - //- minicore: fn, sized - fn foo() -> impl Fn() { loop {} } - fn foo1() -> impl Fn(f64) { loop {} } - fn foo2() -> impl Fn(f64, f64) { loop {} } - fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} } - fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} } - fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} } - fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} } - fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} } - - fn main() { - let foo = foo(); - let foo = foo1(); - let foo = foo2(); - // ^^^ impl Fn(f64, f64) - let foo = foo3(); - // ^^^ impl Fn(f64, f64) -> u32 - let foo = foo4(); - let foo = foo5(); - let foo = foo6(); - let foo = foo7(); - } - "#; - let (analysis, file_id) = fixture::file(fixture); - let expected = extract_annotations(&*analysis.file_text(file_id).unwrap()); - let inlay_hints = analysis - .inlay_hints( - &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, - file_id, - Some(TextRange::new(TextSize::from(500), TextSize::from(600))), - ) - .unwrap(); - let actual = - inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::>(); - assert_eq!(expected, actual, 
"\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual); - } - - #[test] - fn fn_hints_ptr_rpit_fn_parentheses() { - check_types( - r#" -//- minicore: fn, sized -trait Trait {} - -fn foo1() -> *const impl Fn() { loop {} } -fn foo2() -> *const (impl Fn() + Sized) { loop {} } -fn foo3() -> *const (impl Fn() + ?Sized) { loop {} } -fn foo4() -> *const (impl Sized + Fn()) { loop {} } -fn foo5() -> *const (impl ?Sized + Fn()) { loop {} } -fn foo6() -> *const (impl Fn() + Trait) { loop {} } -fn foo7() -> *const (impl Fn() + Sized + Trait) { loop {} } -fn foo8() -> *const (impl Fn() + ?Sized + Trait) { loop {} } -fn foo9() -> *const (impl Fn() -> u8 + ?Sized) { loop {} } -fn foo10() -> *const (impl Fn() + Sized + ?Sized) { loop {} } - -fn main() { - let foo = foo1(); - // ^^^ *const impl Fn() - let foo = foo2(); - // ^^^ *const impl Fn() - let foo = foo3(); - // ^^^ *const (impl Fn() + ?Sized) - let foo = foo4(); - // ^^^ *const impl Fn() - let foo = foo5(); - // ^^^ *const (impl Fn() + ?Sized) - let foo = foo6(); - // ^^^ *const (impl Fn() + Trait) - let foo = foo7(); - // ^^^ *const (impl Fn() + Trait) - let foo = foo8(); - // ^^^ *const (impl Fn() + Trait + ?Sized) - let foo = foo9(); - // ^^^ *const (impl Fn() -> u8 + ?Sized) - let foo = foo10(); - // ^^^ *const impl Fn() -} -"#, - ) - } - - #[test] - fn unit_structs_have_no_type_hints() { - check_types( - r#" -//- minicore: result -struct SyntheticSyntax; - -fn main() { - match Ok(()) { - Ok(_) => (), - Err(SyntheticSyntax) => (), - } -}"#, - ); - } - - #[test] - fn let_statement() { - check_types( - r#" -#[derive(PartialEq)] -enum Option { None, Some(T) } - -#[derive(PartialEq)] -struct Test { a: Option, b: u8 } - -fn main() { - struct InnerStruct {} - - let test = 54; - //^^^^ i32 - let test: i32 = 33; - let mut test = 33; - //^^^^ i32 - let _ = 22; - let test = "test"; - //^^^^ &str - let test = InnerStruct {}; - //^^^^ InnerStruct - - let test = unresolved(); - - let test = (42, 'a'); - //^^^^ (i32, char) - let (a, (b, (c,)) = (2, (3, (9.2,)); - //^ i32 ^ i32 ^ f64 - let &x = &92; - //^ i32 -}"#, - ); - } - - #[test] - fn if_expr() { - check_types( - r#" -//- minicore: option -struct Test { a: Option, b: u8 } - -fn main() { - let test = Some(Test { a: Some(3), b: 1 }); - //^^^^ Option - if let None = &test {}; - if let test = &test {}; - //^^^^ &Option - if let Some(test) = &test {}; - //^^^^ &Test - if let Some(Test { a, b }) = &test {}; - //^ &Option ^ &u8 - if let Some(Test { a: x, b: y }) = &test {}; - //^ &Option ^ &u8 - if let Some(Test { a: Some(x), b: y }) = &test {}; - //^ &u32 ^ &u8 - if let Some(Test { a: None, b: y }) = &test {}; - //^ &u8 - if let Some(Test { b: y, .. 
}) = &test {}; - //^ &u8 - if test == None {} -}"#, - ); - } - - #[test] - fn while_expr() { - check_types( - r#" -//- minicore: option -struct Test { a: Option, b: u8 } - -fn main() { - let test = Some(Test { a: Some(3), b: 1 }); - //^^^^ Option - while let Some(Test { a: Some(x), b: y }) = &test {}; - //^ &u32 ^ &u8 -}"#, - ); - } - - #[test] - fn match_arm_list() { - check_types( - r#" -//- minicore: option -struct Test { a: Option, b: u8 } - -fn main() { - match Some(Test { a: Some(3), b: 1 }) { - None => (), - test => (), - //^^^^ Option - Some(Test { a: Some(x), b: y }) => (), - //^ u32 ^ u8 - _ => {} - } -}"#, - ); - } - - #[test] - fn complete_for_hint() { - check_types( - r#" -//- minicore: iterator -pub struct Vec {} - -impl Vec { - pub fn new() -> Self { Vec {} } - pub fn push(&mut self, t: T) {} -} - -impl IntoIterator for Vec { - type Item = T; - type IntoIter = IntoIter; -} - -struct IntoIter {} - -impl Iterator for IntoIter { - type Item = T; -} - -fn main() { - let mut data = Vec::new(); - //^^^^ Vec<&str> - data.push("foo"); - for i in data { - //^ &str - let z = i; - //^ &str - } -} -"#, - ); - } - - #[test] - fn multi_dyn_trait_bounds() { - check_types( - r#" -pub struct Vec {} - -impl Vec { - pub fn new() -> Self { Vec {} } -} - -pub struct Box {} - -trait Display {} -auto trait Sync {} - -fn main() { - // The block expression wrapping disables the constructor hint hiding logic - let _v = { Vec::>::new() }; - //^^ Vec> - let _v = { Vec::>::new() }; - //^^ Vec> - let _v = { Vec::>::new() }; - //^^ Vec> -} -"#, - ); - } - - #[test] - fn shorten_iterator_hints() { - check_types( - r#" -//- minicore: iterators -use core::iter; - -struct MyIter; - -impl Iterator for MyIter { - type Item = (); - fn next(&mut self) -> Option { - None - } -} - -fn main() { - let _x = MyIter; - //^^ MyIter - let _x = iter::repeat(0); - //^^ impl Iterator - fn generic(t: T) { - let _x = iter::repeat(t); - //^^ impl Iterator - let _chained = iter::repeat(t).take(10); - //^^^^^^^^ impl Iterator - } -} -"#, - ); - } - - #[test] - fn skip_constructor_and_enum_type_hints() { - check_with_config( - InlayHintsConfig { - type_hints: true, - hide_named_constructor_hints: true, - ..DISABLED_CONFIG - }, - r#" -//- minicore: try, option -use core::ops::ControlFlow; - -mod x { - pub mod y { pub struct Foo; } - pub struct Foo; - pub enum AnotherEnum { - Variant() - }; -} -struct Struct; -struct TupleStruct(); - -impl Struct { - fn new() -> Self { - Struct - } - fn try_new() -> ControlFlow<(), Self> { - ControlFlow::Continue(Struct) - } -} - -struct Generic(T); -impl Generic { - fn new() -> Self { - Generic(0) - } -} - -enum Enum { - Variant(u32) -} - -fn times2(value: i32) -> i32 { - 2 * value -} - -fn main() { - let enumb = Enum::Variant(0); - - let strukt = x::Foo; - let strukt = x::y::Foo; - let strukt = Struct; - let strukt = Struct::new(); - - let tuple_struct = TupleStruct(); - - let generic0 = Generic::new(); - // ^^^^^^^^ Generic - let generic1 = Generic(0); - // ^^^^^^^^ Generic - let generic2 = Generic::::new(); - let generic3 = >::new(); - let generic4 = Generic::(0); - - - let option = Some(0); - // ^^^^^^ Option - let func = times2; - // ^^^^ fn times2(i32) -> i32 - let closure = |x: i32| x * 2; - // ^^^^^^^ |i32| -> i32 -} - -fn fallible() -> ControlFlow<()> { - let strukt = Struct::try_new()?; -} -"#, - ); - } - - #[test] - fn shows_constructor_type_hints_when_enabled() { - check_types( - r#" -//- minicore: try -use core::ops::ControlFlow; - -struct Struct; -struct TupleStruct(); - -impl Struct 
{ - fn new() -> Self { - Struct - } - fn try_new() -> ControlFlow<(), Self> { - ControlFlow::Continue(Struct) - } -} - -struct Generic(T); -impl Generic { - fn new() -> Self { - Generic(0) - } -} - -fn main() { - let strukt = Struct::new(); - // ^^^^^^ Struct - let tuple_struct = TupleStruct(); - // ^^^^^^^^^^^^ TupleStruct - let generic0 = Generic::new(); - // ^^^^^^^^ Generic - let generic1 = Generic::::new(); - // ^^^^^^^^ Generic - let generic2 = >::new(); - // ^^^^^^^^ Generic -} - -fn fallible() -> ControlFlow<()> { - let strukt = Struct::try_new()?; - // ^^^^^^ Struct -} -"#, - ); - } - - #[test] - fn closures() { - check( - r#" -fn main() { - let mut start = 0; - //^^^^^ i32 - (0..2).for_each(|increment | { start += increment; }); - //^^^^^^^^^ i32 - - let multiply = - //^^^^^^^^ |i32, i32| -> i32 - | a, b| a * b - //^ i32 ^ i32 - - ; - - let _: i32 = multiply(1, 2); - //^ a ^ b - let multiply_ref = &multiply; - //^^^^^^^^^^^^ &|i32, i32| -> i32 - - let return_42 = || 42; - //^^^^^^^^^ || -> i32 - || { 42 }; - //^^ i32 -}"#, - ); - } - - #[test] - fn return_type_hints_for_closure_without_block() { - check_with_config( - InlayHintsConfig { - closure_return_type_hints: ClosureReturnTypeHints::Always, - ..DISABLED_CONFIG - }, - r#" -fn main() { - let a = || { 0 }; - //^^ i32 - let b = || 0; - //^^ i32 -}"#, - ); - } - - #[test] - fn skip_closure_type_hints() { - check_with_config( - InlayHintsConfig { - type_hints: true, - hide_closure_initialization_hints: true, - ..DISABLED_CONFIG - }, - r#" -//- minicore: fn -fn main() { - let multiple_2 = |x: i32| { x * 2 }; - - let multiple_2 = |x: i32| x * 2; - // ^^^^^^^^^^ |i32| -> i32 - - let (not) = (|x: bool| { !x }); - // ^^^ |bool| -> bool - - let (is_zero, _b) = (|x: usize| { x == 0 }, false); - // ^^^^^^^ |usize| -> bool - // ^^ bool - - let plus_one = |x| { x + 1 }; - // ^ u8 - foo(plus_one); - - let add_mul = bar(|x: u8| { x + 1 }); - // ^^^^^^^ impl FnOnce(u8) -> u8 + ?Sized - - let closure = if let Some(6) = add_mul(2).checked_sub(1) { - // ^^^^^^^ fn(i32) -> i32 - |x: i32| { x * 2 } - } else { - |x: i32| { x * 3 } - }; -} - -fn foo(f: impl FnOnce(u8) -> u8) {} - -fn bar(f: impl FnOnce(u8) -> u8) -> impl FnOnce(u8) -> u8 { - move |x: u8| f(x) * 2 -} -"#, - ); - } - - #[test] - fn hint_truncation() { - check_with_config( - InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG }, - r#" -struct Smol(T); - -struct VeryLongOuterName(T); - -fn main() { - let a = Smol(0u32); - //^ Smol - let b = VeryLongOuterName(0usize); - //^ VeryLongOuterName<…> - let c = Smol(Smol(0u32)) - //^ Smol> -}"#, - ); - } - - // Chaining hint tests - - #[test] - fn chaining_hints_ignore_comments() { - check_expect( - InlayHintsConfig { type_hints: false, chaining_hints: true, ..DISABLED_CONFIG }, - r#" -struct A(B); -impl A { fn into_b(self) -> B { self.0 } } -struct B(C); -impl B { fn into_c(self) -> C { self.0 } } -struct C; - -fn main() { - let c = A(B(C)) - .into_b() // This is a comment - // This is another comment - .into_c(); -} -"#, - expect![[r#" - [ - InlayHint { - range: 147..172, - kind: ChainingHint, - label: [ - "B", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 147..172, - ), - ), - }, - InlayHint { - range: 147..154, - kind: ChainingHint, - label: [ - "A", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 147..154, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn chaining_hints_without_newlines() { - check_chains( - r#" -struct A(B); -impl A { fn into_b(self) -> B { self.0 } } -struct B(C); -impl B { fn into_c(self) -> 
C { self.0 } } -struct C; - -fn main() { - let c = A(B(C)).into_b().into_c(); -}"#, - ); - } - - #[test] - fn struct_access_chaining_hints() { - check_expect( - InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, - r#" -struct A { pub b: B } -struct B { pub c: C } -struct C(pub bool); -struct D; - -impl D { - fn foo(&self) -> i32 { 42 } -} - -fn main() { - let x = A { b: B { c: C(true) } } - .b - .c - .0; - let x = D - .foo(); -}"#, - expect![[r#" - [ - InlayHint { - range: 143..190, - kind: ChainingHint, - label: [ - "C", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..190, - ), - ), - }, - InlayHint { - range: 143..179, - kind: ChainingHint, - label: [ - "B", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..179, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn generic_chaining_hints() { - check_expect( - InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, - r#" -struct A(T); -struct B(T); -struct C(T); -struct X(T, R); - -impl A { - fn new(t: T) -> Self { A(t) } - fn into_b(self) -> B { B(self.0) } -} -impl B { - fn into_c(self) -> C { C(self.0) } -} -fn main() { - let c = A::new(X(42, true)) - .into_b() - .into_c(); -} -"#, - expect![[r#" - [ - InlayHint { - range: 246..283, - kind: ChainingHint, - label: [ - "B>", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 246..283, - ), - ), - }, - InlayHint { - range: 246..265, - kind: ChainingHint, - label: [ - "A>", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 246..265, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn shorten_iterator_chaining_hints() { - check_expect( - InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, - r#" -//- minicore: iterators -use core::iter; - -struct MyIter; - -impl Iterator for MyIter { - type Item = (); - fn next(&mut self) -> Option { - None - } -} - -fn main() { - let _x = MyIter.by_ref() - .take(5) - .by_ref() - .take(5) - .by_ref(); -} -"#, - expect![[r#" - [ - InlayHint { - range: 174..241, - kind: ChainingHint, - label: [ - "impl Iterator", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..241, - ), - ), - }, - InlayHint { - range: 174..224, - kind: ChainingHint, - label: [ - "impl Iterator", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..224, - ), - ), - }, - InlayHint { - range: 174..206, - kind: ChainingHint, - label: [ - "impl Iterator", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..206, - ), - ), - }, - InlayHint { - range: 174..189, - kind: ChainingHint, - label: [ - "&mut MyIter", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..189, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn hints_in_attr_call() { - check_expect( - TEST_CONFIG, - r#" -//- proc_macros: identity, input_replace -struct Struct; -impl Struct { - fn chain(self) -> Self { - self - } -} -#[proc_macros::identity] -fn main() { - let strukt = Struct; - strukt - .chain() - .chain() - .chain(); - Struct::chain(strukt); -} -"#, - expect![[r#" - [ - InlayHint { - range: 124..130, - kind: TypeHint, - label: [ - "Struct", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 124..130, - ), - ), - }, - InlayHint { - range: 145..185, - kind: ChainingHint, - label: [ - "Struct", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 145..185, - ), - ), - }, - InlayHint { - range: 145..168, - kind: ChainingHint, - label: [ - "Struct", - ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 145..168, - ), - ), - }, - InlayHint { - range: 222..228, - kind: ParameterHint, - 
label: [ - "self", - ], - tooltip: Some( - HoverOffset( - FileId( - 0, - ), - 42, - ), - ), - }, - ] - "#]], - ); - } - - #[test] - fn hints_lifetimes() { - check( - r#" -fn empty() {} - -fn no_gpl(a: &()) {} - //^^^^^^<'0> - // ^'0 -fn empty_gpl<>(a: &()) {} - // ^'0 ^'0 -fn partial<'b>(a: &(), b: &'b ()) {} -// ^'0, $ ^'0 -fn partial<'a>(a: &'a (), b: &()) {} -// ^'0, $ ^'0 - -fn single_ret(a: &()) -> &() {} -// ^^^^^^^^^^<'0> - // ^'0 ^'0 -fn full_mul(a: &(), b: &()) {} -// ^^^^^^^^<'0, '1> - // ^'0 ^'1 - -fn foo<'c>(a: &'c ()) -> &() {} - // ^'c - -fn nested_in(a: & &X< &()>) {} -// ^^^^^^^^^<'0, '1, '2> - //^'0 ^'1 ^'2 -fn nested_out(a: &()) -> & &X< &()>{} -// ^^^^^^^^^^<'0> - //^'0 ^'0 ^'0 ^'0 - -impl () { - fn foo(&self) {} - // ^^^<'0> - // ^'0 - fn foo(&self) -> &() {} - // ^^^<'0> - // ^'0 ^'0 - fn foo(&self, a: &()) -> &() {} - // ^^^<'0, '1> - // ^'0 ^'1 ^'0 -} -"#, - ); - } - - #[test] - fn hints_lifetimes_named() { - check_with_config( - InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, - r#" -fn nested_in<'named>(named: & &X< &()>) {} -// ^'named1, 'named2, 'named3, $ - //^'named1 ^'named2 ^'named3 -"#, - ); - } - - #[test] - fn hints_lifetimes_trivial_skip() { - check_with_config( - InlayHintsConfig { - lifetime_elision_hints: LifetimeElisionHints::SkipTrivial, - ..TEST_CONFIG - }, - r#" -fn no_gpl(a: &()) {} -fn empty_gpl<>(a: &()) {} -fn partial<'b>(a: &(), b: &'b ()) {} -fn partial<'a>(a: &'a (), b: &()) {} - -fn single_ret(a: &()) -> &() {} -// ^^^^^^^^^^<'0> - // ^'0 ^'0 -fn full_mul(a: &(), b: &()) {} - -fn foo<'c>(a: &'c ()) -> &() {} - // ^'c - -fn nested_in(a: & &X< &()>) {} -fn nested_out(a: &()) -> & &X< &()>{} -// ^^^^^^^^^^<'0> - //^'0 ^'0 ^'0 ^'0 - -impl () { - fn foo(&self) {} - fn foo(&self) -> &() {} - // ^^^<'0> - // ^'0 ^'0 - fn foo(&self, a: &()) -> &() {} - // ^^^<'0, '1> - // ^'0 ^'1 ^'0 -} -"#, - ); - } - - #[test] - fn hints_lifetimes_static() { - check_with_config( - InlayHintsConfig { - lifetime_elision_hints: LifetimeElisionHints::Always, - ..TEST_CONFIG - }, - r#" -trait Trait {} -static S: &str = ""; -// ^'static -const C: &str = ""; -// ^'static -const C: &dyn Trait = panic!(); -// ^'static - -impl () { - const C: &str = ""; - const C: &dyn Trait = panic!(); -} -"#, - ); - } - - #[test] - fn hints_binding_modes() { - check_with_config( - InlayHintsConfig { binding_mode_hints: true, ..DISABLED_CONFIG }, - r#" -fn __( - (x,): (u32,), - (x,): &(u32,), - //^^^^& - //^ ref - (x,): &mut (u32,) - //^^^^&mut - //^ ref mut -) { - let (x,) = (0,); - let (x,) = &(0,); - //^^^^ & - //^ ref - let (x,) = &mut (0,); - //^^^^ &mut - //^ ref mut - let &mut (x,) = &mut (0,); - let (ref mut x,) = &mut (0,); - //^^^^^^^^^^^^ &mut - let &mut (ref mut x,) = &mut (0,); - let (mut x,) = &mut (0,); - //^^^^^^^^ &mut - match (0,) { - (x,) => () - } - match &(0,) { - (x,) => () - //^^^^ & - //^ ref - } - match &mut (0,) { - (x,) => () - //^^^^ &mut - //^ ref mut - } -}"#, - ); - } - - #[test] - fn hints_closing_brace() { - check_with_config( - InlayHintsConfig { closing_brace_hints_min_lines: Some(2), ..DISABLED_CONFIG }, - r#" -fn a() {} - -fn f() { -} // no hint unless `}` is the last token on the line - -fn g() { - } -//^ fn g - -fn h(with: T, arguments: u8, ...) 
{ - } -//^ fn h - -trait Tr { - fn f(); - fn g() { - } - //^ fn g - } -//^ trait Tr -impl Tr for () { - } -//^ impl Tr for () -impl dyn Tr { - } -//^ impl dyn Tr - -static S0: () = 0; -static S1: () = {}; -static S2: () = { - }; -//^ static S2 -const _: () = { - }; -//^ const _ - -mod m { - } -//^ mod m - -m! {} -m!(); -m!( - ); -//^ m! - -m! { - } -//^ m! - -fn f() { - let v = vec![ - ]; - } -//^ fn f -"#, - ); - } - - #[test] - fn adjustment_hints() { - check_with_config( - InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, - r#" -//- minicore: coerce_unsized -fn main() { - let _: u32 = loop {}; - //^^^^^^^ - let _: &u32 = &mut 0; - //^^^^^^& - //^^^^^^* - let _: &mut u32 = &mut 0; - //^^^^^^&mut $ - //^^^^^^* - let _: *const u32 = &mut 0; - //^^^^^^&raw const $ - //^^^^^^* - let _: *mut u32 = &mut 0; - //^^^^^^&raw mut $ - //^^^^^^* - let _: fn() = main; - //^^^^ - let _: unsafe fn() = main; - //^^^^ - //^^^^ - let _: unsafe fn() = main as fn(); - //^^^^^^^^^^^^ - let _: fn() = || {}; - //^^^^^ - let _: unsafe fn() = || {}; - //^^^^^ - let _: *const u32 = &mut 0u32 as *mut u32; - //^^^^^^^^^^^^^^^^^^^^^ - let _: &mut [_] = &mut [0; 0]; - //^^^^^^^^^^^ - //^^^^^^^^^^^&mut $ - //^^^^^^^^^^^* - - Struct.consume(); - Struct.by_ref(); - //^^^^^^( - //^^^^^^& - //^^^^^^) - Struct.by_ref_mut(); - //^^^^^^( - //^^^^^^&mut $ - //^^^^^^) - - (&Struct).consume(); - //^^^^^^^* - (&Struct).by_ref(); - - (&mut Struct).consume(); - //^^^^^^^^^^^* - (&mut Struct).by_ref(); - //^^^^^^^^^^^& - //^^^^^^^^^^^* - (&mut Struct).by_ref_mut(); -} - -#[derive(Copy, Clone)] -struct Struct; -impl Struct { - fn consume(self) {} - fn by_ref(&self) {} - fn by_ref_mut(&mut self) {} -} -"#, - ) - } } diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs new file mode 100644 index 0000000000000..bdd7c05e008c1 --- /dev/null +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -0,0 +1,630 @@ +//! Implementation of "adjustment" inlay hints: +//! ```no_run +//! let _: u32 = /* */ loop {}; +//! let _: &u32 = /* &* */ &mut 0; +//! 
``` +use hir::{Adjust, AutoBorrow, Mutability, OverloadedDeref, PointerCast, Safety, Semantics}; +use ide_db::RootDatabase; + +use syntax::{ + ast::{self, make, AstNode}, + ted, +}; + +use crate::{AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintsConfig, InlayKind}; + +pub(super) fn hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + expr: &ast::Expr, +) -> Option<()> { + if config.adjustment_hints_hide_outside_unsafe && !sema.is_inside_unsafe(expr) { + return None; + } + + if config.adjustment_hints == AdjustmentHints::Never { + return None; + } + + // These inherit from the inner expression which would result in duplicate hints + if let ast::Expr::ParenExpr(_) + | ast::Expr::IfExpr(_) + | ast::Expr::BlockExpr(_) + | ast::Expr::MatchExpr(_) = expr + { + return None; + } + + let descended = sema.descend_node_into_attributes(expr.clone()).pop(); + let desc_expr = descended.as_ref().unwrap_or(expr); + let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?; + + let (postfix, needs_outer_parens, needs_inner_parens) = + mode_and_needs_parens_for_adjustment_hints(expr, config.adjustment_hints_mode); + + if needs_outer_parens { + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::OpeningParenthesis, + label: "(".into(), + tooltip: None, + }); + } + + if postfix && needs_inner_parens { + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::OpeningParenthesis, + label: "(".into(), + tooltip: None, + }); + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::ClosingParenthesis, + label: ")".into(), + tooltip: None, + }); + } + + let (mut tmp0, mut tmp1); + let iter: &mut dyn Iterator = if postfix { + tmp0 = adjustments.into_iter(); + &mut tmp0 + } else { + tmp1 = adjustments.into_iter().rev(); + &mut tmp1 + }; + + for adjustment in iter { + if adjustment.source == adjustment.target { + continue; + } + + // FIXME: Add some nicer tooltips to each of these + let text = match adjustment.kind { + Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => { + "" + } + Adjust::Deref(None) => "*", + Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => "*", + Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => "*", + Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => "&", + Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => "&mut ", + Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => "&raw const ", + Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => "&raw mut ", + // some of these could be represented via `as` casts, but that's not too nice and + // handling everything as a prefix expr makes the `(` and `)` insertion easier + Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => { + match cast { + PointerCast::ReifyFnPointer => "", + PointerCast::UnsafeFnPointer => "", + PointerCast::ClosureFnPointer(Safety::Unsafe) => { + "" + } + PointerCast::ClosureFnPointer(Safety::Safe) => "", + PointerCast::MutToConstPointer => "", + PointerCast::ArrayToPointer => "", + PointerCast::Unsize => "", + } + } + _ => continue, + }; + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: if postfix { + InlayKind::AdjustmentHintPostfix + } else { + InlayKind::AdjustmentHint + }, + label: if postfix { format!(".{}", text.trim_end()).into() } else { text.into() }, + tooltip: None, + }); + } + if !postfix && needs_inner_parens { + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: 
InlayKind::OpeningParenthesis,
+            label: "(".into(),
+            tooltip: None,
+        });
+        acc.push(InlayHint {
+            range: expr.syntax().text_range(),
+            kind: InlayKind::ClosingParenthesis,
+            label: ")".into(),
+            tooltip: None,
+        });
+    }
+    if needs_outer_parens {
+        acc.push(InlayHint {
+            range: expr.syntax().text_range(),
+            kind: InlayKind::ClosingParenthesis,
+            label: ")".into(),
+            tooltip: None,
+        });
+    }
+    Some(())
+}
+
+/// Returns whether the hint should be postfix and whether we need to add parentheses on the inside and/or outside of `expr`,
+/// if we are going to add (`postfix`) adjustment hints to it.
+fn mode_and_needs_parens_for_adjustment_hints(
+    expr: &ast::Expr,
+    mode: AdjustmentHintsMode,
+) -> (bool, bool, bool) {
+    use {std::cmp::Ordering::*, AdjustmentHintsMode::*};
+
+    match mode {
+        Prefix | Postfix => {
+            let postfix = matches!(mode, Postfix);
+            let (inside, outside) = needs_parens_for_adjustment_hints(expr, postfix);
+            (postfix, inside, outside)
+        }
+        PreferPrefix | PreferPostfix => {
+            let prefer_postfix = matches!(mode, PreferPostfix);
+
+            let (pre_inside, pre_outside) = needs_parens_for_adjustment_hints(expr, false);
+            let prefix = (false, pre_inside, pre_outside);
+            let pre_count = pre_inside as u8 + pre_outside as u8;
+
+            let (post_inside, post_outside) = needs_parens_for_adjustment_hints(expr, true);
+            let postfix = (true, post_inside, post_outside);
+            let post_count = post_inside as u8 + post_outside as u8;
+
+            match pre_count.cmp(&post_count) {
+                Less => prefix,
+                Greater => postfix,
+                Equal if prefer_postfix => postfix,
+                Equal => prefix,
+            }
+        }
+    }
+}
+
+/// Returns whether we need to add parentheses on the inside and/or outside of `expr`,
+/// if we are going to add (`postfix`) adjustment hints to it.
+fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, bool) {
+    // This is a very miserable pile of hacks...
+    //
+    // `Expr::needs_parens_in` requires that the expression is the child of the other expression,
+    // that is supposed to be its parent.
+    //
+    // But we want to check what would happen if we add `*`/`.*` to the inner expression.
+    // To check for inner we need `` expr.needs_parens_in(`*expr`) ``,
+    // to check for outer we need `` `*expr`.needs_parens_in(parent) ``,
+    // where "expr" is the `expr` parameter, `*expr` is the edited `expr`,
+    // and "parent" is the parent of the original expression...
+    //
+    // For this we utilize mutable trees, which is a HACK, but it works.
+    //
+    // FIXME: come up with a better API for `needs_parens_in`, so that we don't have to do *this*
+
+    // Make `&expr`/`expr?`
+    let dummy_expr = {
+        // `make::*` functions go through a string, so they parse wrongly.
+        // for example `` make::expr_try(`|| a`) `` would result in a
+        // `|| (a?)` and not `(|| a)?`.
+        //
+        // Thus we need dummy parens to preserve the relationship we want.
+        // The parens are then simply ignored by the following code.
+        let dummy_paren = make::expr_paren(expr.clone());
+        if postfix {
+            make::expr_try(dummy_paren)
+        } else {
+            make::expr_ref(dummy_paren, false)
+        }
+    };
+
+    // Do the dark mutable tree magic.
+    // This essentially makes `dummy_expr` and `expr` switch places (families),
+    // so that `expr`'s parent is not `dummy_expr`'s parent.
+ let dummy_expr = dummy_expr.clone_for_update(); + let expr = expr.clone_for_update(); + ted::replace(expr.syntax(), dummy_expr.syntax()); + + let parent = dummy_expr.syntax().parent(); + let expr = if postfix { + let ast::Expr::TryExpr(e) = &dummy_expr else { unreachable!() }; + let Some(ast::Expr::ParenExpr(e)) = e.expr() else { unreachable!() }; + + e.expr().unwrap() + } else { + let ast::Expr::RefExpr(e) = &dummy_expr else { unreachable!() }; + let Some(ast::Expr::ParenExpr(e)) = e.expr() else { unreachable!() }; + + e.expr().unwrap() + }; + + // At this point + // - `parent` is the parrent of the original expression + // - `dummy_expr` is the original expression wrapped in the operator we want (`*`/`.*`) + // - `expr` is the clone of the original expression (with `dummy_expr` as the parent) + + let needs_outer_parens = parent.map_or(false, |p| dummy_expr.needs_parens_in(p)); + let needs_inner_parens = expr.needs_parens_in(dummy_expr.syntax().clone()); + + (needs_outer_parens, needs_inner_parens) +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, + AdjustmentHints, AdjustmentHintsMode, InlayHintsConfig, + }; + + #[test] + fn adjustment_hints() { + check_with_config( + InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, + r#" +//- minicore: coerce_unsized, fn +fn main() { + let _: u32 = loop {}; + //^^^^^^^ + let _: &u32 = &mut 0; + //^^^^^^& + //^^^^^^* + let _: &mut u32 = &mut 0; + //^^^^^^&mut $ + //^^^^^^* + let _: *const u32 = &mut 0; + //^^^^^^&raw const $ + //^^^^^^* + let _: *mut u32 = &mut 0; + //^^^^^^&raw mut $ + //^^^^^^* + let _: fn() = main; + //^^^^ + let _: unsafe fn() = main; + //^^^^ + //^^^^ + let _: unsafe fn() = main as fn(); + //^^^^^^^^^^^^ + //^^^^^^^^^^^^( + //^^^^^^^^^^^^) + let _: fn() = || {}; + //^^^^^ + let _: unsafe fn() = || {}; + //^^^^^ + let _: *const u32 = &mut 0u32 as *mut u32; + //^^^^^^^^^^^^^^^^^^^^^ + //^^^^^^^^^^^^^^^^^^^^^( + //^^^^^^^^^^^^^^^^^^^^^) + let _: &mut [_] = &mut [0; 0]; + //^^^^^^^^^^^ + //^^^^^^^^^^^&mut $ + //^^^^^^^^^^^* + + Struct.consume(); + Struct.by_ref(); + //^^^^^^( + //^^^^^^& + //^^^^^^) + Struct.by_ref_mut(); + //^^^^^^( + //^^^^^^&mut $ + //^^^^^^) + + (&Struct).consume(); + //^^^^^^^* + (&Struct).by_ref(); + + (&mut Struct).consume(); + //^^^^^^^^^^^* + (&mut Struct).by_ref(); + //^^^^^^^^^^^& + //^^^^^^^^^^^* + (&mut Struct).by_ref_mut(); + + // Check that block-like expressions don't duplicate hints + let _: &mut [u32] = (&mut []); + //^^^^^^^ + //^^^^^^^&mut $ + //^^^^^^^* + let _: &mut [u32] = { &mut [] }; + //^^^^^^^ + //^^^^^^^&mut $ + //^^^^^^^* + let _: &mut [u32] = unsafe { &mut [] }; + //^^^^^^^ + //^^^^^^^&mut $ + //^^^^^^^* + let _: &mut [u32] = if true { + &mut [] + //^^^^^^^ + //^^^^^^^&mut $ + //^^^^^^^* + } else { + loop {} + //^^^^^^^ + }; + let _: &mut [u32] = match () { () => &mut [] } + //^^^^^^^ + //^^^^^^^&mut $ + //^^^^^^^* + + let _: &mut dyn Fn() = &mut || (); + //^^^^^^^^^^ + //^^^^^^^^^^&mut $ + //^^^^^^^^^^* +} + +#[derive(Copy, Clone)] +struct Struct; +impl Struct { + fn consume(self) {} + fn by_ref(&self) {} + fn by_ref_mut(&mut self) {} +} +"#, + ) + } + + #[test] + fn adjustment_hints_postfix() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_mode: AdjustmentHintsMode::Postfix, + ..DISABLED_CONFIG + }, + r#" +//- minicore: coerce_unsized, fn +fn main() { + + Struct.consume(); + Struct.by_ref(); + //^^^^^^.& + Struct.by_ref_mut(); + 
//^^^^^^.&mut + + (&Struct).consume(); + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + (&Struct).by_ref(); + + (&mut Struct).consume(); + //^^^^^^^^^^^( + //^^^^^^^^^^^) + //^^^^^^^^^^^.* + (&mut Struct).by_ref(); + //^^^^^^^^^^^( + //^^^^^^^^^^^) + //^^^^^^^^^^^.* + //^^^^^^^^^^^.& + (&mut Struct).by_ref_mut(); + + // Check that block-like expressions don't duplicate hints + let _: &mut [u32] = (&mut []); + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + let _: &mut [u32] = { &mut [] }; + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + let _: &mut [u32] = unsafe { &mut [] }; + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + let _: &mut [u32] = if true { + &mut [] + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + } else { + loop {} + //^^^^^^^. + }; + let _: &mut [u32] = match () { () => &mut [] } + //^^^^^^^( + //^^^^^^^) + //^^^^^^^.* + //^^^^^^^.&mut + //^^^^^^^. + + let _: &mut dyn Fn() = &mut || (); + //^^^^^^^^^^( + //^^^^^^^^^^) + //^^^^^^^^^^.* + //^^^^^^^^^^.&mut + //^^^^^^^^^^. +} + +#[derive(Copy, Clone)] +struct Struct; +impl Struct { + fn consume(self) {} + fn by_ref(&self) {} + fn by_ref_mut(&mut self) {} +} +"#, + ); + } + + #[test] + fn adjustment_hints_prefer_prefix() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_mode: AdjustmentHintsMode::PreferPrefix, + ..DISABLED_CONFIG + }, + r#" +fn main() { + let _: u32 = loop {}; + //^^^^^^^ + + Struct.by_ref(); + //^^^^^^.& + + let (): () = return (); + //^^^^^^^^^ + + struct Struct; + impl Struct { fn by_ref(&self) {} } +} + "#, + ) + } + + #[test] + fn adjustment_hints_prefer_postfix() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_mode: AdjustmentHintsMode::PreferPostfix, + ..DISABLED_CONFIG + }, + r#" +fn main() { + let _: u32 = loop {}; + //^^^^^^^. + + Struct.by_ref(); + //^^^^^^.& + + let (): () = return (); + //^^^^^^^^^ + + struct Struct; + impl Struct { fn by_ref(&self) {} } +} + "#, + ) + } + + #[test] + fn never_to_never_is_never_shown() { + check_with_config( + InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, + r#" +fn never() -> ! 
{ + return loop {}; +} + +fn or_else() { + let () = () else { return }; +} + "#, + ) + } + + #[test] + fn adjustment_hints_unsafe_only() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_hide_outside_unsafe: true, + ..DISABLED_CONFIG + }, + r#" +unsafe fn enabled() { + f(&&()); + //^^^^& + //^^^^* + //^^^^* +} + +fn disabled() { + f(&&()); +} + +fn mixed() { + f(&&()); + + unsafe { + f(&&()); + //^^^^& + //^^^^* + //^^^^* + } +} + +const _: () = { + f(&&()); + + unsafe { + f(&&()); + //^^^^& + //^^^^* + //^^^^* + } +}; + +static STATIC: () = { + f(&&()); + + unsafe { + f(&&()); + //^^^^& + //^^^^* + //^^^^* + } +}; + +enum E { + Disable = { f(&&()); 0 }, + Enable = unsafe { f(&&()); 1 }, + //^^^^& + //^^^^* + //^^^^* +} + +const fn f(_: &()) {} + "#, + ) + } + + #[test] + fn adjustment_hints_unsafe_only_with_item() { + check_with_config( + InlayHintsConfig { + adjustment_hints: AdjustmentHints::Always, + adjustment_hints_hide_outside_unsafe: true, + ..DISABLED_CONFIG + }, + r#" +fn a() { + struct Struct; + impl Struct { + fn by_ref(&self) {} + } + + _ = Struct.by_ref(); + + _ = unsafe { Struct.by_ref() }; + //^^^^^^( + //^^^^^^& + //^^^^^^) +} + "#, + ); + } + + #[test] + fn bug() { + check_with_config( + InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG }, + r#" +fn main() { + // These should be identical, but they are not... + + let () = return; + let (): () = return; + //^^^^^^ +} + "#, + ) + } +} diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs new file mode 100644 index 0000000000000..adec19c765a14 --- /dev/null +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -0,0 +1,978 @@ +//! Implementation of "type" inlay hints: +//! ```no_run +//! fn f(a: i32, b: i32) -> i32 { a + b } +//! let _x /* i32 */= f(4, 4); +//! 
``` +use hir::{Semantics, TypeInfo}; +use ide_db::{base_db::FileId, famous_defs::FamousDefs, RootDatabase}; + +use itertools::Itertools; +use syntax::{ + ast::{self, AstNode, HasName}, + match_ast, +}; + +use crate::{ + inlay_hints::closure_has_block_body, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, +}; + +use super::label_of_ty; + +pub(super) fn hints( + acc: &mut Vec, + famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + file_id: FileId, + pat: &ast::IdentPat, +) -> Option<()> { + if !config.type_hints { + return None; + } + + let descended = sema.descend_node_into_attributes(pat.clone()).pop(); + let desc_pat = descended.as_ref().unwrap_or(pat); + let ty = sema.type_of_pat(&desc_pat.clone().into())?.original; + + if should_not_display_type_hint(sema, config, pat, &ty) { + return None; + } + + let label = label_of_ty(famous_defs, config, ty)?; + + if config.hide_named_constructor_hints + && is_named_constructor(sema, pat, &label.to_string()).is_some() + { + return None; + } + + acc.push(InlayHint { + range: match pat.name() { + Some(name) => name.syntax().text_range(), + None => pat.syntax().text_range(), + }, + kind: InlayKind::TypeHint, + label, + tooltip: pat + .name() + .map(|it| it.syntax().text_range()) + .map(|it| InlayTooltip::HoverRanged(file_id, it)), + }); + + Some(()) +} + +fn should_not_display_type_hint( + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + bind_pat: &ast::IdentPat, + pat_ty: &hir::Type, +) -> bool { + let db = sema.db; + + if pat_ty.is_unknown() { + return true; + } + + if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() { + if s.fields(db).is_empty() && s.name(db).to_smol_str() == bind_pat.to_string() { + return true; + } + } + + if config.hide_closure_initialization_hints { + if let Some(parent) = bind_pat.syntax().parent() { + if let Some(it) = ast::LetStmt::cast(parent) { + if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() { + if closure_has_block_body(&closure) { + return true; + } + } + } + } + } + + for node in bind_pat.syntax().ancestors() { + match_ast! { + match node { + ast::LetStmt(it) => return it.ty().is_some(), + // FIXME: We might wanna show type hints in parameters for non-top level patterns as well + ast::Param(it) => return it.ty().is_some(), + ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), + ast::LetExpr(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), + ast::IfExpr(_) => return false, + ast::WhileExpr(_) => return false, + ast::ForExpr(it) => { + // We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit). + // Type of expr should be iterable. + return it.in_token().is_none() || + it.iterable() + .and_then(|iterable_expr| sema.type_of_expr(&iterable_expr)) + .map(TypeInfo::original) + .map_or(true, |iterable_ty| iterable_ty.is_unknown() || iterable_ty.is_unit()) + }, + _ => (), + } + } + } + false +} + +fn is_named_constructor( + sema: &Semantics<'_, RootDatabase>, + pat: &ast::IdentPat, + ty_name: &str, +) -> Option<()> { + let let_node = pat.syntax().parent()?; + let expr = match_ast! 
{ + match let_node { + ast::LetStmt(it) => it.initializer(), + ast::LetExpr(it) => it.expr(), + _ => None, + } + }?; + + let expr = sema.descend_node_into_attributes(expr.clone()).pop().unwrap_or(expr); + // unwrap postfix expressions + let expr = match expr { + ast::Expr::TryExpr(it) => it.expr(), + ast::Expr::AwaitExpr(it) => it.expr(), + expr => Some(expr), + }?; + let expr = match expr { + ast::Expr::CallExpr(call) => match call.expr()? { + ast::Expr::PathExpr(path) => path, + _ => return None, + }, + ast::Expr::PathExpr(path) => path, + _ => return None, + }; + let path = expr.path()?; + + let callable = sema.type_of_expr(&ast::Expr::PathExpr(expr))?.original.as_callable(sema.db); + let callable_kind = callable.map(|it| it.kind()); + let qual_seg = match callable_kind { + Some(hir::CallableKind::Function(_) | hir::CallableKind::TupleEnumVariant(_)) => { + path.qualifier()?.segment() + } + _ => path.segment(), + }?; + + let ctor_name = match qual_seg.kind()? { + ast::PathSegmentKind::Name(name_ref) => { + match qual_seg.generic_arg_list().map(|it| it.generic_args()) { + Some(generics) => format!("{name_ref}<{}>", generics.format(", ")), + None => name_ref.to_string(), + } + } + ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(), + _ => return None, + }; + (ctor_name == ty_name).then_some(()) +} + +fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool { + if let Some(hir::Adt::Enum(enum_data)) = pat_ty.as_adt() { + let pat_text = bind_pat.to_string(); + enum_data + .variants(db) + .into_iter() + .map(|variant| variant.name(db).to_smol_str()) + .any(|enum_name| enum_name == pat_text) + } else { + false + } +} + +#[cfg(test)] +mod tests { + // This module also contains tests for super::closure_ret + + use expect_test::expect; + use syntax::{TextRange, TextSize}; + use test_utils::extract_annotations; + + use crate::{fixture, inlay_hints::InlayHintsConfig}; + + use crate::inlay_hints::tests::{ + check, check_expect, check_with_config, DISABLED_CONFIG, DISABLED_CONFIG_WITH_LINKS, + TEST_CONFIG, + }; + use crate::ClosureReturnTypeHints; + + #[track_caller] + fn check_types(ra_fixture: &str) { + check_with_config(InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, ra_fixture); + } + + #[test] + fn type_hints_only() { + check_types( + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + let _x = foo(4, 4); + //^^ i32 +}"#, + ); + } + + #[test] + fn type_hints_bindings_after_at() { + check_types( + r#" +//- minicore: option +fn main() { + let ref foo @ bar @ ref mut baz = 0; + //^^^ &i32 + //^^^ i32 + //^^^ &mut i32 + let [x @ ..] 
= [0]; + //^ [i32; 1] + if let x @ Some(_) = Some(0) {} + //^ Option + let foo @ (bar, baz) = (3, 3); + //^^^ (i32, i32) + //^^^ i32 + //^^^ i32 +}"#, + ); + } + + #[test] + fn default_generic_types_should_not_be_displayed() { + check( + r#" +struct Test { k: K, t: T } + +fn main() { + let zz = Test { t: 23u8, k: 33 }; + //^^ Test + let zz_ref = &zz; + //^^^^^^ &Test + let test = || zz; + //^^^^ || -> Test +}"#, + ); + } + + #[test] + fn shorten_iterators_in_associated_params() { + check_types( + r#" +//- minicore: iterators +use core::iter; + +pub struct SomeIter {} + +impl SomeIter { + pub fn new() -> Self { SomeIter {} } + pub fn push(&mut self, t: T) {} +} + +impl Iterator for SomeIter { + type Item = T; + fn next(&mut self) -> Option { + None + } +} + +fn main() { + let mut some_iter = SomeIter::new(); + //^^^^^^^^^ SomeIter>> + some_iter.push(iter::repeat(2).take(2)); + let iter_of_iters = some_iter.take(2); + //^^^^^^^^^^^^^ impl Iterator> +} +"#, + ); + } + + #[test] + fn iterator_hint_regression_issue_12674() { + // Ensure we don't crash while solving the projection type of iterators. + check_expect( + InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS }, + r#" +//- minicore: iterators +struct S(T); +impl S { + fn iter(&self) -> Iter<'_, T> { loop {} } +} +struct Iter<'a, T: 'a>(&'a T); +impl<'a, T> Iterator for Iter<'a, T> { + type Item = &'a T; + fn next(&mut self) -> Option { loop {} } +} +struct Container<'a> { + elements: S<&'a str>, +} +struct SliceIter<'a, T>(&'a T); +impl<'a, T> Iterator for SliceIter<'a, T> { + type Item = &'a T; + fn next(&mut self) -> Option { loop {} } +} + +fn main(a: SliceIter<'_, Container>) { + a + .filter_map(|c| Some(c.elements.iter().filter_map(|v| Some(v)))) + .map(|e| e); +} + "#, + expect![[r#" + [ + InlayHint { + range: 484..554, + kind: ChainingHint, + label: [ + "impl Iterator>", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 484..554, + ), + ), + }, + InlayHint { + range: 484..485, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "SliceIter", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 289..298, + }, + ), + }, + "<", + InlayHintLabelPart { + text: "Container", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 238..247, + }, + ), + }, + ">", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 484..485, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn infer_call_method_return_associated_types_with_generic() { + check_types( + r#" + pub trait Default { + fn default() -> Self; + } + pub trait Foo { + type Bar: Default; + } + + pub fn quux() -> T::Bar { + let y = Default::default(); + //^ ::Bar + + y + } + "#, + ); + } + + #[test] + fn fn_hints() { + check_types( + r#" +//- minicore: fn, sized +fn foo() -> impl Fn() { loop {} } +fn foo1() -> impl Fn(f64) { loop {} } +fn foo2() -> impl Fn(f64, f64) { loop {} } +fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} } +fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} } +fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} } +fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} } +fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} } + +fn main() { + let foo = foo(); + // ^^^ impl Fn() + let foo = foo1(); + // ^^^ impl Fn(f64) + let foo = foo2(); + // ^^^ impl Fn(f64, f64) + let foo = foo3(); + // ^^^ impl Fn(f64, f64) -> u32 + let foo = foo4(); + // ^^^ &dyn Fn(f64, f64) -> u32 + let foo = foo5(); + // ^^^ &dyn Fn(&dyn Fn(f64, 
f64) -> u32, f64) -> u32 + let foo = foo6(); + // ^^^ impl Fn(f64, f64) -> u32 + let foo = foo7(); + // ^^^ *const impl Fn(f64, f64) -> u32 +} +"#, + ) + } + + #[test] + fn check_hint_range_limit() { + let fixture = r#" + //- minicore: fn, sized + fn foo() -> impl Fn() { loop {} } + fn foo1() -> impl Fn(f64) { loop {} } + fn foo2() -> impl Fn(f64, f64) { loop {} } + fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} } + fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} } + fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} } + fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} } + fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} } + + fn main() { + let foo = foo(); + let foo = foo1(); + let foo = foo2(); + // ^^^ impl Fn(f64, f64) + let foo = foo3(); + // ^^^ impl Fn(f64, f64) -> u32 + let foo = foo4(); + let foo = foo5(); + let foo = foo6(); + let foo = foo7(); + } + "#; + let (analysis, file_id) = fixture::file(fixture); + let expected = extract_annotations(&analysis.file_text(file_id).unwrap()); + let inlay_hints = analysis + .inlay_hints( + &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, + file_id, + Some(TextRange::new(TextSize::from(500), TextSize::from(600))), + ) + .unwrap(); + let actual = + inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::>(); + assert_eq!(expected, actual, "\nExpected:\n{expected:#?}\n\nActual:\n{actual:#?}"); + } + + #[test] + fn fn_hints_ptr_rpit_fn_parentheses() { + check_types( + r#" +//- minicore: fn, sized +trait Trait {} + +fn foo1() -> *const impl Fn() { loop {} } +fn foo2() -> *const (impl Fn() + Sized) { loop {} } +fn foo3() -> *const (impl Fn() + ?Sized) { loop {} } +fn foo4() -> *const (impl Sized + Fn()) { loop {} } +fn foo5() -> *const (impl ?Sized + Fn()) { loop {} } +fn foo6() -> *const (impl Fn() + Trait) { loop {} } +fn foo7() -> *const (impl Fn() + Sized + Trait) { loop {} } +fn foo8() -> *const (impl Fn() + ?Sized + Trait) { loop {} } +fn foo9() -> *const (impl Fn() -> u8 + ?Sized) { loop {} } +fn foo10() -> *const (impl Fn() + Sized + ?Sized) { loop {} } + +fn main() { + let foo = foo1(); + // ^^^ *const impl Fn() + let foo = foo2(); + // ^^^ *const impl Fn() + let foo = foo3(); + // ^^^ *const (impl Fn() + ?Sized) + let foo = foo4(); + // ^^^ *const impl Fn() + let foo = foo5(); + // ^^^ *const (impl Fn() + ?Sized) + let foo = foo6(); + // ^^^ *const (impl Fn() + Trait) + let foo = foo7(); + // ^^^ *const (impl Fn() + Trait) + let foo = foo8(); + // ^^^ *const (impl Fn() + Trait + ?Sized) + let foo = foo9(); + // ^^^ *const (impl Fn() -> u8 + ?Sized) + let foo = foo10(); + // ^^^ *const impl Fn() +} +"#, + ) + } + + #[test] + fn unit_structs_have_no_type_hints() { + check_types( + r#" +//- minicore: result +struct SyntheticSyntax; + +fn main() { + match Ok(()) { + Ok(_) => (), + Err(SyntheticSyntax) => (), + } +}"#, + ); + } + + #[test] + fn let_statement() { + check_types( + r#" +#[derive(PartialEq)] +enum Option { None, Some(T) } + +#[derive(PartialEq)] +struct Test { a: Option, b: u8 } + +fn main() { + struct InnerStruct {} + + let test = 54; + //^^^^ i32 + let test: i32 = 33; + let mut test = 33; + //^^^^ i32 + let _ = 22; + let test = "test"; + //^^^^ &str + let test = InnerStruct {}; + //^^^^ InnerStruct + + let test = unresolved(); + + let test = (42, 'a'); + //^^^^ (i32, char) + let (a, (b, (c,)) = (2, (3, (9.2,)); + //^ i32 ^ i32 ^ f64 + let &x = &92; + //^ i32 +}"#, + ); + } + + #[test] + fn if_expr() { + check_types( + r#" +//- 
minicore: option +struct Test { a: Option, b: u8 } + +fn main() { + let test = Some(Test { a: Some(3), b: 1 }); + //^^^^ Option + if let None = &test {}; + if let test = &test {}; + //^^^^ &Option + if let Some(test) = &test {}; + //^^^^ &Test + if let Some(Test { a, b }) = &test {}; + //^ &Option ^ &u8 + if let Some(Test { a: x, b: y }) = &test {}; + //^ &Option ^ &u8 + if let Some(Test { a: Some(x), b: y }) = &test {}; + //^ &u32 ^ &u8 + if let Some(Test { a: None, b: y }) = &test {}; + //^ &u8 + if let Some(Test { b: y, .. }) = &test {}; + //^ &u8 + if test == None {} +}"#, + ); + } + + #[test] + fn while_expr() { + check_types( + r#" +//- minicore: option +struct Test { a: Option, b: u8 } + +fn main() { + let test = Some(Test { a: Some(3), b: 1 }); + //^^^^ Option + while let Some(Test { a: Some(x), b: y }) = &test {}; + //^ &u32 ^ &u8 +}"#, + ); + } + + #[test] + fn match_arm_list() { + check_types( + r#" +//- minicore: option +struct Test { a: Option, b: u8 } + +fn main() { + match Some(Test { a: Some(3), b: 1 }) { + None => (), + test => (), + //^^^^ Option + Some(Test { a: Some(x), b: y }) => (), + //^ u32 ^ u8 + _ => {} + } +}"#, + ); + } + + #[test] + fn complete_for_hint() { + check_types( + r#" +//- minicore: iterator +pub struct Vec {} + +impl Vec { + pub fn new() -> Self { Vec {} } + pub fn push(&mut self, t: T) {} +} + +impl IntoIterator for Vec { + type Item = T; + type IntoIter = IntoIter; +} + +struct IntoIter {} + +impl Iterator for IntoIter { + type Item = T; +} + +fn main() { + let mut data = Vec::new(); + //^^^^ Vec<&str> + data.push("foo"); + for i in data { + //^ &str + let z = i; + //^ &str + } +} +"#, + ); + } + + #[test] + fn multi_dyn_trait_bounds() { + check_types( + r#" +pub struct Vec {} + +impl Vec { + pub fn new() -> Self { Vec {} } +} + +pub struct Box {} + +trait Display {} +auto trait Sync {} + +fn main() { + // The block expression wrapping disables the constructor hint hiding logic + let _v = { Vec::>::new() }; + //^^ Vec> + let _v = { Vec::>::new() }; + //^^ Vec> + let _v = { Vec::>::new() }; + //^^ Vec> +} +"#, + ); + } + + #[test] + fn shorten_iterator_hints() { + check_types( + r#" +//- minicore: iterators +use core::iter; + +struct MyIter; + +impl Iterator for MyIter { + type Item = (); + fn next(&mut self) -> Option { + None + } +} + +fn main() { + let _x = MyIter; + //^^ MyIter + let _x = iter::repeat(0); + //^^ impl Iterator + fn generic(t: T) { + let _x = iter::repeat(t); + //^^ impl Iterator + let _chained = iter::repeat(t).take(10); + //^^^^^^^^ impl Iterator + } +} +"#, + ); + } + + #[test] + fn skip_constructor_and_enum_type_hints() { + check_with_config( + InlayHintsConfig { + type_hints: true, + hide_named_constructor_hints: true, + ..DISABLED_CONFIG + }, + r#" +//- minicore: try, option +use core::ops::ControlFlow; + +mod x { + pub mod y { pub struct Foo; } + pub struct Foo; + pub enum AnotherEnum { + Variant() + }; +} +struct Struct; +struct TupleStruct(); + +impl Struct { + fn new() -> Self { + Struct + } + fn try_new() -> ControlFlow<(), Self> { + ControlFlow::Continue(Struct) + } +} + +struct Generic(T); +impl Generic { + fn new() -> Self { + Generic(0) + } +} + +enum Enum { + Variant(u32) +} + +fn times2(value: i32) -> i32 { + 2 * value +} + +fn main() { + let enumb = Enum::Variant(0); + + let strukt = x::Foo; + let strukt = x::y::Foo; + let strukt = Struct; + let strukt = Struct::new(); + + let tuple_struct = TupleStruct(); + + let generic0 = Generic::new(); + // ^^^^^^^^ Generic + let generic1 = Generic(0); + // ^^^^^^^^ Generic 
+ let generic2 = Generic::::new(); + let generic3 = >::new(); + let generic4 = Generic::(0); + + + let option = Some(0); + // ^^^^^^ Option + let func = times2; + // ^^^^ fn times2(i32) -> i32 + let closure = |x: i32| x * 2; + // ^^^^^^^ |i32| -> i32 +} + +fn fallible() -> ControlFlow<()> { + let strukt = Struct::try_new()?; +} +"#, + ); + } + + #[test] + fn shows_constructor_type_hints_when_enabled() { + check_types( + r#" +//- minicore: try +use core::ops::ControlFlow; + +struct Struct; +struct TupleStruct(); + +impl Struct { + fn new() -> Self { + Struct + } + fn try_new() -> ControlFlow<(), Self> { + ControlFlow::Continue(Struct) + } +} + +struct Generic(T); +impl Generic { + fn new() -> Self { + Generic(0) + } +} + +fn main() { + let strukt = Struct::new(); + // ^^^^^^ Struct + let tuple_struct = TupleStruct(); + // ^^^^^^^^^^^^ TupleStruct + let generic0 = Generic::new(); + // ^^^^^^^^ Generic + let generic1 = Generic::::new(); + // ^^^^^^^^ Generic + let generic2 = >::new(); + // ^^^^^^^^ Generic +} + +fn fallible() -> ControlFlow<()> { + let strukt = Struct::try_new()?; + // ^^^^^^ Struct +} +"#, + ); + } + + #[test] + fn closures() { + check( + r#" +fn main() { + let mut start = 0; + //^^^^^ i32 + (0..2).for_each(|increment | { start += increment; }); + //^^^^^^^^^ i32 + + let multiply = + //^^^^^^^^ |i32, i32| -> i32 + | a, b| a * b + //^ i32 ^ i32 + + ; + + let _: i32 = multiply(1, 2); + //^ a ^ b + let multiply_ref = &multiply; + //^^^^^^^^^^^^ &|i32, i32| -> i32 + + let return_42 = || 42; + //^^^^^^^^^ || -> i32 + || { 42 }; + //^^ i32 +}"#, + ); + } + + #[test] + fn return_type_hints_for_closure_without_block() { + check_with_config( + InlayHintsConfig { + closure_return_type_hints: ClosureReturnTypeHints::Always, + ..DISABLED_CONFIG + }, + r#" +fn main() { + let a = || { 0 }; + //^^ i32 + let b = || 0; + //^^ i32 +}"#, + ); + } + + #[test] + fn skip_closure_type_hints() { + check_with_config( + InlayHintsConfig { + type_hints: true, + hide_closure_initialization_hints: true, + ..DISABLED_CONFIG + }, + r#" +//- minicore: fn +fn main() { + let multiple_2 = |x: i32| { x * 2 }; + + let multiple_2 = |x: i32| x * 2; + // ^^^^^^^^^^ |i32| -> i32 + + let (not) = (|x: bool| { !x }); + // ^^^ |bool| -> bool + + let (is_zero, _b) = (|x: usize| { x == 0 }, false); + // ^^^^^^^ |usize| -> bool + // ^^ bool + + let plus_one = |x| { x + 1 }; + // ^ u8 + foo(plus_one); + + let add_mul = bar(|x: u8| { x + 1 }); + // ^^^^^^^ impl FnOnce(u8) -> u8 + ?Sized + + let closure = if let Some(6) = add_mul(2).checked_sub(1) { + // ^^^^^^^ fn(i32) -> i32 + |x: i32| { x * 2 } + } else { + |x: i32| { x * 3 } + }; +} + +fn foo(f: impl FnOnce(u8) -> u8) {} + +fn bar(f: impl FnOnce(u8) -> u8) -> impl FnOnce(u8) -> u8 { + move |x: u8| f(x) * 2 +} +"#, + ); + } + + #[test] + fn hint_truncation() { + check_with_config( + InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG }, + r#" +struct Smol(T); + +struct VeryLongOuterName(T); + +fn main() { + let a = Smol(0u32); + //^ Smol + let b = VeryLongOuterName(0usize); + //^ VeryLongOuterName<…> + let c = Smol(Smol(0u32)) + //^ Smol> +}"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/binding_mode.rs b/crates/ide/src/inlay_hints/binding_mode.rs new file mode 100644 index 0000000000000..a0166d0048ab0 --- /dev/null +++ b/crates/ide/src/inlay_hints/binding_mode.rs @@ -0,0 +1,142 @@ +//! Implementation of "binding mode" inlay hints: +//! ```no_run +//! let /* & */ (/* ref */ x,) = &(0,); +//! 
``` +use hir::{Mutability, Semantics}; +use ide_db::RootDatabase; + +use syntax::ast::{self, AstNode}; + +use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; + +pub(super) fn hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + pat: &ast::Pat, +) -> Option<()> { + if !config.binding_mode_hints { + return None; + } + + let outer_paren_pat = pat + .syntax() + .ancestors() + .skip(1) + .map_while(ast::Pat::cast) + .map_while(|pat| match pat { + ast::Pat::ParenPat(pat) => Some(pat), + _ => None, + }) + .last(); + let range = + outer_paren_pat.as_ref().map_or_else(|| pat.syntax(), |it| it.syntax()).text_range(); + let pattern_adjustments = sema.pattern_adjustments(pat); + pattern_adjustments.iter().for_each(|ty| { + let reference = ty.is_reference(); + let mut_reference = ty.is_mutable_reference(); + let r = match (reference, mut_reference) { + (true, true) => "&mut", + (true, false) => "&", + _ => return, + }; + acc.push(InlayHint { + range, + kind: InlayKind::BindingModeHint, + label: r.to_string().into(), + tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), + }); + }); + match pat { + ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => { + let bm = sema.binding_mode_of_pat(pat)?; + let bm = match bm { + hir::BindingMode::Move => return None, + hir::BindingMode::Ref(Mutability::Mut) => "ref mut", + hir::BindingMode::Ref(Mutability::Shared) => "ref", + }; + acc.push(InlayHint { + range: pat.syntax().text_range(), + kind: InlayKind::BindingModeHint, + label: bm.to_string().into(), + tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), + }); + } + ast::Pat::OrPat(pat) if !pattern_adjustments.is_empty() && outer_paren_pat.is_none() => { + acc.push(InlayHint { + range: pat.syntax().text_range(), + kind: InlayKind::OpeningParenthesis, + label: "(".into(), + tooltip: None, + }); + acc.push(InlayHint { + range: pat.syntax().text_range(), + kind: InlayKind::ClosingParenthesis, + label: ")".into(), + tooltip: None, + }); + } + _ => (), + } + + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, + InlayHintsConfig, + }; + + #[test] + fn hints_binding_modes() { + check_with_config( + InlayHintsConfig { binding_mode_hints: true, ..DISABLED_CONFIG }, + r#" +fn __( + (x,): (u32,), + (x,): &(u32,), + //^^^^& + //^ ref + (x,): &mut (u32,) + //^^^^&mut + //^ ref mut +) { + let (x,) = (0,); + let (x,) = &(0,); + //^^^^ & + //^ ref + let (x,) = &mut (0,); + //^^^^ &mut + //^ ref mut + let &mut (x,) = &mut (0,); + let (ref mut x,) = &mut (0,); + //^^^^^^^^^^^^ &mut + let &mut (ref mut x,) = &mut (0,); + let (mut x,) = &mut (0,); + //^^^^^^^^ &mut + match (0,) { + (x,) => () + } + match &(0,) { + (x,) | (x,) => (), + //^^^^^^^^^^^& + //^ ref + //^ ref + //^^^^^^^^^^^( + //^^^^^^^^^^^) + ((x,) | (x,)) => (), + //^^^^^^^^^^^^^& + //^ ref + //^ ref + } + match &mut (0,) { + (x,) => () + //^^^^ &mut + //^ ref mut + } +}"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs new file mode 100644 index 0000000000000..8810d5d34dbd9 --- /dev/null +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -0,0 +1,665 @@ +//! Implementation of "chaining" inlay hints. 
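+//!
+//! For example, in a multi-line method chain the type of each intermediate
+//! sub-expression is shown as a hint (illustrated below as trailing comments,
+//! mirroring the fixtures in this module's tests):
+//! ```no_run
+//! struct A(B);
+//! impl A { fn into_b(self) -> B { self.0 } }
+//! struct B(C);
+//! impl B { fn into_c(self) -> C { self.0 } }
+//! struct C;
+//!
+//! let _c = A(B(C))  // chaining hint: `A`
+//!     .into_b()     // chaining hint: `B`
+//!     .into_c();
+//! ```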
+use ide_db::famous_defs::FamousDefs; +use syntax::{ + ast::{self, AstNode}, + Direction, NodeOrToken, SyntaxKind, T, +}; + +use crate::{FileId, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; + +use super::label_of_ty; + +pub(super) fn hints( + acc: &mut Vec, + famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + file_id: FileId, + expr: &ast::Expr, +) -> Option<()> { + if !config.chaining_hints { + return None; + } + + if matches!(expr, ast::Expr::RecordExpr(_)) { + return None; + } + + let descended = sema.descend_node_into_attributes(expr.clone()).pop(); + let desc_expr = descended.as_ref().unwrap_or(expr); + + let mut tokens = expr + .syntax() + .siblings_with_tokens(Direction::Next) + .filter_map(NodeOrToken::into_token) + .filter(|t| match t.kind() { + SyntaxKind::WHITESPACE if !t.text().contains('\n') => false, + SyntaxKind::COMMENT => false, + _ => true, + }); + + // Chaining can be defined as an expression whose next sibling tokens are newline and dot + // Ignoring extra whitespace and comments + let next = tokens.next()?.kind(); + if next == SyntaxKind::WHITESPACE { + let mut next_next = tokens.next()?.kind(); + while next_next == SyntaxKind::WHITESPACE { + next_next = tokens.next()?.kind(); + } + if next_next == T![.] { + let ty = sema.type_of_expr(desc_expr)?.original; + if ty.is_unknown() { + return None; + } + if matches!(expr, ast::Expr::PathExpr(_)) { + if let Some(hir::Adt::Struct(st)) = ty.as_adt() { + if st.fields(sema.db).is_empty() { + return None; + } + } + } + acc.push(InlayHint { + range: expr.syntax().text_range(), + kind: InlayKind::ChainingHint, + label: label_of_ty(famous_defs, config, ty)?, + tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())), + }); + } + } + Some(()) +} + +#[cfg(test)] +mod tests { + use expect_test::expect; + + use crate::{ + inlay_hints::tests::{ + check_expect, check_with_config, DISABLED_CONFIG, DISABLED_CONFIG_WITH_LINKS, + TEST_CONFIG, + }, + InlayHintsConfig, + }; + + #[track_caller] + fn check_chains(ra_fixture: &str) { + check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture); + } + + #[test] + fn chaining_hints_ignore_comments() { + check_expect( + InlayHintsConfig { + type_hints: false, + chaining_hints: true, + ..DISABLED_CONFIG_WITH_LINKS + }, + r#" +struct A(B); +impl A { fn into_b(self) -> B { self.0 } } +struct B(C); +impl B { fn into_c(self) -> C { self.0 } } +struct C; + +fn main() { + let c = A(B(C)) + .into_b() // This is a comment + // This is another comment + .into_c(); +} +"#, + expect![[r#" + [ + InlayHint { + range: 147..172, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "B", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 63..64, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 147..172, + ), + ), + }, + InlayHint { + range: 147..154, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "A", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 7..8, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 147..154, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn chaining_hints_without_newlines() { + check_chains( + r#" +struct A(B); +impl A { fn into_b(self) -> B { self.0 } } +struct B(C); +impl B { fn into_c(self) -> C { self.0 } } +struct C; + +fn main() { + let c = A(B(C)).into_b().into_c(); +}"#, + ); + } + + #[test] + fn disabled_location_links() { + 
check_expect( + InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, + r#" + struct A { pub b: B } + struct B { pub c: C } + struct C(pub bool); + struct D; + + impl D { + fn foo(&self) -> i32 { 42 } + } + + fn main() { + let x = A { b: B { c: C(true) } } + .b + .c + .0; + let x = D + .foo(); + }"#, + expect![[r#" + [ + InlayHint { + range: 143..190, + kind: ChainingHint, + label: [ + "C", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 143..190, + ), + ), + }, + InlayHint { + range: 143..179, + kind: ChainingHint, + label: [ + "B", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 143..179, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn struct_access_chaining_hints() { + check_expect( + InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS }, + r#" +struct A { pub b: B } +struct B { pub c: C } +struct C(pub bool); +struct D; + +impl D { + fn foo(&self) -> i32 { 42 } +} + +fn main() { + let x = A { b: B { c: C(true) } } + .b + .c + .0; + let x = D + .foo(); +}"#, + expect![[r#" + [ + InlayHint { + range: 143..190, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "C", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 51..52, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 143..190, + ), + ), + }, + InlayHint { + range: 143..179, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "B", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 29..30, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 143..179, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn generic_chaining_hints() { + check_expect( + InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS }, + r#" +struct A(T); +struct B(T); +struct C(T); +struct X(T, R); + +impl A { + fn new(t: T) -> Self { A(t) } + fn into_b(self) -> B { B(self.0) } +} +impl B { + fn into_c(self) -> C { C(self.0) } +} +fn main() { + let c = A::new(X(42, true)) + .into_b() + .into_c(); +} +"#, + expect![[r#" + [ + InlayHint { + range: 246..283, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "B", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 23..24, + }, + ), + }, + "<", + InlayHintLabelPart { + text: "X", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 55..56, + }, + ), + }, + ">", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 246..283, + ), + ), + }, + InlayHint { + range: 246..265, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "A", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 7..8, + }, + ), + }, + "<", + InlayHintLabelPart { + text: "X", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 55..56, + }, + ), + }, + ">", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 246..265, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn shorten_iterator_chaining_hints() { + check_expect( + InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS }, + r#" +//- minicore: iterators +use core::iter; + +struct MyIter; + +impl Iterator for MyIter { + type Item = (); + fn next(&mut self) -> Option { + None + } +} + +fn main() { + let _x = MyIter.by_ref() + .take(5) + .by_ref() + .take(5) + .by_ref(); +} +"#, + expect![[r#" + [ + InlayHint { + range: 174..241, + kind: ChainingHint, + label: [ + "impl Iterator", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, 
+ ), + 174..241, + ), + ), + }, + InlayHint { + range: 174..224, + kind: ChainingHint, + label: [ + "impl Iterator", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 174..224, + ), + ), + }, + InlayHint { + range: 174..206, + kind: ChainingHint, + label: [ + "impl Iterator", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 174..206, + ), + ), + }, + InlayHint { + range: 174..189, + kind: ChainingHint, + label: [ + "&mut ", + InlayHintLabelPart { + text: "MyIter", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 24..30, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 174..189, + ), + ), + }, + ] + "#]], + ); + } + + #[test] + fn hints_in_attr_call() { + check_expect( + TEST_CONFIG, + r#" +//- proc_macros: identity, input_replace +struct Struct; +impl Struct { + fn chain(self) -> Self { + self + } +} +#[proc_macros::identity] +fn main() { + let strukt = Struct; + strukt + .chain() + .chain() + .chain(); + Struct::chain(strukt); +} +"#, + expect![[r#" + [ + InlayHint { + range: 124..130, + kind: TypeHint, + label: [ + "", + InlayHintLabelPart { + text: "Struct", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 7..13, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 124..130, + ), + ), + }, + InlayHint { + range: 145..185, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "Struct", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 7..13, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 145..185, + ), + ), + }, + InlayHint { + range: 145..168, + kind: ChainingHint, + label: [ + "", + InlayHintLabelPart { + text: "Struct", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 7..13, + }, + ), + }, + "", + ], + tooltip: Some( + HoverRanged( + FileId( + 0, + ), + 145..168, + ), + ), + }, + InlayHint { + range: 222..228, + kind: ParameterHint, + label: [ + "self", + ], + tooltip: Some( + HoverOffset( + FileId( + 0, + ), + 42, + ), + ), + }, + ] + "#]], + ); + } +} diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs new file mode 100644 index 0000000000000..e340c64c54b55 --- /dev/null +++ b/crates/ide/src/inlay_hints/closing_brace.rs @@ -0,0 +1,196 @@ +//! Implementation of "closing brace" inlay hints: +//! ```no_run +//! fn g() { +//! } /* fn g */ +//! ``` +use hir::{HirDisplay, Semantics}; +use ide_db::{base_db::FileRange, RootDatabase}; +use syntax::{ + ast::{self, AstNode, HasName}, + match_ast, SyntaxKind, SyntaxNode, T, +}; + +use crate::{ + inlay_hints::InlayHintLabelPart, FileId, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, +}; + +pub(super) fn hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + file_id: FileId, + node: SyntaxNode, +) -> Option<()> { + let min_lines = config.closing_brace_hints_min_lines?; + + let name = |it: ast::Name| it.syntax().text_range(); + + let mut closing_token; + let (label, name_range) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) { + closing_token = item_list.r_curly_token()?; + + let parent = item_list.syntax().parent()?; + match_ast! 
{ + match parent { + ast::Impl(imp) => { + let imp = sema.to_def(&imp)?; + let ty = imp.self_ty(sema.db); + let trait_ = imp.trait_(sema.db); + let hint_text = match trait_ { + Some(tr) => format!("impl {} for {}", tr.name(sema.db), ty.display_truncated(sema.db, config.max_length)), + None => format!("impl {}", ty.display_truncated(sema.db, config.max_length)), + }; + (hint_text, None) + }, + ast::Trait(tr) => { + (format!("trait {}", tr.name()?), tr.name().map(name)) + }, + _ => return None, + } + } + } else if let Some(list) = ast::ItemList::cast(node.clone()) { + closing_token = list.r_curly_token()?; + + let module = ast::Module::cast(list.syntax().parent()?)?; + (format!("mod {}", module.name()?), module.name().map(name)) + } else if let Some(block) = ast::BlockExpr::cast(node.clone()) { + closing_token = block.stmt_list()?.r_curly_token()?; + + let parent = block.syntax().parent()?; + match_ast! { + match parent { + ast::Fn(it) => { + // FIXME: this could include parameters, but `HirDisplay` prints too much info + // and doesn't respect the max length either, so the hints end up way too long + (format!("fn {}", it.name()?), it.name().map(name)) + }, + ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)), + ast::Const(it) => { + if it.underscore_token().is_some() { + ("const _".into(), None) + } else { + (format!("const {}", it.name()?), it.name().map(name)) + } + }, + _ => return None, + } + } + } else if let Some(mac) = ast::MacroCall::cast(node.clone()) { + let last_token = mac.syntax().last_token()?; + if last_token.kind() != T![;] && last_token.kind() != SyntaxKind::R_CURLY { + return None; + } + closing_token = last_token; + + ( + format!("{}!", mac.path()?), + mac.path().and_then(|it| it.segment()).map(|it| it.syntax().text_range()), + ) + } else { + return None; + }; + + if let Some(mut next) = closing_token.next_token() { + if next.kind() == T![;] { + if let Some(tok) = next.next_token() { + closing_token = next; + next = tok; + } + } + if !(next.kind() == SyntaxKind::WHITESPACE && next.text().contains('\n')) { + // Only display the hint if the `}` is the last token on the line + return None; + } + } + + let mut lines = 1; + node.text().for_each_chunk(|s| lines += s.matches('\n').count()); + if lines < min_lines { + return None; + } + + let linked_location = config + .location_links + .then(|| name_range.map(|range| FileRange { file_id, range })) + .flatten(); + acc.push(InlayHint { + range: closing_token.text_range(), + kind: InlayKind::ClosingBraceHint, + label: InlayHintLabel { parts: vec![InlayHintLabelPart { text: label, linked_location }] }, + tooltip: None, // provided by label part location + }); + + None +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, + InlayHintsConfig, + }; + + #[test] + fn hints_closing_brace() { + check_with_config( + InlayHintsConfig { closing_brace_hints_min_lines: Some(2), ..DISABLED_CONFIG }, + r#" +fn a() {} + +fn f() { +} // no hint unless `}` is the last token on the line + +fn g() { + } +//^ fn g + +fn h(with: T, arguments: u8, ...) { + } +//^ fn h + +trait Tr { + fn f(); + fn g() { + } + //^ fn g + } +//^ trait Tr +impl Tr for () { + } +//^ impl Tr for () +impl dyn Tr { + } +//^ impl dyn Tr + +static S0: () = 0; +static S1: () = {}; +static S2: () = { + }; +//^ static S2 +const _: () = { + }; +//^ const _ + +mod m { + } +//^ mod m + +m! {} +m!(); +m!( + ); +//^ m! + +m! { + } +//^ m! 
+ +fn f() { + let v = vec![ + ]; + } +//^ fn f +"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/closure_ret.rs b/crates/ide/src/inlay_hints/closure_ret.rs new file mode 100644 index 0000000000000..d9929beaac0c2 --- /dev/null +++ b/crates/ide/src/inlay_hints/closure_ret.rs @@ -0,0 +1,49 @@ +//! Implementation of "closure return type" inlay hints. +use ide_db::{base_db::FileId, famous_defs::FamousDefs}; +use syntax::ast::{self, AstNode}; + +use crate::{ + inlay_hints::closure_has_block_body, ClosureReturnTypeHints, InlayHint, InlayHintsConfig, + InlayKind, InlayTooltip, +}; + +use super::label_of_ty; + +pub(super) fn hints( + acc: &mut Vec, + famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + file_id: FileId, + closure: ast::ClosureExpr, +) -> Option<()> { + if config.closure_return_type_hints == ClosureReturnTypeHints::Never { + return None; + } + + if closure.ret_type().is_some() { + return None; + } + + if !closure_has_block_body(&closure) + && config.closure_return_type_hints == ClosureReturnTypeHints::WithBlock + { + return None; + } + + let param_list = closure.param_list()?; + + let closure = sema.descend_node_into_attributes(closure).pop()?; + let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure))?.adjusted(); + let callable = ty.as_callable(sema.db)?; + let ty = callable.return_type(); + if ty.is_unit() { + return None; + } + acc.push(InlayHint { + range: param_list.syntax().text_range(), + kind: InlayKind::ClosureReturnTypeHint, + label: label_of_ty(famous_defs, config, ty)?, + tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())), + }); + Some(()) +} diff --git a/crates/ide/src/inlay_hints/discriminant.rs b/crates/ide/src/inlay_hints/discriminant.rs new file mode 100644 index 0000000000000..f32c4bdf2883c --- /dev/null +++ b/crates/ide/src/inlay_hints/discriminant.rs @@ -0,0 +1,142 @@ +//! Implementation of "enum variant discriminant" inlay hints: +//! ```no_run +//! enum Foo { +//! Bar/* = 0*/, +//! } +//! 
``` +use ide_db::{base_db::FileId, famous_defs::FamousDefs}; +use syntax::ast::{self, AstNode, HasName}; + +use crate::{DiscriminantHints, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; + +pub(super) fn hints( + acc: &mut Vec, + FamousDefs(sema, _): &FamousDefs<'_, '_>, + config: &InlayHintsConfig, + _: FileId, + variant: &ast::Variant, +) -> Option<()> { + let field_list = match config.discriminant_hints { + DiscriminantHints::Always => variant.field_list(), + DiscriminantHints::Fieldless => match variant.field_list() { + Some(_) => return None, + None => None, + }, + DiscriminantHints::Never => return None, + }; + + if variant.eq_token().is_some() { + return None; + } + + let name = variant.name()?; + + let descended = sema.descend_node_into_attributes(variant.clone()).pop(); + let desc_pat = descended.as_ref().unwrap_or(variant); + let v = sema.to_def(desc_pat)?; + let d = v.eval(sema.db); + + acc.push(InlayHint { + range: match field_list { + Some(field_list) => name.syntax().text_range().cover(field_list.syntax().text_range()), + None => name.syntax().text_range(), + }, + kind: InlayKind::DiscriminantHint, + label: match &d { + Ok(v) => format!("{}", v).into(), + Err(_) => "?".into(), + }, + tooltip: Some(InlayTooltip::String(match &d { + Ok(_) => "enum variant discriminant".into(), + Err(e) => format!("{e:?}").into(), + })), + }); + + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::inlay_hints::{ + tests::{check_with_config, DISABLED_CONFIG}, + DiscriminantHints, InlayHintsConfig, + }; + + #[track_caller] + fn check_discriminants(ra_fixture: &str) { + check_with_config( + InlayHintsConfig { discriminant_hints: DiscriminantHints::Always, ..DISABLED_CONFIG }, + ra_fixture, + ); + } + + #[track_caller] + fn check_discriminants_fieldless(ra_fixture: &str) { + check_with_config( + InlayHintsConfig { + discriminant_hints: DiscriminantHints::Fieldless, + ..DISABLED_CONFIG + }, + ra_fixture, + ); + } + + #[test] + fn fieldless() { + check_discriminants( + r#" +enum Enum { + Variant, + //^^^^^^^0 + Variant1, + //^^^^^^^^1 + Variant2, + //^^^^^^^^2 + Variant5 = 5, + Variant6, + //^^^^^^^^6 +} +"#, + ); + } + + #[test] + fn datacarrying_mixed() { + check_discriminants( + r#" +enum Enum { + Variant(), + //^^^^^^^^^0 + Variant1, + //^^^^^^^^1 + Variant2 {}, + //^^^^^^^^^^^2 + Variant3, + //^^^^^^^^3 + Variant5 = 5, + Variant6, + //^^^^^^^^6 +} +"#, + ); + } + + #[test] + fn datacarrying_mixed_fieldless_set() { + check_discriminants_fieldless( + r#" +enum Enum { + Variant(), + Variant1, + //^^^^^^^^1 + Variant2 {}, + Variant3, + //^^^^^^^^3 + Variant5 = 5, + Variant6, + //^^^^^^^^6 +} +"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs new file mode 100644 index 0000000000000..2aa5e3dc734fc --- /dev/null +++ b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs @@ -0,0 +1,325 @@ +//! Implementation of "lifetime elision" inlay hints: +//! ```no_run +//! fn example/* <'0> */(a: &/* '0 */()) {} +//! 
``` +use ide_db::{syntax_helpers::node_ext::walk_ty, FxHashMap}; +use itertools::Itertools; +use syntax::SmolStr; +use syntax::{ + ast::{self, AstNode, HasGenericParams, HasName}, + SyntaxToken, +}; + +use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints}; + +pub(super) fn hints( + acc: &mut Vec, + config: &InlayHintsConfig, + func: ast::Fn, +) -> Option<()> { + if config.lifetime_elision_hints == LifetimeElisionHints::Never { + return None; + } + + let mk_lt_hint = |t: SyntaxToken, label: String| InlayHint { + range: t.text_range(), + kind: InlayKind::LifetimeHint, + label: label.into(), + tooltip: Some(InlayTooltip::String("Elided lifetime".into())), + }; + + let param_list = func.param_list()?; + let generic_param_list = func.generic_param_list(); + let ret_type = func.ret_type(); + let self_param = param_list.self_param().filter(|it| it.amp_token().is_some()); + + let is_elided = |lt: &Option| match lt { + Some(lt) => matches!(lt.text().as_str(), "'_"), + None => true, + }; + + let potential_lt_refs = { + let mut acc: Vec<_> = vec![]; + if let Some(self_param) = &self_param { + let lifetime = self_param.lifetime(); + let is_elided = is_elided(&lifetime); + acc.push((None, self_param.amp_token(), lifetime, is_elided)); + } + param_list.params().filter_map(|it| Some((it.pat(), it.ty()?))).for_each(|(pat, ty)| { + // FIXME: check path types + walk_ty(&ty, &mut |ty| match ty { + ast::Type::RefType(r) => { + let lifetime = r.lifetime(); + let is_elided = is_elided(&lifetime); + acc.push(( + pat.as_ref().and_then(|it| match it { + ast::Pat::IdentPat(p) => p.name(), + _ => None, + }), + r.amp_token(), + lifetime, + is_elided, + )); + false + } + ast::Type::FnPtrType(_) => true, + ast::Type::PathType(t) => { + t.path().and_then(|it| it.segment()).and_then(|it| it.param_list()).is_some() + } + _ => false, + }) + }); + acc + }; + + // allocate names + let mut gen_idx_name = { + let mut gen = (0u8..).map(|idx| match idx { + idx if idx < 10 => SmolStr::from_iter(['\'', (idx + 48) as char]), + idx => format!("'{idx}").into(), + }); + move || gen.next().unwrap_or_default() + }; + let mut allocated_lifetimes = vec![]; + + let mut used_names: FxHashMap = + match config.param_names_for_lifetime_elision_hints { + true => generic_param_list + .iter() + .flat_map(|gpl| gpl.lifetime_params()) + .filter_map(|param| param.lifetime()) + .filter_map(|lt| Some((SmolStr::from(lt.text().as_str().get(1..)?), 0))) + .collect(), + false => Default::default(), + }; + { + let mut potential_lt_refs = potential_lt_refs.iter().filter(|&&(.., is_elided)| is_elided); + if let Some(_) = &self_param { + if let Some(_) = potential_lt_refs.next() { + allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints { + // self can't be used as a lifetime, so no need to check for collisions + "'self".into() + } else { + gen_idx_name() + }); + } + } + potential_lt_refs.for_each(|(name, ..)| { + let name = match name { + Some(it) if config.param_names_for_lifetime_elision_hints => { + if let Some(c) = used_names.get_mut(it.text().as_str()) { + *c += 1; + SmolStr::from(format!("'{text}{c}", text = it.text().as_str())) + } else { + used_names.insert(it.text().as_str().into(), 0); + SmolStr::from_iter(["\'", it.text().as_str()]) + } + } + _ => gen_idx_name(), + }; + allocated_lifetimes.push(name); + }); + } + + // fetch output lifetime if elision rule applies + let output = match potential_lt_refs.as_slice() { + [(_, _, lifetime, _), ..] 
if self_param.is_some() || potential_lt_refs.len() == 1 => { + match lifetime { + Some(lt) => match lt.text().as_str() { + "'_" => allocated_lifetimes.get(0).cloned(), + "'static" => None, + name => Some(name.into()), + }, + None => allocated_lifetimes.get(0).cloned(), + } + } + [..] => None, + }; + + if allocated_lifetimes.is_empty() && output.is_none() { + return None; + } + + // apply hints + // apply output if required + let mut is_trivial = true; + if let (Some(output_lt), Some(r)) = (&output, ret_type) { + if let Some(ty) = r.ty() { + walk_ty(&ty, &mut |ty| match ty { + ast::Type::RefType(ty) if ty.lifetime().is_none() => { + if let Some(amp) = ty.amp_token() { + is_trivial = false; + acc.push(mk_lt_hint(amp, output_lt.to_string())); + } + false + } + ast::Type::FnPtrType(_) => true, + ast::Type::PathType(t) => { + t.path().and_then(|it| it.segment()).and_then(|it| it.param_list()).is_some() + } + _ => false, + }) + } + } + + if config.lifetime_elision_hints == LifetimeElisionHints::SkipTrivial && is_trivial { + return None; + } + + let mut a = allocated_lifetimes.iter(); + for (_, amp_token, _, is_elided) in potential_lt_refs { + if is_elided { + let t = amp_token?; + let lt = a.next()?; + acc.push(mk_lt_hint(t, lt.to_string())); + } + } + + // generate generic param list things + match (generic_param_list, allocated_lifetimes.as_slice()) { + (_, []) => (), + (Some(gpl), allocated_lifetimes) => { + let angle_tok = gpl.l_angle_token()?; + let is_empty = gpl.generic_params().next().is_none(); + acc.push(InlayHint { + range: angle_tok.text_range(), + kind: InlayKind::LifetimeHint, + label: format!( + "{}{}", + allocated_lifetimes.iter().format(", "), + if is_empty { "" } else { ", " } + ) + .into(), + tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), + }); + } + (None, allocated_lifetimes) => acc.push(InlayHint { + range: func.name()?.syntax().text_range(), + kind: InlayKind::GenericParamListHint, + label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(), + tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), + }), + } + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check, check_with_config, TEST_CONFIG}, + InlayHintsConfig, LifetimeElisionHints, + }; + + #[test] + fn hints_lifetimes() { + check( + r#" +fn empty() {} + +fn no_gpl(a: &()) {} + //^^^^^^<'0> + // ^'0 +fn empty_gpl<>(a: &()) {} + // ^'0 ^'0 +fn partial<'b>(a: &(), b: &'b ()) {} +// ^'0, $ ^'0 +fn partial<'a>(a: &'a (), b: &()) {} +// ^'0, $ ^'0 + +fn single_ret(a: &()) -> &() {} +// ^^^^^^^^^^<'0> + // ^'0 ^'0 +fn full_mul(a: &(), b: &()) {} +// ^^^^^^^^<'0, '1> + // ^'0 ^'1 + +fn foo<'c>(a: &'c ()) -> &() {} + // ^'c + +fn nested_in(a: & &X< &()>) {} +// ^^^^^^^^^<'0, '1, '2> + //^'0 ^'1 ^'2 +fn nested_out(a: &()) -> & &X< &()>{} +// ^^^^^^^^^^<'0> + //^'0 ^'0 ^'0 ^'0 + +impl () { + fn foo(&self) {} + // ^^^<'0> + // ^'0 + fn foo(&self) -> &() {} + // ^^^<'0> + // ^'0 ^'0 + fn foo(&self, a: &()) -> &() {} + // ^^^<'0, '1> + // ^'0 ^'1 ^'0 +} +"#, + ); + } + + #[test] + fn hints_lifetimes_named() { + check_with_config( + InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG }, + r#" +fn nested_in<'named>(named: & &X< &()>) {} +// ^'named1, 'named2, 'named3, $ + //^'named1 ^'named2 ^'named3 +"#, + ); + } + + #[test] + fn hints_lifetimes_trivial_skip() { + check_with_config( + InlayHintsConfig { + lifetime_elision_hints: LifetimeElisionHints::SkipTrivial, + ..TEST_CONFIG + }, + r#" +fn no_gpl(a: &()) {} +fn empty_gpl<>(a: &()) 
{} +fn partial<'b>(a: &(), b: &'b ()) {} +fn partial<'a>(a: &'a (), b: &()) {} + +fn single_ret(a: &()) -> &() {} +// ^^^^^^^^^^<'0> + // ^'0 ^'0 +fn full_mul(a: &(), b: &()) {} + +fn foo<'c>(a: &'c ()) -> &() {} + // ^'c + +fn nested_in(a: & &X< &()>) {} +fn nested_out(a: &()) -> & &X< &()>{} +// ^^^^^^^^^^<'0> + //^'0 ^'0 ^'0 ^'0 + +impl () { + fn foo(&self) {} + fn foo(&self) -> &() {} + // ^^^<'0> + // ^'0 ^'0 + fn foo(&self, a: &()) -> &() {} + // ^^^<'0, '1> + // ^'0 ^'1 ^'0 +} +"#, + ); + } + + #[test] + fn hints_lifetimes_skip_fn_likes() { + check_with_config( + InlayHintsConfig { + lifetime_elision_hints: LifetimeElisionHints::Always, + ..TEST_CONFIG + }, + r#" +fn fn_ptr(a: fn(&()) -> &()) {} +fn fn_trait<>(a: impl Fn(&()) -> &()) {} +"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/implicit_static.rs b/crates/ide/src/inlay_hints/implicit_static.rs new file mode 100644 index 0000000000000..588a0e3b6a4b6 --- /dev/null +++ b/crates/ide/src/inlay_hints/implicit_static.rs @@ -0,0 +1,75 @@ +//! Implementation of "implicit static" inlay hints: +//! ```no_run +//! static S: &/* 'static */str = ""; +//! ``` +use either::Either; +use syntax::{ + ast::{self, AstNode}, + SyntaxKind, +}; + +use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints}; + +pub(super) fn hints( + acc: &mut Vec, + config: &InlayHintsConfig, + statik_or_const: Either, +) -> Option<()> { + if config.lifetime_elision_hints != LifetimeElisionHints::Always { + return None; + } + + if let Either::Right(it) = &statik_or_const { + if ast::AssocItemList::can_cast( + it.syntax().parent().map_or(SyntaxKind::EOF, |it| it.kind()), + ) { + return None; + } + } + + if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) { + if ty.lifetime().is_none() { + let t = ty.amp_token()?; + acc.push(InlayHint { + range: t.text_range(), + kind: InlayKind::LifetimeHint, + label: "'static".to_owned().into(), + tooltip: Some(InlayTooltip::String("Elided static lifetime".into())), + }); + } + } + + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, TEST_CONFIG}, + InlayHintsConfig, LifetimeElisionHints, + }; + + #[test] + fn hints_lifetimes_static() { + check_with_config( + InlayHintsConfig { + lifetime_elision_hints: LifetimeElisionHints::Always, + ..TEST_CONFIG + }, + r#" +trait Trait {} +static S: &str = ""; +// ^'static +const C: &str = ""; +// ^'static +const C: &dyn Trait = panic!(); +// ^'static + +impl () { + const C: &str = ""; + const C: &dyn Trait = panic!(); +} +"#, + ); + } +} diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs new file mode 100644 index 0000000000000..ecee67632e35e --- /dev/null +++ b/crates/ide/src/inlay_hints/param_name.rs @@ -0,0 +1,546 @@ +//! Implementation of "param name" inlay hints: +//! ```no_run +//! fn max(x: i32, y: i32) -> i32 { x + y } +//! _ = max(/*x*/4, /*y*/4); +//! 
``` +use either::Either; +use hir::{Callable, Semantics}; +use ide_db::{base_db::FileRange, RootDatabase}; + +use stdx::to_lower_snake_case; +use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp}; + +use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; + +pub(super) fn hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + config: &InlayHintsConfig, + expr: ast::Expr, +) -> Option<()> { + if !config.parameter_hints { + return None; + } + + let (callable, arg_list) = get_callable(sema, &expr)?; + let hints = callable + .params(sema.db) + .into_iter() + .zip(arg_list.args()) + .filter_map(|((param, _ty), arg)| { + // Only annotate hints for expressions that exist in the original file + let range = sema.original_range_opt(arg.syntax())?; + let (param_name, name_syntax) = match param.as_ref()? { + Either::Left(pat) => ("self".to_string(), pat.name()), + Either::Right(pat) => match pat { + ast::Pat::IdentPat(it) => (it.name()?.to_string(), it.name()), + _ => return None, + }, + }; + Some((name_syntax, param_name, arg, range)) + }) + .filter(|(_, param_name, arg, _)| { + !should_hide_param_name_hint(sema, &callable, param_name, arg) + }) + .map(|(param, param_name, _, FileRange { range, .. })| { + let mut tooltip = None; + if let Some(name) = param { + if let hir::CallableKind::Function(f) = callable.kind() { + // assert the file is cached so we can map out of macros + if let Some(_) = sema.source(f) { + tooltip = sema.original_range_opt(name.syntax()); + } + } + } + + InlayHint { + range, + kind: InlayKind::ParameterHint, + label: param_name.into(), + tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())), + } + }); + + acc.extend(hints); + Some(()) +} + +fn get_callable( + sema: &Semantics<'_, RootDatabase>, + expr: &ast::Expr, +) -> Option<(hir::Callable, ast::ArgList)> { + match expr { + ast::Expr::CallExpr(expr) => { + let descended = sema.descend_node_into_attributes(expr.clone()).pop(); + let expr = descended.as_ref().unwrap_or(expr); + sema.type_of_expr(&expr.expr()?)?.original.as_callable(sema.db).zip(expr.arg_list()) + } + ast::Expr::MethodCallExpr(expr) => { + let descended = sema.descend_node_into_attributes(expr.clone()).pop(); + let expr = descended.as_ref().unwrap_or(expr); + sema.resolve_method_call_as_callable(expr).zip(expr.arg_list()) + } + _ => None, + } +} + +fn should_hide_param_name_hint( + sema: &Semantics<'_, RootDatabase>, + callable: &hir::Callable, + param_name: &str, + argument: &ast::Expr, +) -> bool { + // These are to be tested in the `parameter_hint_heuristics` test + // hide when: + // - the parameter name is a suffix of the function's name + // - the argument is a qualified constructing or call expression where the qualifier is an ADT + // - exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix + // of argument with _ splitting it off + // - param starts with `ra_fixture` + // - param is a well known name in a unary function + + let param_name = param_name.trim_start_matches('_'); + if param_name.is_empty() { + return true; + } + + if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) { + return false; + } + + let fn_name = match callable.kind() { + hir::CallableKind::Function(it) => Some(it.name(sema.db).to_smol_str()), + _ => None, + }; + let fn_name = fn_name.as_deref(); + is_param_name_suffix_of_fn_name(param_name, callable, fn_name) + || is_argument_similar_to_param_name(argument, param_name) + || 
param_name.starts_with("ra_fixture") + || (callable.n_params() == 1 && is_obvious_param(param_name)) + || is_adt_constructor_similar_to_param_name(sema, argument, param_name) +} + +/// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal. +/// +/// `fn strip_suffix(suffix)` will be hidden. +/// `fn stripsuffix(suffix)` will not be hidden. +fn is_param_name_suffix_of_fn_name( + param_name: &str, + callable: &Callable, + fn_name: Option<&str>, +) -> bool { + match (callable.n_params(), fn_name) { + (1, Some(function)) => { + function == param_name + || function + .len() + .checked_sub(param_name.len()) + .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at))) + .map_or(false, |(prefix, suffix)| { + suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_') + }) + } + _ => false, + } +} + +fn is_argument_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool { + // check whether param_name and argument are the same or + // whether param_name is a prefix/suffix of argument(split at `_`) + let argument = match get_string_representation(argument) { + Some(argument) => argument, + None => return false, + }; + + // std is honestly too panic happy... + let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at)); + + let param_name = param_name.trim_start_matches('_'); + let argument = argument.trim_start_matches('_'); + + match str_split_at(argument, param_name.len()) { + Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => { + return rest.is_empty() || rest.starts_with('_'); + } + _ => (), + } + match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) { + Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => { + return rest.is_empty() || rest.ends_with('_'); + } + _ => (), + } + false +} + +fn get_string_representation(expr: &ast::Expr) -> Option { + match expr { + ast::Expr::MethodCallExpr(method_call_expr) => { + let name_ref = method_call_expr.name_ref()?; + match name_ref.text().as_str() { + "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()), + name_ref => Some(name_ref.to_owned()), + } + } + ast::Expr::MacroExpr(macro_expr) => { + Some(macro_expr.macro_call()?.path()?.segment()?.to_string()) + } + ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()), + ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()), + ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?), + ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?), + ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?), + _ => None, + } +} + +fn is_obvious_param(param_name: &str) -> bool { + // avoid displaying hints for common functions like map, filter, etc. 
+ // or other obvious words used in std + let is_obvious_param_name = + matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other"); + param_name.len() == 1 || is_obvious_param_name +} + +fn is_adt_constructor_similar_to_param_name( + sema: &Semantics<'_, RootDatabase>, + argument: &ast::Expr, + param_name: &str, +) -> bool { + let path = match argument { + ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e { + ast::Expr::PathExpr(p) => p.path(), + _ => None, + }), + ast::Expr::PathExpr(p) => p.path(), + ast::Expr::RecordExpr(r) => r.path(), + _ => return false, + }; + let path = match path { + Some(it) => it, + None => return false, + }; + (|| match sema.resolve_path(&path)? { + hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => { + Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name) + } + hir::PathResolution::Def(hir::ModuleDef::Function(_) | hir::ModuleDef::Variant(_)) => { + if to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name { + return Some(true); + } + let qual = path.qualifier()?; + match sema.resolve_path(&qual)? { + hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => { + Some(to_lower_snake_case(&qual.segment()?.name_ref()?.text()) == param_name) + } + _ => None, + } + } + _ => None, + })() + .unwrap_or(false) +} + +#[cfg(test)] +mod tests { + use crate::{ + inlay_hints::tests::{check_with_config, DISABLED_CONFIG}, + InlayHintsConfig, + }; + + #[track_caller] + fn check_params(ra_fixture: &str) { + check_with_config( + InlayHintsConfig { parameter_hints: true, ..DISABLED_CONFIG }, + ra_fixture, + ); + } + + #[test] + fn param_hints_only() { + check_params( + r#" +fn foo(a: i32, b: i32) -> i32 { a + b } +fn main() { + let _x = foo( + 4, + //^ a + 4, + //^ b + ); +}"#, + ); + } + + #[test] + fn param_hints_on_closure() { + check_params( + r#" +fn main() { + let clo = |a: u8, b: u8| a + b; + clo( + 1, + //^ a + 2, + //^ b + ); +} + "#, + ); + } + + #[test] + fn param_name_similar_to_fn_name_still_hints() { + check_params( + r#" +fn max(x: i32, y: i32) -> i32 { x + y } +fn main() { + let _x = max( + 4, + //^ x + 4, + //^ y + ); +}"#, + ); + } + + #[test] + fn param_name_similar_to_fn_name() { + check_params( + r#" +fn param_with_underscore(with_underscore: i32) -> i32 { with_underscore } +fn main() { + let _x = param_with_underscore( + 4, + ); +}"#, + ); + check_params( + r#" +fn param_with_underscore(underscore: i32) -> i32 { underscore } +fn main() { + let _x = param_with_underscore( + 4, + ); +}"#, + ); + } + + #[test] + fn param_name_same_as_fn_name() { + check_params( + r#" +fn foo(foo: i32) -> i32 { foo } +fn main() { + let _x = foo( + 4, + ); +}"#, + ); + } + + #[test] + fn never_hide_param_when_multiple_params() { + check_params( + r#" +fn foo(foo: i32, bar: i32) -> i32 { bar + baz } +fn main() { + let _x = foo( + 4, + //^ foo + 8, + //^ bar + ); +}"#, + ); + } + + #[test] + fn param_hints_look_through_as_ref_and_clone() { + check_params( + r#" +fn foo(bar: i32, baz: f32) {} + +fn main() { + let bar = 3; + let baz = &"baz"; + let fez = 1.0; + foo(bar.clone(), bar.clone()); + //^^^^^^^^^^^ baz + foo(bar.as_ref(), bar.as_ref()); + //^^^^^^^^^^^^ baz +} +"#, + ); + } + + #[test] + fn self_param_hints() { + check_params( + r#" +struct Foo; + +impl Foo { + fn foo(self: Self) {} + fn bar(self: &Self) {} +} + +fn main() { + Foo::foo(Foo); + //^^^ self + Foo::bar(&Foo); + //^^^^ self +} +"#, + ) + } + + #[test] + fn param_name_hints_show_for_literals() { + check_params( + r#"pub fn test(a: i32, b: i32) -> [i32; 2] { [a, b] 
} +fn main() { + test( + 0xa_b, + //^^^^^ a + 0xa_b, + //^^^^^ b + ); +}"#, + ) + } + + #[test] + fn function_call_parameter_hint() { + check_params( + r#" +//- minicore: option +struct FileId {} +struct SmolStr {} + +struct TextRange {} +struct SyntaxKind {} +struct NavigationTarget {} + +struct Test {} + +impl Test { + fn method(&self, mut param: i32) -> i32 { param * 2 } + + fn from_syntax( + file_id: FileId, + name: SmolStr, + focus_range: Option, + full_range: TextRange, + kind: SyntaxKind, + docs: Option, + ) -> NavigationTarget { + NavigationTarget {} + } +} + +fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 { + foo + bar +} + +fn main() { + let not_literal = 1; + let _: i32 = test_func(1, 2, "hello", 3, not_literal); + //^ foo ^ bar ^^^^^^^ msg ^^^^^^^^^^^ last + let t: Test = Test {}; + t.method(123); + //^^^ param + Test::method(&t, 3456); + //^^ self ^^^^ param + Test::from_syntax( + FileId {}, + "impl".into(), + //^^^^^^^^^^^^^ name + None, + //^^^^ focus_range + TextRange {}, + //^^^^^^^^^^^^ full_range + SyntaxKind {}, + //^^^^^^^^^^^^^ kind + None, + //^^^^ docs + ); +}"#, + ); + } + + #[test] + fn parameter_hint_heuristics() { + check_params( + r#" +fn check(ra_fixture_thing: &str) {} + +fn map(f: i32) {} +fn filter(predicate: i32) {} + +fn strip_suffix(suffix: &str) {} +fn stripsuffix(suffix: &str) {} +fn same(same: u32) {} +fn same2(_same2: u32) {} + +fn enum_matches_param_name(completion_kind: CompletionKind) {} + +fn foo(param: u32) {} +fn bar(param_eter: u32) {} + +enum CompletionKind { + Keyword, +} + +fn non_ident_pat((a, b): (u32, u32)) {} + +fn main() { + const PARAM: u32 = 0; + foo(PARAM); + foo(!PARAM); + // ^^^^^^ param + check(""); + + map(0); + filter(0); + + strip_suffix(""); + stripsuffix(""); + //^^ suffix + same(0); + same2(0); + + enum_matches_param_name(CompletionKind::Keyword); + + let param = 0; + foo(param); + foo(param as _); + let param_end = 0; + foo(param_end); + let start_param = 0; + foo(start_param); + let param2 = 0; + foo(param2); + //^^^^^^ param + + macro_rules! 
param { + () => {}; + }; + foo(param!()); + + let param_eter = 0; + bar(param_eter); + let param_eter_end = 0; + bar(param_eter_end); + let start_param_eter = 0; + bar(start_param_eter); + let param_eter2 = 0; + bar(param_eter2); + //^^^^^^^^^^^ param_eter + + non_ident_pat((0, 0)); +}"#, + ); + } +} diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 7402e86f36fa4..239456cb28167 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -81,8 +81,8 @@ pub use crate::{ highlight_related::{HighlightRelatedConfig, HighlightedRange}, hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult}, inlay_hints::{ - AdjustmentHints, ClosureReturnTypeHints, InlayHint, InlayHintLabel, InlayHintsConfig, - InlayKind, InlayTooltip, LifetimeElisionHints, + AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints, InlayHint, + InlayHintLabel, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, }, join_lines::JoinLinesConfig, markup::Markup, @@ -236,6 +236,7 @@ impl Analysis { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, + None, ); change.change_file(file_id, Some(Arc::new(text))); change.set_crate_graph(crate_graph); diff --git a/crates/ide/src/markup.rs b/crates/ide/src/markup.rs index 60c193c40aba2..de9fef61a78ea 100644 --- a/crates/ide/src/markup.rs +++ b/crates/ide/src/markup.rs @@ -33,6 +33,6 @@ impl Markup { self.text.as_str() } pub fn fenced_block(contents: &impl fmt::Display) -> Markup { - format!("```rust\n{}\n```", contents).into() + format!("```rust\n{contents}\n```").into() } } diff --git a/crates/ide/src/moniker.rs b/crates/ide/src/moniker.rs index fcbf6d8e58c4b..af5e96d2381ac 100644 --- a/crates/ide/src/moniker.rs +++ b/crates/ide/src/moniker.rs @@ -273,7 +273,7 @@ mod tests { fn no_moniker(ra_fixture: &str) { let (analysis, position) = fixture::position(ra_fixture); if let Some(x) = analysis.moniker(position).unwrap() { - assert_eq!(x.info.len(), 0, "Moniker founded but no moniker expected: {:?}", x); + assert_eq!(x.info.len(), 0, "Moniker founded but no moniker expected: {x:?}"); } } diff --git a/crates/ide/src/navigation_target.rs b/crates/ide/src/navigation_target.rs index 9f049e298ad11..3aa799d43a8a3 100644 --- a/crates/ide/src/navigation_target.rs +++ b/crates/ide/src/navigation_target.rs @@ -117,10 +117,10 @@ impl NavigationTarget { self.full_range ); if let Some(focus_range) = self.focus_range { - buf.push_str(&format!(" {:?}", focus_range)) + buf.push_str(&format!(" {focus_range:?}")) } if let Some(container_name) = &self.container_name { - buf.push_str(&format!(" {}", container_name)) + buf.push_str(&format!(" {container_name}")) } buf } diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index b4df0437050f4..15bdf14fb9b6d 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -345,7 +345,7 @@ mod tests { let (analysis, position) = fixture::position(ra_fixture_before); let rename_result = analysis .rename(position, new_name) - .unwrap_or_else(|err| panic!("Rename to '{}' was cancelled: {}", new_name, err)); + .unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}")); match rename_result { Ok(source_change) => { let mut text_edit_builder = TextEdit::builder(); @@ -364,14 +364,11 @@ mod tests { } Err(err) => { if ra_fixture_after.starts_with("error:") { - let error_message = ra_fixture_after - .chars() - .into_iter() - .skip("error:".len()) - .collect::(); + let error_message = + 
ra_fixture_after.chars().skip("error:".len()).collect::(); assert_eq!(error_message.trim(), err.to_string()); } else { - panic!("Rename to '{}' failed unexpectedly: {}", new_name, err) + panic!("Rename to '{new_name}' failed unexpectedly: {err}") } } }; @@ -397,11 +394,11 @@ mod tests { let (analysis, position) = fixture::position(ra_fixture); let result = analysis .prepare_rename(position) - .unwrap_or_else(|err| panic!("PrepareRename was cancelled: {}", err)); + .unwrap_or_else(|err| panic!("PrepareRename was cancelled: {err}")); match result { Ok(RangeInfo { range, info: () }) => { let source = analysis.file_text(position.file_id).unwrap(); - expect.assert_eq(&format!("{:?}: {}", range, &source[range])) + expect.assert_eq(&format!("{range:?}: {}", &source[range])) } Err(RenameError(err)) => expect.assert_eq(&err), }; diff --git a/crates/ide/src/runnables.rs b/crates/ide/src/runnables.rs index 0181c6b8e456a..5b35262aabe11 100644 --- a/crates/ide/src/runnables.rs +++ b/crates/ide/src/runnables.rs @@ -66,12 +66,12 @@ impl Runnable { // test package::module::testname pub fn label(&self, target: Option) -> String { match &self.kind { - RunnableKind::Test { test_id, .. } => format!("test {}", test_id), - RunnableKind::TestMod { path } => format!("test-mod {}", path), - RunnableKind::Bench { test_id } => format!("bench {}", test_id), - RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id), + RunnableKind::Test { test_id, .. } => format!("test {test_id}"), + RunnableKind::TestMod { path } => format!("test-mod {path}"), + RunnableKind::Bench { test_id } => format!("bench {test_id}"), + RunnableKind::DocTest { test_id, .. } => format!("doctest {test_id}"), RunnableKind::Bin => { - target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t)) + target.map_or_else(|| "run binary".to_string(), |t| format!("run {t}")) } } } @@ -377,7 +377,7 @@ pub(crate) fn runnable_impl( } else { String::new() }; - let mut test_id = format!("{}{}", adt_name, params); + let mut test_id = format!("{adt_name}{params}"); test_id.retain(|c| c != ' '); let test_id = TestId::Path(test_id); diff --git a/crates/ide/src/shuffle_crate_graph.rs b/crates/ide/src/shuffle_crate_graph.rs index 2d86627643d7c..ae539a5d397f7 100644 --- a/crates/ide/src/shuffle_crate_graph.rs +++ b/crates/ide/src/shuffle_crate_graph.rs @@ -36,6 +36,7 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { data.proc_macro.clone(), data.is_proc_macro, data.origin.clone(), + data.target_layout.clone(), ); map.insert(old_id, new_id); } diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index e7412d27faf44..f807ba30f40a3 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -74,20 +74,28 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio ast::ArgList(arg_list) => { let cursor_outside = arg_list.r_paren_token().as_ref() == Some(&token); if cursor_outside { - return None; + continue; } - return signature_help_for_call(&sema, token); + return signature_help_for_call(&sema, arg_list, token); }, ast::GenericArgList(garg_list) => { let cursor_outside = garg_list.r_angle_token().as_ref() == Some(&token); if cursor_outside { - return None; + continue; } - return signature_help_for_generics(&sema, token); + return signature_help_for_generics(&sema, garg_list, token); }, _ => (), } } + + // Stop at multi-line expressions, since the signature of the outer call is not very + // helpful inside them. 
+ if let Some(expr) = ast::Expr::cast(node.clone()) { + if expr.syntax().text().contains_char('\n') { + return None; + } + } } None @@ -95,10 +103,11 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio fn signature_help_for_call( sema: &Semantics<'_, RootDatabase>, + arg_list: ast::ArgList, token: SyntaxToken, ) -> Option { // Find the calling expression and its NameRef - let mut node = token.parent()?; + let mut node = arg_list.syntax().parent()?; let calling_node = loop { if let Some(callable) = ast::CallableExpr::cast(node.clone()) { if callable @@ -109,14 +118,6 @@ fn signature_help_for_call( } } - // Stop at multi-line expressions, since the signature of the outer call is not very - // helpful inside them. - if let Some(expr) = ast::Expr::cast(node.clone()) { - if expr.syntax().text().contains_char('\n') { - return None; - } - } - node = node.parent()?; }; @@ -200,10 +201,11 @@ fn signature_help_for_call( fn signature_help_for_generics( sema: &Semantics<'_, RootDatabase>, + garg_list: ast::GenericArgList, token: SyntaxToken, ) -> Option { - let parent = token.parent()?; - let arg_list = parent + let arg_list = garg_list + .syntax() .ancestors() .filter_map(ast::GenericArgList::cast) .find(|list| list.syntax().text_range().contains(token.text_range().start()))?; @@ -644,7 +646,7 @@ pub fn add_one(x: i32) -> i32 { x + 1 } -pub fn do() { +pub fn r#do() { add_one($0 }"#, expect![[r##" @@ -770,6 +772,32 @@ fn f() { "#, expect![[]], ); + check( + r#" +fn foo(a: u8) -> u8 {a} +fn bar(a: u8) -> u8 {a} +fn f() { + foo(bar(123)$0) +} +"#, + expect![[r#" + fn foo(a: u8) -> u8 + ^^^^^ + "#]], + ); + check( + r#" +struct Vec(T); +struct Vec2(T); +fn f() { + let _: Vec2$0> +} +"#, + expect![[r#" + struct Vec2 + ^ + "#]], + ); } #[test] diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 2380cf7381c1c..a6b30ba139621 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -13,6 +13,7 @@ use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T}; use crate::{ hover::hover_for_definition, + inlay_hints::AdjustmentHintsMode, moniker::{def_to_moniker, MonikerResult}, parent_module::crates_for, Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig, @@ -106,13 +107,17 @@ impl StaticIndex<'_> { .analysis .inlay_hints( &InlayHintsConfig { + location_links: true, render_colons: true, + discriminant_hints: crate::DiscriminantHints::Fieldless, type_hints: true, parameter_hints: true, chaining_hints: true, closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock, lifetime_elision_hints: crate::LifetimeElisionHints::Never, adjustment_hints: crate::AdjustmentHints::Never, + adjustment_hints_mode: AdjustmentHintsMode::Prefix, + adjustment_hints_hide_outside_unsafe: false, hide_named_constructor_hints: false, hide_closure_initialization_hints: false, param_names_for_lifetime_elision_hints: false, @@ -231,13 +236,13 @@ mod tests { for (range, _) in f.tokens { let x = FileRange { file_id: f.file_id, range }; if !range_set.contains(&x) { - panic!("additional range {:?}", x); + panic!("additional range {x:?}"); } range_set.remove(&x); } } if !range_set.is_empty() { - panic!("unfound ranges {:?}", range_set); + panic!("unfound ranges {range_set:?}"); } } @@ -252,13 +257,13 @@ mod tests { continue; } if !range_set.contains(&x) { - panic!("additional definition {:?}", x); + panic!("additional definition {x:?}"); } range_set.remove(&x); } } if !range_set.is_empty() { - panic!("unfound 
definitions {:?}", range_set); + panic!("unfound definitions {range_set:?}"); } } diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index 20810c25b3e81..7ce782f93be14 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -52,8 +52,8 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { let crate_graph = db.crate_graph(); for krate in crates { let display_crate = |krate: CrateId| match &crate_graph[krate].display_name { - Some(it) => format!("{}({:?})", it, krate), - None => format!("{:?}", krate), + Some(it) => format!("{it}({krate:?})"), + None => format!("{krate:?}"), }; format_to!(buf, "Crate: {}\n", display_crate(krate)); let deps = crate_graph[krate] diff --git a/crates/ide/src/syntax_highlighting/highlight.rs b/crates/ide/src/syntax_highlighting/highlight.rs index e7d0a8be7f573..892e6a9bb0ab8 100644 --- a/crates/ide/src/syntax_highlighting/highlight.rs +++ b/crates/ide/src/syntax_highlighting/highlight.rs @@ -111,7 +111,7 @@ fn punctuation( let is_raw_ptr = (|| { let prefix_expr = parent.and_then(ast::PrefixExpr::cast)?; let expr = prefix_expr.expr()?; - sema.type_of_expr(&expr)?.original.is_raw_ptr().then(|| ()) + sema.type_of_expr(&expr)?.original.is_raw_ptr().then_some(()) })(); if let Some(()) = is_raw_ptr { HlTag::Operator(HlOperator::Other) | HlMod::Unsafe @@ -174,6 +174,7 @@ fn keyword( | T![return] | T![while] | T![yield] => h | HlMod::ControlFlow, + T![do] | T![yeet] if parent_matches::(&token) => h | HlMod::ControlFlow, T![for] if parent_matches::(&token) => h | HlMod::ControlFlow, T![unsafe] => h | HlMod::Unsafe, T![true] | T![false] => HlTag::BoolLiteral.into(), diff --git a/crates/ide/src/syntax_highlighting/html.rs b/crates/ide/src/syntax_highlighting/html.rs index e91fd7f125716..2c7823069b3f8 100644 --- a/crates/ide/src/syntax_highlighting/html.rs +++ b/crates/ide/src/syntax_highlighting/html.rs @@ -52,7 +52,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo let class = r.highlight.to_string().replace('.', " "); let color = match (rainbow, r.binding_hash) { (true, Some(hash)) => { - format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash)) + format!(" data-binding-hash=\"{hash}\" style=\"color: {};\"", rainbowify(hash)) } _ => "".into(), }; diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs index 46cc667fc454f..2f870d769c0fb 100644 --- a/crates/ide/src/syntax_highlighting/tests.rs +++ b/crates/ide/src/syntax_highlighting/tests.rs @@ -1028,6 +1028,26 @@ macro_rules! test {} let _ = analysis.highlight(HL_CONFIG, file_id).unwrap(); } +#[test] +fn highlight_callable_no_crash() { + // regression test for #13838. + let (analysis, file_id) = fixture::file( + r#" +//- minicore: fn, sized +impl FnOnce for &F +where + F: Fn, +{ + type Output = F::Output; +} + +trait Trait {} +fn foo(x: &fn(&dyn Trait)) {} +"#, + ); + let _ = analysis.highlight(HL_CONFIG, file_id).unwrap(); +} + /// Highlights the code given by the `ra_fixture` argument, renders the /// result as HTML, and compares it with the HTML file given as `snapshot`. /// Note that the `snapshot` file is overwritten by the rendered HTML. 
diff --git a/crates/ide/src/syntax_tree.rs b/crates/ide/src/syntax_tree.rs index 4256fea0f81e4..bb6827e8a44e4 100644 --- a/crates/ide/src/syntax_tree.rs +++ b/crates/ide/src/syntax_tree.rs @@ -32,7 +32,7 @@ pub(crate) fn syntax_tree( } }; - format!("{:#?}", node) + format!("{node:#?}") } else { format!("{:#?}", parse.tree().syntax()) } diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs index 9118f3c699c0c..eba5a485636e2 100644 --- a/crates/ide/src/typing.rs +++ b/crates/ide/src/typing.rs @@ -397,7 +397,7 @@ mod tests { fn type_char(char_typed: char, ra_fixture_before: &str, ra_fixture_after: &str) { let actual = do_type_char(char_typed, ra_fixture_before) - .unwrap_or_else(|| panic!("typing `{}` did nothing", char_typed)); + .unwrap_or_else(|| panic!("typing `{char_typed}` did nothing")); assert_eq_text!(ra_fixture_after, &actual); } diff --git a/crates/ide/src/typing/on_enter.rs b/crates/ide/src/typing/on_enter.rs index 48c1713270b6c..298482f2ab5c4 100644 --- a/crates/ide/src/typing/on_enter.rs +++ b/crates/ide/src/typing/on_enter.rs @@ -108,7 +108,7 @@ fn on_enter_in_comment( } let indent = node_indent(file, comment.syntax())?; - let inserted = format!("\n{}{} $0", indent, prefix); + let inserted = format!("\n{indent}{prefix} $0"); let delete = if remove_trailing_whitespace { let trimmed_len = comment.text().trim_end().len() as u32; let trailing_whitespace_len = comment.text().len() as u32 - trimmed_len; @@ -129,7 +129,7 @@ fn on_enter_in_block(block: ast::BlockExpr, position: FilePosition) -> Option let indent = IndentLevel::from_node(list.syntax()); let mut edit = TextEdit::insert(position.offset, format!("\n{}$0", indent + 1)); - edit.union(TextEdit::insert( - list.r_curly_token()?.text_range().start(), - format!("\n{}", indent), - )) - .ok()?; + edit.union(TextEdit::insert(list.r_curly_token()?.text_range().start(), format!("\n{indent}"))) + .ok()?; Some(edit) } diff --git a/crates/limit/src/lib.rs b/crates/limit/src/lib.rs index d6a706a7cd73a..6b2534aa4619d 100644 --- a/crates/limit/src/lib.rs +++ b/crates/limit/src/lib.rs @@ -59,7 +59,7 @@ impl Limit { .compare_exchange_weak(old_max, other, Ordering::Relaxed, Ordering::Relaxed) .is_ok() { - eprintln!("new max: {}", other); + eprintln!("new max: {other}"); } } diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 9c92bae6a1962..4b75002501878 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -101,7 +101,7 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri } try_cnt += 1; if try_cnt > 100 { - panic!("invocaton fixture {} cannot be generated.\n", name); + panic!("invocaton fixture {name} cannot be generated.\n"); } } } @@ -139,9 +139,15 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri } None => (), - Some(kind) => panic!("Unhandled kind {:?}", kind), + Some(kind) => panic!("Unhandled kind {kind:?}"), }, - Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()), + Op::Literal(it) => parent.token_trees.push(tt::Leaf::from(it.clone()).into()), + Op::Ident(it) => parent.token_trees.push(tt::Leaf::from(it.clone()).into()), + Op::Punct(puncts) => { + for punct in puncts { + parent.token_trees.push(tt::Leaf::from(punct.clone()).into()); + } + } Op::Repeat { tokens, kind, separator } => { let max = 10; let cnt = match kind { diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 3f656df25f7d4..88eae136f7329 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -68,7 +68,7 @@ use 
crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, parser::{MetaVarKind, Op, RepeatKind, Separator}, tt_iter::TtIter, - ExpandError, MetaTemplate, + ExpandError, MetaTemplate, ValueResult, }; impl Bindings { @@ -321,8 +321,8 @@ struct MatchState<'t> { /// The KleeneOp of this sequence if we are in a repetition. sep_kind: Option, - /// Number of tokens of separator parsed - sep_parsed: Option, + /// Whether we already matched separator token. + sep_matched: bool, /// Matched meta variables bindings bindings: BindingsIdx, @@ -387,7 +387,7 @@ fn match_loop_inner<'t>( None => { // We are at or past the end of the matcher of `item`. if let Some(up) = &item.up { - if item.sep_parsed.is_none() { + if !item.sep_matched { // Get the `up` matcher let mut new_pos = (**up).clone(); new_pos.bindings = bindings_builder.copy(&new_pos.bindings); @@ -401,14 +401,17 @@ fn match_loop_inner<'t>( } // Check if we need a separator. - // We check the separator one by one - let sep_idx = item.sep_parsed.unwrap_or(0); - let sep_len = item.sep.as_ref().map_or(0, Separator::tt_count); - if item.sep.is_some() && sep_idx != sep_len { + if item.sep.is_some() && !item.sep_matched { let sep = item.sep.as_ref().unwrap(); - if src.clone().expect_separator(sep, sep_idx) { + let mut fork = src.clone(); + if fork.expect_separator(sep) { + // HACK: here we use `meta_result` to pass `TtIter` back to caller because + // it might have been advanced multiple times. `ValueResult` is + // insignificant. + item.meta_result = Some((fork, ValueResult::ok(None))); item.dot.next(); - item.sep_parsed = Some(sep_idx + 1); + // item.sep_parsed = Some(sep_len); + item.sep_matched = true; try_push!(next_items, item); } } @@ -416,7 +419,7 @@ fn match_loop_inner<'t>( // and try to match again UNLESS we are only allowed to have _one_ repetition. 
else if item.sep_kind != Some(RepeatKind::ZeroOrOne) { item.dot = item.dot.reset(); - item.sep_parsed = None; + item.sep_matched = false; bindings_builder.push_default(&mut item.bindings); cur_items.push(item); } @@ -451,7 +454,7 @@ fn match_loop_inner<'t>( up: Some(Box::new(item)), sep: separator.clone(), sep_kind: Some(*kind), - sep_parsed: None, + sep_matched: false, bindings: bindings_builder.alloc(), meta_result: None, is_error: false, @@ -500,18 +503,69 @@ fn match_loop_inner<'t>( } } } - OpDelimited::Op(Op::Leaf(leaf)) => { - if let Err(err) = match_leaf(leaf, &mut src.clone()) { - res.add_err(err); + OpDelimited::Op(Op::Literal(lhs)) => { + if let Ok(rhs) = src.clone().expect_leaf() { + if matches!(rhs, tt::Leaf::Literal(it) if it.text == lhs.text) { + item.dot.next(); + } else { + res.add_err(ExpandError::UnexpectedToken); + item.is_error = true; + } + } else { + res.add_err(ExpandError::binding_error(format!("expected literal: `{lhs}`"))); item.is_error = true; + } + try_push!(next_items, item); + } + OpDelimited::Op(Op::Ident(lhs)) => { + if let Ok(rhs) = src.clone().expect_leaf() { + if matches!(rhs, tt::Leaf::Ident(it) if it.text == lhs.text) { + item.dot.next(); + } else { + res.add_err(ExpandError::UnexpectedToken); + item.is_error = true; + } } else { - item.dot.next(); + res.add_err(ExpandError::binding_error(format!("expected ident: `{lhs}`"))); + item.is_error = true; } try_push!(next_items, item); } + OpDelimited::Op(Op::Punct(lhs)) => { + let mut fork = src.clone(); + let error = if let Ok(rhs) = fork.expect_glued_punct() { + let first_is_single_quote = rhs[0].char == '\''; + let lhs = lhs.iter().map(|it| it.char); + let rhs = rhs.iter().map(|it| it.char); + if lhs.clone().eq(rhs) { + // HACK: here we use `meta_result` to pass `TtIter` back to caller because + // it might have been advanced multiple times. `ValueResult` is + // insignificant. + item.meta_result = Some((fork, ValueResult::ok(None))); + item.dot.next(); + next_items.push(item); + continue; + } + + if first_is_single_quote { + // If the first punct token is a single quote, that's a part of a lifetime + // ident, not a punct. + ExpandError::UnexpectedToken + } else { + let lhs: SmolStr = lhs.collect(); + ExpandError::binding_error(format!("expected punct: `{lhs}`")) + } + } else { + ExpandError::UnexpectedToken + }; + + res.add_err(error); + item.is_error = true; + error_items.push(item); + } OpDelimited::Op(Op::Ignore { .. } | Op::Index { .. }) => {} OpDelimited::Open => { - if matches!(src.clone().next(), Some(tt::TokenTree::Subtree(..))) { + if matches!(src.peek_n(0), Some(tt::TokenTree::Subtree(..))) { item.dot.next(); try_push!(next_items, item); } @@ -541,7 +595,7 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { up: None, sep: None, sep_kind: None, - sep_parsed: None, + sep_matched: false, bindings: bindings_builder.alloc(), is_error: false, meta_result: None, @@ -616,21 +670,33 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { } // Dump all possible `next_items` into `cur_items` for the next iteration. else if !next_items.is_empty() { - // Now process the next token - cur_items.extend(next_items.drain(..)); - - match src.next() { - Some(tt::TokenTree::Subtree(subtree)) => { - stack.push(src.clone()); - src = TtIter::new(subtree); + if let Some((iter, _)) = next_items[0].meta_result.take() { + // We've matched a possibly "glued" punct. The matched punct (hence + // `meta_result` also) must be the same for all items. 
+ // FIXME: If there are multiple items, it's definitely redundant (and it's hacky! + // `meta_result` isn't supposed to be used this way). + + // We already bumped, so no need to call `.next()` like in the other branch. + src = iter; + for item in next_items.iter_mut() { + item.meta_result = None; } - None => { - if let Some(iter) = stack.pop() { - src = iter; + } else { + match src.next() { + Some(tt::TokenTree::Subtree(subtree)) => { + stack.push(src.clone()); + src = TtIter::new(subtree); } + None => { + if let Some(iter) = stack.pop() { + src = iter; + } + } + _ => (), } - _ => (), } + // Now process the next token + cur_items.extend(next_items.drain(..)); } // Finally, we have the case where we need to call the black-box parser to get some // nonterminal. @@ -663,27 +729,6 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { } } -fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter<'_>) -> Result<(), ExpandError> { - let rhs = src - .expect_leaf() - .map_err(|()| ExpandError::binding_error(format!("expected leaf: `{lhs}`")))?; - match (lhs, rhs) { - ( - tt::Leaf::Punct(tt::Punct { char: lhs, .. }), - tt::Leaf::Punct(tt::Punct { char: rhs, .. }), - ) if lhs == rhs => Ok(()), - ( - tt::Leaf::Ident(tt::Ident { text: lhs, .. }), - tt::Leaf::Ident(tt::Ident { text: rhs, .. }), - ) if lhs == rhs => Ok(()), - ( - tt::Leaf::Literal(tt::Literal { text: lhs, .. }), - tt::Leaf::Literal(tt::Literal { text: rhs, .. }), - ) if lhs == rhs => Ok(()), - _ => Err(ExpandError::UnexpectedToken), - } -} - fn match_meta_var(kind: MetaVarKind, input: &mut TtIter<'_>) -> ExpandResult> { let fragment = match kind { MetaVarKind::Path => parser::PrefixEntryPoint::Path, @@ -698,12 +743,16 @@ fn match_meta_var(kind: MetaVarKind, input: &mut TtIter<'_>) -> ExpandResult parser::PrefixEntryPoint::Item, MetaVarKind::Vis => parser::PrefixEntryPoint::Vis, MetaVarKind::Expr => { - // `expr` should not match underscores. + // `expr` should not match underscores, let expressions, or inline const. The latter + // two are for [backwards compatibility][0]. // HACK: Macro expansion should not be done using "rollback and try another alternative". - // rustc [explicitly checks the next token][0]. - // [0]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576 + // rustc [explicitly checks the next token][1]. + // [0]: https://github.com/rust-lang/rust/issues/86730 + // [1]: https://github.com/rust-lang/rust/blob/f0c4da499/compiler/rustc_expand/src/mbe/macro_parser.rs#L576 match input.peek_n(0) { - Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it))) if it.text == "_" => { + Some(tt::TokenTree::Leaf(tt::Leaf::Ident(it))) + if it.text == "_" || it.text == "let" || it.text == "const" => + { return ExpandResult::only_err(ExpandError::NoMatchingRule) } _ => {} @@ -752,10 +801,10 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) for op in pattern.iter() { match op { Op::Var { name, .. } => collector_fun(name.clone()), - Op::Leaf(_) => (), Op::Subtree { tokens, .. } => collect_vars(collector_fun, tokens), Op::Repeat { tokens, .. } => collect_vars(collector_fun, tokens), - Op::Ignore { .. } | Op::Index { .. } => {} + Op::Ignore { .. } | Op::Index { .. 
} | Op::Literal(_) | Op::Ident(_) | Op::Punct(_) => { + } } } } @@ -818,14 +867,14 @@ impl<'a> Iterator for OpDelimitedIter<'a> { } impl<'a> TtIter<'a> { - fn expect_separator(&mut self, separator: &Separator, idx: usize) -> bool { + fn expect_separator(&mut self, separator: &Separator) -> bool { let mut fork = self.clone(); let ok = match separator { - Separator::Ident(lhs) if idx == 0 => match fork.expect_ident_or_underscore() { + Separator::Ident(lhs) => match fork.expect_ident_or_underscore() { Ok(rhs) => rhs.text == lhs.text, Err(_) => false, }, - Separator::Literal(lhs) if idx == 0 => match fork.expect_literal() { + Separator::Literal(lhs) => match fork.expect_literal() { Ok(rhs) => match rhs { tt::Leaf::Literal(rhs) => rhs.text == lhs.text, tt::Leaf::Ident(rhs) => rhs.text == lhs.text, @@ -833,11 +882,14 @@ impl<'a> TtIter<'a> { }, Err(_) => false, }, - Separator::Puncts(lhss) if idx < lhss.len() => match fork.expect_punct() { - Ok(rhs) => rhs.char == lhss[idx].char, + Separator::Puncts(lhs) => match fork.expect_glued_punct() { + Ok(rhs) => { + let lhs = lhs.iter().map(|it| it.char); + let rhs = rhs.iter().map(|it| it.char); + lhs.eq(rhs) + } Err(_) => false, }, - _ => false, }; if ok { *self = fork; @@ -846,52 +898,21 @@ impl<'a> TtIter<'a> { } fn expect_tt(&mut self) -> Result { - match self.peek_n(0) { - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => { - return self.expect_lifetime(); - } - _ => (), - } - - let tt = self.next().ok_or(())?.clone(); - let punct = match tt { - tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => { - punct - } - _ => return Ok(tt), - }; - - let (second, third) = match (self.peek_n(0), self.peek_n(1)) { - ( - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), - Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))), - ) if p2.spacing == tt::Spacing::Joint => (p2.char, Some(p3.char)), - (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2.char, None), - _ => return Ok(tt), - }; - - match (punct.char, second, third) { - ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { - let tt2 = self.next().unwrap().clone(); - let tt3 = self.next().unwrap().clone(); - Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2, tt3] }.into()) - } - ('-' | '!' 
| '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) - | ('-' | '=' | '>', '>', _) - | (':', ':', _) - | ('.', '.', _) - | ('&', '&', _) - | ('<', '<', _) - | ('|', '|', _) => { - let tt2 = self.next().unwrap().clone(); - Ok(tt::Subtree { delimiter: None, token_trees: vec![tt, tt2] }.into()) + if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) { + if punct.char == '\'' { + self.expect_lifetime() + } else { + let puncts = self.expect_glued_punct()?; + let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect(); + Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter: None, token_trees })) } - _ => Ok(tt), + } else { + self.next().ok_or(()).cloned() } } fn expect_lifetime(&mut self) -> Result { - let punct = self.expect_punct()?; + let punct = self.expect_single_punct()?; if punct.char != '\'' { return Err(()); } diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index cbb59ab8e67b5..db0d327bf409b 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -134,7 +134,13 @@ fn expand_subtree( let mut err = None; for op in template.iter() { match op { - Op::Leaf(tt) => arena.push(tt.clone().into()), + Op::Literal(it) => arena.push(tt::Leaf::from(it.clone()).into()), + Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()), + Op::Punct(puncts) => { + for punct in puncts { + arena.push(tt::Leaf::from(punct.clone()).into()); + } + } Op::Subtree { tokens, delimiter } => { let ExpandResult { value: tt, err: e } = expand_subtree(ctx, tokens, *delimiter, arena); diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index c4f0fa20d6de0..2373db97a3e41 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -140,7 +140,7 @@ impl Shift { | tt::Leaf::Punct(tt::Punct { id, .. }) | tt::Leaf::Literal(tt::Literal { id, .. })) = leaf; - (id != tt::TokenId::unspecified()).then(|| id.0) + (id != tt::TokenId::unspecified()).then_some(id.0) } }; subtree.token_trees.iter().filter_map(filter).max() diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index 351c359b73c87..fad905e97f456 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -1,7 +1,7 @@ //! Parser recognizes special macro syntax, `$var` and `$(repeat)*`, in token //! trees. -use smallvec::SmallVec; +use smallvec::{smallvec, SmallVec}; use syntax::SmolStr; use crate::{tt_iter::TtIter, ParseError}; @@ -39,7 +39,7 @@ impl MetaTemplate { let mut src = TtIter::new(tt); let mut res = Vec::new(); - while let Some(first) = src.next() { + while let Some(first) = src.peek_n(0) { let op = next_op(first, &mut src, mode)?; res.push(op); } @@ -54,8 +54,10 @@ pub(crate) enum Op { Ignore { name: SmolStr, id: tt::TokenId }, Index { depth: u32 }, Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option }, - Leaf(tt::Leaf), Subtree { tokens: MetaTemplate, delimiter: Option }, + Literal(tt::Literal), + Punct(SmallVec<[tt::Punct; 3]>), + Ident(tt::Ident), } #[derive(Copy, Clone, Debug, PartialEq, Eq)] @@ -108,28 +110,23 @@ impl PartialEq for Separator { } } -impl Separator { - pub(crate) fn tt_count(&self) -> usize { - match self { - Separator::Literal(_) => 1, - Separator::Ident(_) => 1, - Separator::Puncts(it) => it.len(), - } - } -} - #[derive(Clone, Copy)] enum Mode { Pattern, Template, } -fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Result { - let res = match first { - tt::TokenTree::Leaf(leaf @ tt::Leaf::Punct(tt::Punct { char: '$', .. 
})) => { +fn next_op( + first_peeked: &tt::TokenTree, + src: &mut TtIter<'_>, + mode: Mode, +) -> Result { + let res = match first_peeked { + tt::TokenTree::Leaf(tt::Leaf::Punct(p @ tt::Punct { char: '$', .. })) => { + src.next().expect("first token already peeked"); // Note that the '$' itself is a valid token inside macro_rules. let second = match src.next() { - None => return Ok(Op::Leaf(leaf.clone())), + None => return Ok(Op::Punct(smallvec![p.clone()])), Some(it) => it, }; match second { @@ -160,7 +157,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul tt::TokenTree::Leaf(leaf) => match leaf { tt::Leaf::Ident(ident) if ident.text == "crate" => { // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. - Op::Leaf(tt::Leaf::from(tt::Ident { text: "$crate".into(), id: ident.id })) + Op::Ident(tt::Ident { text: "$crate".into(), id: ident.id }) } tt::Leaf::Ident(ident) => { let kind = eat_fragment_kind(src, mode)?; @@ -180,7 +177,7 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul "`$$` is not allowed on the pattern side", )) } - Mode::Template => Op::Leaf(tt::Leaf::Punct(*punct)), + Mode::Template => Op::Punct(smallvec![*punct]), }, tt::Leaf::Punct(_) | tt::Leaf::Literal(_) => { return Err(ParseError::expected("expected ident")) @@ -188,8 +185,25 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul }, } } - tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()), + + tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => { + src.next().expect("first token already peeked"); + Op::Literal(it.clone()) + } + + tt::TokenTree::Leaf(tt::Leaf::Ident(it)) => { + src.next().expect("first token already peeked"); + Op::Ident(it.clone()) + } + + tt::TokenTree::Leaf(tt::Leaf::Punct(_)) => { + // There's at least one punct so this shouldn't fail. 
+ let puncts = src.expect_glued_punct().unwrap(); + Op::Punct(puncts) + } + tt::TokenTree::Subtree(subtree) => { + src.next().expect("first token already peeked"); let tokens = MetaTemplate::parse(subtree, mode)?; Op::Subtree { tokens, delimiter: subtree.delimiter } } @@ -259,7 +273,7 @@ fn parse_repeat(src: &mut TtIter<'_>) -> Result<(Option, RepeatKind), _ => return Err(ParseError::InvalidRepeat), }, }; - return Ok((has_sep.then(|| separator), repeat_kind)); + return Ok((has_sep.then_some(separator), repeat_kind)); } } } diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index cf53c16726bf7..5c965055634eb 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -145,7 +145,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec { } if iter.peek_n(0).is_some() { - res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() }); + res.push(tt::Subtree { delimiter: None, token_trees: iter.cloned().collect() }); } res @@ -237,7 +237,7 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { let char = match token.to_char(conv) { Some(c) => c, None => { - panic!("Token from lexer must be single char: token = {:#?}", token); + panic!("Token from lexer must be single char: token = {token:#?}"); } }; tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) }) diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index 4e04d2bc1c77b..c1a6083655822 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ b/crates/mbe/src/syntax_bridge/tests.rs @@ -19,7 +19,7 @@ fn check_punct_spacing(fixture: &str) { let spacing = match annotation.as_str() { "Alone" => Spacing::Alone, "Joint" => Spacing::Joint, - a => panic!("unknown annotation: {}", a), + a => panic!("unknown annotation: {a}"), }; (token, spacing) }) @@ -30,7 +30,7 @@ fn check_punct_spacing(fixture: &str) { while !cursor.eof() { while let Some(token_tree) = cursor.token_tree() { if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, id, .. }), _) = token_tree { - if let Some(expected) = annotations.remove(&id) { + if let Some(expected) = annotations.remove(id) { assert_eq!(expected, *spacing); } } @@ -39,7 +39,7 @@ fn check_punct_spacing(fixture: &str) { cursor = cursor.bump(); } - assert!(annotations.is_empty(), "unchecked annotations: {:?}", annotations); + assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}"); } #[test] diff --git a/crates/mbe/src/to_parser_input.rs b/crates/mbe/src/to_parser_input.rs index 783c3ca4a89f7..7013aa58b55dc 100644 --- a/crates/mbe/src/to_parser_input.rs +++ b/crates/mbe/src/to_parser_input.rs @@ -60,7 +60,7 @@ pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input { }, tt::Leaf::Punct(punct) => { let kind = SyntaxKind::from_char(punct.char) - .unwrap_or_else(|| panic!("{:#?} is not a valid punct", punct)); + .unwrap_or_else(|| panic!("{punct:#?} is not a valid punct")); res.push(kind); if punct.spacing == tt::Spacing::Joint { res.was_joint(); diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index 7aceb676c749e..bee7b5de6ac31 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -1,6 +1,7 @@ //! A "Parser" structure for token trees. We use this when parsing a declarative //! macro definition into a list of patterns and templates. 
+use smallvec::{smallvec, SmallVec}; use syntax::SyntaxKind; use tt::buffer::TokenBuffer; @@ -80,13 +81,56 @@ impl<'a> TtIter<'a> { } } - pub(crate) fn expect_punct(&mut self) -> Result<&'a tt::Punct, ()> { + pub(crate) fn expect_single_punct(&mut self) -> Result<&'a tt::Punct, ()> { match self.expect_leaf()? { tt::Leaf::Punct(it) => Ok(it), _ => Err(()), } } + /// Returns consecutive `Punct`s that can be glued together. + /// + /// This method currently may return a single quotation, which is part of lifetime ident and + /// conceptually not a punct in the context of mbe. Callers should handle this. + pub(crate) fn expect_glued_punct(&mut self) -> Result, ()> { + let tt::TokenTree::Leaf(tt::Leaf::Punct(first)) = self.next().ok_or(())?.clone() else { + return Err(()); + }; + + if first.spacing == tt::Spacing::Alone { + return Ok(smallvec![first]); + } + + let (second, third) = match (self.peek_n(0), self.peek_n(1)) { + ( + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p3))), + ) if p2.spacing == tt::Spacing::Joint => (p2, Some(p3)), + (Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p2))), _) => (p2, None), + _ => return Ok(smallvec![first]), + }; + + match (first.char, second.char, third.map(|it| it.char)) { + ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { + let _ = self.next().unwrap(); + let _ = self.next().unwrap(); + Ok(smallvec![first, second.clone(), third.unwrap().clone()]) + } + ('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) + | ('-' | '=' | '>', '>', _) + | ('<', '-', _) + | (':', ':', _) + | ('.', '.', _) + | ('&', '&', _) + | ('<', '<', _) + | ('|', '|', _) => { + let _ = self.next().unwrap(); + Ok(smallvec![first, second.clone()]) + } + _ => Ok(smallvec![first]), + } + } + pub(crate) fn expect_fragment( &mut self, entry_point: parser::PrefixEntryPoint, @@ -141,7 +185,7 @@ impl<'a> TtIter<'a> { ExpandResult { value: res, err } } - pub(crate) fn peek_n(&self, n: usize) -> Option<&tt::TokenTree> { + pub(crate) fn peek_n(&self, n: usize) -> Option<&'a tt::TokenTree> { self.inner.as_slice().get(n) } } diff --git a/crates/parser/src/grammar.rs b/crates/parser/src/grammar.rs index b7468329610a7..485b612f08187 100644 --- a/crates/parser/src/grammar.rs +++ b/crates/parser/src/grammar.rs @@ -51,7 +51,7 @@ pub(crate) mod entry { use super::*; pub(crate) fn vis(p: &mut Parser<'_>) { - let _ = opt_visibility(p, false); + opt_visibility(p, false); } pub(crate) fn block(p: &mut Parser<'_>) { @@ -70,10 +70,10 @@ pub(crate) mod entry { types::type_(p); } pub(crate) fn expr(p: &mut Parser<'_>) { - let _ = expressions::expr(p); + expressions::expr(p); } pub(crate) fn path(p: &mut Parser<'_>) { - let _ = paths::type_path(p); + paths::type_path(p); } pub(crate) fn item(p: &mut Parser<'_>) { items::item_or_macro(p, true); diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index dcaceade652ab..8932330b825db 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -288,7 +288,7 @@ fn expr_bp( } const LHS_FIRST: TokenSet = - atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-]])); + atom::ATOM_EXPR_FIRST.union(TokenSet::new(&[T![&], T![*], T![!], T![.], T![-], T![_]])); fn lhs(p: &mut Parser<'_>, r: Restrictions) -> Option<(CompletedMarker, BlockLike)> { let m; diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index 
99f42a2662348..efa3997353bf8 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -48,6 +48,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = T![unsafe], T![return], T![yield], + T![do], T![break], T![continue], T![async], @@ -93,6 +94,7 @@ pub(super) fn atom_expr( T![match] => match_expr(p), T![return] => return_expr(p), T![yield] => yield_expr(p), + T![do] if p.nth_at_contextual_kw(1, T![yeet]) => yeet_expr(p), T![continue] => continue_expr(p), T![break] => break_expr(p, r), @@ -278,6 +280,8 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker { // fn main() { || -> i32 { 92 }(); } block_expr(p); } else if p.at_ts(EXPR_FIRST) { + // test closure_body_underscore_assignment + // fn main() { || _ = 0; } expr(p); } else { p.error("expected expression"); @@ -531,6 +535,7 @@ fn return_expr(p: &mut Parser<'_>) -> CompletedMarker { } m.complete(p, RETURN_EXPR) } + // test yield_expr // fn foo() { // yield; @@ -546,6 +551,23 @@ fn yield_expr(p: &mut Parser<'_>) -> CompletedMarker { m.complete(p, YIELD_EXPR) } +// test yeet_expr +// fn foo() { +// do yeet; +// do yeet 1 +// } +fn yeet_expr(p: &mut Parser<'_>) -> CompletedMarker { + assert!(p.at(T![do])); + assert!(p.nth_at_contextual_kw(1, T![yeet])); + let m = p.start(); + p.bump(T![do]); + p.bump_remap(T![yeet]); + if p.at_ts(EXPR_FIRST) { + expr(p); + } + m.complete(p, YEET_EXPR) +} + // test continue_expr // fn foo() { // loop { diff --git a/crates/parser/src/grammar/paths.rs b/crates/parser/src/grammar/paths.rs index 5dc9c6c82a14e..af3b6f63cf51c 100644 --- a/crates/parser/src/grammar/paths.rs +++ b/crates/parser/src/grammar/paths.rs @@ -83,11 +83,12 @@ fn path_segment(p: &mut Parser<'_>, mode: Mode, first: bool) { } p.expect(T![>]); } else { - let mut empty = true; - if first { + let empty = if first { p.eat(T![::]); - empty = false; - } + false + } else { + true + }; match p.current() { IDENT => { name_ref(p); diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs index bc1224af9b212..abcefffa23f01 100644 --- a/crates/parser/src/grammar/patterns.rs +++ b/crates/parser/src/grammar/patterns.rs @@ -62,39 +62,50 @@ fn pattern_r(p: &mut Parser<'_>, recovery_set: TokenSet) { } fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) { - if let Some(lhs) = atom_pat(p, recovery_set) { - // test range_pat - // fn main() { - // match 92 { - // 0 ... 100 => (), - // 101 ..= 200 => (), - // 200 .. 301 => (), - // 302 .. => (), - // } - // - // match Some(10 as u8) { - // Some(0) | None => (), - // Some(1..) => () - // } - // - // match () { - // S { a: 0 } => (), - // S { a: 1.. } => (), - // } - // - // match () { - // [0] => (), - // [1..] => (), - // } - // - // match (10 as u8, 5 as u8) { - // (0, _) => (), - // (1.., _) => () - // } - // } + // test range_pat + // fn main() { + // match 92 { + // 0 ... 100 => (), + // 101 ..= 200 => (), + // 200 .. 301 => (), + // 302 .. => (), + // ..= 303 => (), + // } + // + // match Some(10 as u8) { + // Some(0) | None => (), + // Some(1..) => (), + // Some(..=2) => (), + // } + // + // match () { + // S { a: 0 } => (), + // S { a: 1.. } => (), + // S { a: ..=2 } => (), + // } + // + // match () { + // [0] => (), + // [1..] 
=> (), + // [..=2] => (), + // } + // + // match (10 as u8, 5 as u8) { + // (0, _) => (), + // (1.., _) => (), + // (..=2, _) => (), + // } + // } + + if p.at(T![..=]) { + let m = p.start(); + p.bump(T![..=]); + atom_pat(p, recovery_set); + m.complete(p, RANGE_PAT); + return; + } - // FIXME: support half_open_range_patterns (`..=2`), - // exclusive_range_pattern (`..5`) with missing lhs + if let Some(lhs) = atom_pat(p, recovery_set) { for range_op in [T![...], T![..=], T![..]] { if p.at(range_op) { let m = lhs.precede(p); @@ -115,11 +126,21 @@ fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) { // ^ // `[0..]` // ^ - if matches!(p.current(), T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']']) { + // `0 .. if` + // ^ + if matches!( + p.current(), + T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']'] | T![if] + ) { // test half_open_range_pat // fn f() { // let 0 .. = 1u32; // let 0..: _ = 1u32; + // + // match 42 { + // 0 .. if true => (), + // _ => (), + // } // } } else { atom_pat(p, recovery_set); diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs index f4b9988eacb0c..b48921f19177f 100644 --- a/crates/parser/src/lexed_str.rs +++ b/crates/parser/src/lexed_str.rs @@ -57,7 +57,7 @@ impl<'a> LexedStr<'a> { let mut conv = Converter::new(text); conv.extend_token(&token.kind, text); match &*conv.res.kind { - [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg.clone()))), + [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg))), _ => None, } } diff --git a/crates/parser/src/output.rs b/crates/parser/src/output.rs index e9ec9822d68c3..6ca841cfe0732 100644 --- a/crates/parser/src/output.rs +++ b/crates/parser/src/output.rs @@ -54,7 +54,7 @@ impl Output { } pub(crate) fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { - let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | (0 << 4) | 1; + let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | 1; self.event.push(e) } diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs index 48d8350e07ee8..48aecb35be128 100644 --- a/crates/parser/src/parser.rs +++ b/crates/parser/src/parser.rs @@ -148,11 +148,16 @@ impl<'t> Parser<'t> { kinds.contains(self.current()) } - /// Checks if the current token is contextual keyword with text `t`. + /// Checks if the current token is contextual keyword `kw`. pub(crate) fn at_contextual_kw(&self, kw: SyntaxKind) -> bool { self.inp.contextual_kind(self.pos) == kw } + /// Checks if the nth token is contextual keyword `kw`. + pub(crate) fn nth_at_contextual_kw(&self, n: usize, kw: SyntaxKind) -> bool { + self.inp.contextual_kind(self.pos + n) == kw + } + /// Starts a new node in the syntax tree. All nodes and tokens /// consumed between the `start` and the corresponding `Marker::complete` /// belong to the same node. @@ -162,7 +167,7 @@ impl<'t> Parser<'t> { Marker::new(pos) } - /// Consume the next token if `kind` matches. + /// Consume the next token. Panics if the parser isn't currently at `kind`. 
pub(crate) fn bump(&mut self, kind: SyntaxKind) { assert!(self.eat(kind)); } @@ -205,7 +210,7 @@ impl<'t> Parser<'t> { if self.eat(kind) { return true; } - self.error(format!("expected {:?}", kind)); + self.error(format!("expected {kind:?}")); false } @@ -237,6 +242,7 @@ impl<'t> Parser<'t> { fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) { self.pos += n_raw_tokens as usize; + self.steps.set(0); self.push_event(Event::Token { kind, n_raw_tokens }); } diff --git a/crates/parser/src/shortcuts.rs b/crates/parser/src/shortcuts.rs index 4b805faddcba9..2be4050d13579 100644 --- a/crates/parser/src/shortcuts.rs +++ b/crates/parser/src/shortcuts.rs @@ -80,8 +80,8 @@ impl<'a> LexedStr<'a> { State::PendingEnter | State::Normal => unreachable!(), } - let is_eof = builder.pos == builder.lexed.len(); - is_eof + // is_eof? + builder.pos == builder.lexed.len() } } diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index c84f45f1f8e4e..52b3fc23d59ca 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -69,6 +69,7 @@ pub enum SyntaxKind { CONST_KW, CONTINUE_KW, CRATE_KW, + DO_KW, DYN_KW, ELSE_KW, ENUM_KW, @@ -109,6 +110,7 @@ pub enum SyntaxKind { UNION_KW, RAW_KW, MACRO_RULES_KW, + YEET_KW, INT_NUMBER, FLOAT_NUMBER, CHAR, @@ -188,6 +190,7 @@ pub enum SyntaxKind { STMT_LIST, RETURN_EXPR, YIELD_EXPR, + YEET_EXPR, LET_EXPR, UNDERSCORE_EXPR, MACRO_EXPR, @@ -272,6 +275,7 @@ impl SyntaxKind { | CONST_KW | CONTINUE_KW | CRATE_KW + | DO_KW | DYN_KW | ELSE_KW | ENUM_KW @@ -312,6 +316,7 @@ impl SyntaxKind { | UNION_KW | RAW_KW | MACRO_RULES_KW + | YEET_KW ) } pub fn is_punct(self) -> bool { @@ -384,6 +389,7 @@ impl SyntaxKind { "const" => CONST_KW, "continue" => CONTINUE_KW, "crate" => CRATE_KW, + "do" => DO_KW, "dyn" => DYN_KW, "else" => ELSE_KW, "enum" => ENUM_KW, @@ -430,6 +436,7 @@ impl SyntaxKind { "union" => UNION_KW, "raw" => RAW_KW, "macro_rules" => MACRO_RULES_KW, + "yeet" => YEET_KW, _ => return None, }; Some(kw) @@ -470,5 +477,5 @@ impl SyntaxKind { } } #[macro_export] -macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] 
=> { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: 
RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } +macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => 
{ $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } pub use T; diff --git a/crates/parser/src/tests.rs b/crates/parser/src/tests.rs index 735c0b3e40204..c1b4e9a7d8aec 100644 --- a/crates/parser/src/tests.rs +++ b/crates/parser/src/tests.rs @@ -37,8 +37,8 @@ fn lex(text: &str) -> String { let text = lexed.text(i); let error = lexed.error(i); - let error = error.map(|err| format!(" error: {}", err)).unwrap_or_default(); - writeln!(res, "{:?} {:?}{}", kind, text, error).unwrap(); + let error = error.map(|err| format!(" error: {err}")).unwrap_or_default(); + writeln!(res, "{kind:?} {text:?}{error}").unwrap(); } res } @@ -47,7 +47,7 @@ fn lex(text: &str) -> String { fn parse_ok() { for case in TestCase::list("parser/ok") { let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); - assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual); + assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual); } } @@ -56,7 +56,7 @@ fn parse_ok() { fn parse_inline_ok() { for case in TestCase::list("parser/inline/ok") { let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); - assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual); + assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual); } } @@ -65,7 +65,7 @@ fn parse_inline_ok() { fn parse_err() { for case in TestCase::list("parser/err") { let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); - assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual); + assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual) } } @@ -74,7 +74,7 @@ fn parse_err() { fn parse_inline_err() { for case in TestCase::list("parser/inline/err") { let 
(actual, errors) = parse(TopEntryPoint::SourceFile, &case.text); - assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual); + assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display()); expect_file![case.rast].assert_eq(&actual) } } @@ -93,14 +93,12 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) { crate::StrStep::Token { kind, text } => { assert!(depth > 0); len += text.len(); - write!(buf, "{}", indent).unwrap(); - write!(buf, "{:?} {:?}\n", kind, text).unwrap(); + writeln!(buf, "{indent}{kind:?} {text:?}").unwrap(); } crate::StrStep::Enter { kind } => { assert!(depth > 0 || len == 0); depth += 1; - write!(buf, "{}", indent).unwrap(); - write!(buf, "{:?}\n", kind).unwrap(); + writeln!(buf, "{indent}{kind:?}").unwrap(); indent.push_str(" "); } crate::StrStep::Exit => { @@ -111,7 +109,7 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) { } crate::StrStep::Error { msg, pos } => { assert!(depth > 0); - errors.push(format!("error {}: {}\n", pos, msg)) + errors.push(format!("error {pos}: {msg}\n")) } }); assert_eq!( @@ -124,7 +122,7 @@ fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) { for (token, msg) in lexed.errors() { let pos = lexed.text_start(token); - errors.push(format!("error {}: {}\n", pos, msg)); + errors.push(format!("error {pos}: {msg}\n")); } let has_errors = !errors.is_empty(); @@ -149,7 +147,7 @@ impl TestCase { let mut res = Vec::new(); let read_dir = fs::read_dir(&dir) - .unwrap_or_else(|err| panic!("can't `read_dir` {}: {}", dir.display(), err)); + .unwrap_or_else(|err| panic!("can't `read_dir` {}: {err}", dir.display())); for file in read_dir { let file = file.unwrap(); let path = file.path(); diff --git a/crates/parser/src/tests/sourcegen_inline_tests.rs b/crates/parser/src/tests/sourcegen_inline_tests.rs index 7b2b703deb699..54e85c07344b2 100644 --- a/crates/parser/src/tests/sourcegen_inline_tests.rs +++ b/crates/parser/src/tests/sourcegen_inline_tests.rs @@ -23,7 +23,7 @@ fn sourcegen_parser_tests() { // ok is never actually read, but it needs to be specified to create a Test in existing_tests let existing = existing_tests(&tests_dir, true); for t in existing.keys().filter(|&t| !tests.contains_key(t)) { - panic!("Test is deleted: {}", t); + panic!("Test is deleted: {t}"); } let mut new_idx = existing.len() + 1; @@ -31,7 +31,7 @@ fn sourcegen_parser_tests() { let path = match existing.get(name) { Some((path, _test)) => path.clone(), None => { - let file_name = format!("{:04}_{}.rs", new_idx, name); + let file_name = format!("{new_idx:04}_{name}.rs"); new_idx += 1; tests_dir.join(file_name) } @@ -116,7 +116,7 @@ fn existing_tests(dir: &Path, ok: bool) -> HashMap { let text = fs::read_to_string(&path).unwrap(); let test = Test { name: name.clone(), text, ok }; if let Some(old) = res.insert(name, (path, test)) { - println!("Duplicate test: {:?}", old); + println!("Duplicate test: {old:?}"); } } res diff --git a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast index cfef5d3f95387..d9981c50719f3 100644 --- a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast +++ b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rast @@ -93,6 +93,21 @@ SOURCE_FILE L_PAREN "(" R_PAREN ")" COMMA "," + WHITESPACE "\n " + MATCH_ARM + RANGE_PAT + DOT2EQ "..=" + WHITESPACE " " + LITERAL_PAT + LITERAL + INT_NUMBER "303" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," 
WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n\n " @@ -169,6 +184,28 @@ SOURCE_FILE TUPLE_EXPR L_PAREN "(" R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + TUPLE_STRUCT_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "Some" + L_PAREN "(" + RANGE_PAT + DOT2EQ "..=" + LITERAL_PAT + LITERAL + INT_NUMBER "2" + R_PAREN ")" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n\n " @@ -240,6 +277,36 @@ SOURCE_FILE L_PAREN "(" R_PAREN ")" COMMA "," + WHITESPACE "\n " + MATCH_ARM + RECORD_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_PAT_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_PAT_FIELD + NAME_REF + IDENT "a" + COLON ":" + WHITESPACE " " + RANGE_PAT + DOT2EQ "..=" + LITERAL_PAT + LITERAL + INT_NUMBER "2" + WHITESPACE " " + R_CURLY "}" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n\n " @@ -285,6 +352,23 @@ SOURCE_FILE L_PAREN "(" R_PAREN ")" COMMA "," + WHITESPACE "\n " + MATCH_ARM + SLICE_PAT + L_BRACK "[" + RANGE_PAT + DOT2EQ "..=" + LITERAL_PAT + LITERAL + INT_NUMBER "2" + R_BRACK "]" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n\n " @@ -360,6 +444,28 @@ SOURCE_FILE TUPLE_EXPR L_PAREN "(" R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + TUPLE_PAT + L_PAREN "(" + RANGE_PAT + DOT2EQ "..=" + LITERAL_PAT + LITERAL + INT_NUMBER "2" + COMMA "," + WHITESPACE " " + WILDCARD_PAT + UNDERSCORE "_" + R_PAREN ")" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," WHITESPACE "\n " R_CURLY "}" WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs index 2411d51096b3b..b54354211d2dc 100644 --- a/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs +++ b/crates/parser/test_data/parser/inline/ok/0058_range_pat.rs @@ -4,25 +4,30 @@ fn main() { 101 ..= 200 => (), 200 .. 301 => (), 302 .. => (), + ..= 303 => (), } match Some(10 as u8) { Some(0) | None => (), - Some(1..) => () + Some(1..) => (), + Some(..=2) => (), } match () { S { a: 0 } => (), S { a: 1.. } => (), + S { a: ..=2 } => (), } match () { [0] => (), [1..] => (), + [..=2] => (), } match (10 as u8, 5 as u8) { (0, _) => (), - (1.., _) => () + (1.., _) => (), + (..=2, _) => (), } } diff --git a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast index 4b401b60df0ce..c85a685991104 100644 --- a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast +++ b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rast @@ -46,6 +46,49 @@ SOURCE_FILE LITERAL INT_NUMBER "1u32" SEMICOLON ";" + WHITESPACE "\n\n " + MATCH_EXPR + MATCH_KW "match" + WHITESPACE " " + LITERAL + INT_NUMBER "42" + WHITESPACE " " + MATCH_ARM_LIST + L_CURLY "{" + WHITESPACE "\n " + MATCH_ARM + RANGE_PAT + LITERAL_PAT + LITERAL + INT_NUMBER "0" + WHITESPACE " " + DOT2 ".." 
+ WHITESPACE " " + MATCH_GUARD + IF_KW "if" + WHITESPACE " " + LITERAL + TRUE_KW "true" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + MATCH_ARM + WILDCARD_PAT + UNDERSCORE "_" + WHITESPACE " " + FAT_ARROW "=>" + WHITESPACE " " + TUPLE_EXPR + L_PAREN "(" + R_PAREN ")" + COMMA "," + WHITESPACE "\n " + R_CURLY "}" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs index c9386a221a955..f7e2d07922ecb 100644 --- a/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs +++ b/crates/parser/test_data/parser/inline/ok/0166_half_open_range_pat.rs @@ -1,4 +1,9 @@ fn f() { let 0 .. = 1u32; let 0..: _ = 1u32; + + match 42 { + 0 .. if true => (), + _ => (), + } } diff --git a/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rast b/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rast new file mode 100644 index 0000000000000..8970922c97794 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rast @@ -0,0 +1,32 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE " " + EXPR_STMT + CLOSURE_EXPR + PARAM_LIST + PIPE "|" + PIPE "|" + WHITESPACE " " + BIN_EXPR + UNDERSCORE_EXPR + UNDERSCORE "_" + WHITESPACE " " + EQ "=" + WHITESPACE " " + LITERAL + INT_NUMBER "0" + SEMICOLON ";" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rs b/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rs new file mode 100644 index 0000000000000..9a34b63d29c8b --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0203_closure_body_underscore_assignment.rs @@ -0,0 +1 @@ +fn main() { || _ = 0; } diff --git a/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rast b/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rast new file mode 100644 index 0000000000000..24931bfcd7cd3 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rast @@ -0,0 +1,31 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + YEET_EXPR + DO_KW "do" + WHITESPACE " " + YEET_KW "yeet" + SEMICOLON ";" + WHITESPACE "\n " + YEET_EXPR + DO_KW "do" + WHITESPACE " " + YEET_KW "yeet" + WHITESPACE " " + LITERAL + INT_NUMBER "1" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs b/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs new file mode 100644 index 0000000000000..624f86c9dc0da --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0204_yeet_expr.rs @@ -0,0 +1,4 @@ +fn foo() { + do yeet; + do yeet 1 +} diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index a3ea05f4aff8e..7921fda331eed 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -60,7 +60,7 @@ impl MacroDylib { let info = version::read_dylib_info(&path)?; if info.version.0 < 1 || info.version.1 < 47 { - let msg = format!("proc-macro {} built by {:#?} is not supported by 
rust-analyzer, please update your Rust version.", path.display(), info); + let msg = format!("proc-macro {} built by {info:#?} is not supported by rust-analyzer, please update your Rust version.", path.display()); return Err(io::Error::new(io::ErrorKind::InvalidData, msg)); } diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index 268a03bb5359b..b178c46263e0a 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -137,7 +137,7 @@ impl SubtreeRepr { 1 => Some(tt::DelimiterKind::Parenthesis), 2 => Some(tt::DelimiterKind::Brace), 3 => Some(tt::DelimiterKind::Bracket), - other => panic!("bad kind {}", other), + other => panic!("bad kind {other}"), }; SubtreeRepr { id: TokenId(id), kind, tt: [lo, len] } } @@ -164,7 +164,7 @@ impl PunctRepr { let spacing = match spacing { 0 => tt::Spacing::Alone, 1 => tt::Spacing::Joint, - other => panic!("bad spacing {}", other), + other => panic!("bad spacing {other}"), }; PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing } } @@ -210,7 +210,7 @@ impl<'a> Writer<'a> { let idx_tag = match child { tt::TokenTree::Subtree(it) => { let idx = self.enqueue(it); - idx << 2 | 0b00 + idx << 2 } tt::TokenTree::Leaf(leaf) => match leaf { tt::Leaf::Literal(lit) => { @@ -312,7 +312,7 @@ impl Reader { }) .into() } - other => panic!("bad tag: {}", other), + other => panic!("bad tag: {other}"), } }) .collect(), diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index c4018d3b39e77..54dcb17f4e8b0 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -67,7 +67,7 @@ impl Process { args: impl IntoIterator>, ) -> io::Result { let args: Vec = args.into_iter().map(|s| s.as_ref().into()).collect(); - let child = JodChild(mk_child(&path, &args)?); + let child = JodChild(mk_child(&path, args)?); Ok(Process { child }) } diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-api/src/version.rs index 030531b80d7bb..40125c2a512ad 100644 --- a/crates/proc-macro-api/src/version.rs +++ b/crates/proc-macro-api/src/version.rs @@ -125,7 +125,7 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result { _ => { return Err(io::Error::new( io::ErrorKind::InvalidData, - format!("unsupported metadata version {}", version), + format!("unsupported metadata version {version}"), )); } } diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs index ed0e91da36178..e78842f5c37ec 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs @@ -286,7 +286,7 @@ impl BridgeState<'_> { BRIDGE_STATE.with(|state| { state.replace(BridgeState::InUse, |mut state| { // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone - f(&mut *state) + f(&mut state) }) }) } diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs index a405497f3c9b7..c5145d00e329e 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs @@ -877,7 +877,7 @@ impl Literal { /// example if it is infinity or NaN this function will panic. 
pub fn f32_unsuffixed(n: f32) -> Literal { if !n.is_finite() { - panic!("Invalid float literal {}", n); + panic!("Invalid float literal {n}"); } let mut repr = n.to_string(); if !repr.contains('.') { @@ -901,7 +901,7 @@ impl Literal { /// example if it is infinity or NaN this function will panic. pub fn f32_suffixed(n: f32) -> Literal { if !n.is_finite() { - panic!("Invalid float literal {}", n); + panic!("Invalid float literal {n}"); } Literal(bridge::client::Literal::f32(&n.to_string())) } @@ -920,7 +920,7 @@ impl Literal { /// example if it is infinity or NaN this function will panic. pub fn f64_unsuffixed(n: f64) -> Literal { if !n.is_finite() { - panic!("Invalid float literal {}", n); + panic!("Invalid float literal {n}"); } let mut repr = n.to_string(); if !repr.contains('.') { @@ -944,7 +944,7 @@ impl Literal { /// example if it is infinity or NaN this function will panic. pub fn f64_suffixed(n: f64) -> Literal { if !n.is_finite() { - panic!("Invalid float literal {}", n); + panic!("Invalid float literal {n}"); } Literal(bridge::client::Literal::f64(&n.to_string())) } diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs index b1e982f4779f7..22d4ad94f770e 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs @@ -471,8 +471,12 @@ impl server::Punct for RustAnalyzer { } impl server::Ident for RustAnalyzer { - fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident { - IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span }))) + fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident { + IdentId(self.ident_interner.intern(&IdentData(tt::Ident::new_with_is_raw( + string.into(), + span, + is_raw, + )))) } fn span(&mut self, ident: Self::Ident) -> Self::Span { @@ -544,13 +548,13 @@ impl server::Literal for RustAnalyzer { fn f32(&mut self, n: &str) -> Self::Literal { let n: f32 = n.parse().unwrap(); - let text = format!("{}f32", n); + let text = format!("{n}f32"); Literal { text: text.into(), id: tt::TokenId::unspecified() } } fn f64(&mut self, n: &str) -> Self::Literal { let n: f64 = n.parse().unwrap(); - let text = format!("{}f64", n); + let text = format!("{n}f64"); Literal { text: text.into(), id: tt::TokenId::unspecified() } } @@ -559,11 +563,11 @@ impl server::Literal for RustAnalyzer { for ch in string.chars() { escaped.extend(ch.escape_debug()); } - Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() } } fn character(&mut self, ch: char) -> Self::Literal { - Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() } } fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { @@ -574,7 +578,7 @@ impl server::Literal for RustAnalyzer { .map(Into::::into) .collect::(); - Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() } } fn span(&mut self, literal: &Self::Literal) -> Self::Span { diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs index 102027d14a984..b346c2c189698 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs +++ 
b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/bridge/client.rs @@ -301,7 +301,7 @@ impl BridgeState<'_> { BRIDGE_STATE.with(|state| { state.replace(BridgeState::InUse, |mut state| { // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone - f(&mut *state) + f(&mut state) }) }) } diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs index ed49cc7596608..f82f20c37bc3d 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs @@ -486,8 +486,12 @@ impl server::Punct for RustAnalyzer { } impl server::Ident for RustAnalyzer { - fn new(&mut self, string: &str, span: Self::Span, _is_raw: bool) -> Self::Ident { - IdentId(self.ident_interner.intern(&IdentData(tt::Ident { text: string.into(), id: span }))) + fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident { + IdentId(self.ident_interner.intern(&IdentData(tt::Ident::new_with_is_raw( + string.into(), + span, + is_raw, + )))) } fn span(&mut self, ident: Self::Ident) -> Self::Span { @@ -559,13 +563,13 @@ impl server::Literal for RustAnalyzer { fn f32(&mut self, n: &str) -> Self::Literal { let n: f32 = n.parse().unwrap(); - let text = format!("{}f32", n); + let text = format!("{n}f32"); Literal { text: text.into(), id: tt::TokenId::unspecified() } } fn f64(&mut self, n: &str) -> Self::Literal { let n: f64 = n.parse().unwrap(); - let text = format!("{}f64", n); + let text = format!("{n}f64"); Literal { text: text.into(), id: tt::TokenId::unspecified() } } @@ -574,11 +578,11 @@ impl server::Literal for RustAnalyzer { for ch in string.chars() { escaped.extend(ch.escape_debug()); } - Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() } } fn character(&mut self, ch: char) -> Self::Literal { - Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() } } fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { @@ -589,7 +593,7 @@ impl server::Literal for RustAnalyzer { .map(Into::::into) .collect::(); - Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() } + Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() } } fn span(&mut self, literal: &Self::Literal) -> Self::Span { diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs index e4e43e97dde82..068f79f824dca 100644 --- a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs @@ -107,8 +107,8 @@ impl server::TokenStream for RustAnalyzer { } bridge::TokenTree::Ident(ident) => { - // FIXME: handle raw idents let text = ident.sym.text(); + let text = if ident.is_raw { tt::SmolStr::from_iter(["r#", &text]) } else { text }; let ident: tt::Ident = tt::Ident { text, id: ident.span }; let leaf = tt::Leaf::from(ident); let tree = TokenTree::from(leaf); @@ -182,9 +182,8 @@ impl server::TokenStream for RustAnalyzer { .map(|tree| match tree { tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { bridge::TokenTree::Ident(bridge::Ident { - sym: Symbol::intern(&ident.text), - // FIXME: handle raw idents - is_raw: false, + sym: Symbol::intern(ident.text.trim_start_matches("r#")), + is_raw: ident.text.starts_with("r#"), span: ident.id, }) } diff --git 
a/crates/proc-macro-srv/src/abis/mod.rs b/crates/proc-macro-srv/src/abis/mod.rs index 0ce099ae0bab3..5b8aca4d8164e 100644 --- a/crates/proc-macro-srv/src/abis/mod.rs +++ b/crates/proc-macro-srv/src/abis/mod.rs @@ -117,7 +117,7 @@ impl Abi { let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?; Ok(Abi::Abi1_63(inner)) } - _ => Err(LoadProcMacroDylibError::UnsupportedABI(info.version_string.clone())), + _ => Err(LoadProcMacroDylibError::UnsupportedABI(info.version_string)), } } diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index b4f5ebd157f33..2eb939a7ce581 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -48,7 +48,7 @@ impl ProcMacroSrv { pub fn expand(&mut self, task: ExpandMacro) -> Result { let expander = self.expander(task.lib.as_ref()).map_err(|err| { debug_assert!(false, "should list macros before asking to expand"); - PanicMessage(format!("failed to load macro: {}", err)) + PanicMessage(format!("failed to load macro: {err}")) })?; let prev_env = EnvSnapshot::new(); @@ -59,7 +59,7 @@ impl ProcMacroSrv { Some(dir) => { let prev_working_dir = std::env::current_dir().ok(); if let Err(err) = std::env::set_current_dir(&dir) { - eprintln!("Failed to set the current working dir to {}. Error: {:?}", dir, err) + eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}") } prev_working_dir } @@ -112,14 +112,16 @@ impl ProcMacroSrv { } fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> { - let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| { - format!("Failed to get file metadata for {}: {}", path.display(), err) - })?; + let time = fs::metadata(path) + .and_then(|it| it.modified()) + .map_err(|err| format!("Failed to get file metadata for {}: {err}", path.display()))?; Ok(match self.expanders.entry((path.to_path_buf(), time)) { - Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| { - format!("Cannot create expander for {}: {}", path.display(), err) - })?), + Entry::Vacant(v) => { + v.insert(dylib::Expander::new(path).map_err(|err| { + format!("Cannot create expander for {}: {err}", path.display()) + })?) + } Entry::Occupied(e) => e.into_mut(), }) } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index cc0fc91fe989e..1ccc170f422b0 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -63,7 +63,7 @@ fn test_fn_like_macro_clone_raw_ident() { "r#async", expect![[r#" SUBTREE $ - IDENT async 4294967295"#]], + IDENT r#async 4294967295"#]], ); } @@ -86,15 +86,13 @@ fn test_fn_like_mk_literals() { #[test] fn test_fn_like_mk_idents() { - // FIXME: this test is wrong: raw should be 'r#raw' but ABIs 1.64 and below - // simply ignore `is_raw` when implementing the `Ident` interface. 
assert_expand( "fn_like_mk_idents", r#""#, expect![[r#" SUBTREE $ IDENT standard 4294967295 - IDENT raw 4294967295"#]], + IDENT r#raw 4294967295"#]], ); } diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs index 44b1b6588da0a..efbeb90ca9dd5 100644 --- a/crates/proc-macro-srv/src/tests/utils.rs +++ b/crates/proc-macro-srv/src/tests/utils.rs @@ -30,12 +30,12 @@ fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree()); let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap(); - expect.assert_eq(&format!("{:?}", res)); + expect.assert_eq(&format!("{res:?}")); } pub(crate) fn list() -> Vec { let dylib_path = proc_macro_test_dylib_path(); let mut srv = ProcMacroSrv::default(); let res = srv.list_macros(&dylib_path).unwrap(); - res.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() + res.into_iter().map(|(name, kind)| format!("{name} [{kind:?}]")).collect() } diff --git a/crates/proc-macro-test/build.rs b/crates/proc-macro-test/build.rs index a80c962617bb3..19a5caa4ccda6 100644 --- a/crates/proc-macro-test/build.rs +++ b/crates/proc-macro-test/build.rs @@ -63,7 +63,7 @@ fn main() { }; cmd.current_dir(&staging_dir) - .args(&["build", "-p", "proc-macro-test-impl", "--message-format", "json"]) + .args(["build", "-p", "proc-macro-test-impl", "--message-format", "json"]) // Explicit override the target directory to avoid using the same one which the parent // cargo is using, or we'll deadlock. // This can happen when `CARGO_TARGET_DIR` is set or global config forces all cargo @@ -71,7 +71,7 @@ fn main() { .arg("--target-dir") .arg(&target_dir); - println!("Running {:?}", cmd); + println!("Running {cmd:?}"); let output = cmd.output().unwrap(); if !output.status.success() { @@ -85,16 +85,13 @@ fn main() { let mut artifact_path = None; for message in Message::parse_stream(output.stdout.as_slice()) { - match message.unwrap() { - Message::CompilerArtifact(artifact) => { - if artifact.target.kind.contains(&"proc-macro".to_string()) { - let repr = format!("{} {}", name, version); - if artifact.package_id.repr.starts_with(&repr) { - artifact_path = Some(PathBuf::from(&artifact.filenames[0])); - } + if let Message::CompilerArtifact(artifact) = message.unwrap() { + if artifact.target.kind.contains(&"proc-macro".to_string()) { + let repr = format!("{name} {version}"); + if artifact.package_id.repr.starts_with(&repr) { + artifact_path = Some(PathBuf::from(&artifact.filenames[0])); } } - _ => (), // Unknown message } } diff --git a/crates/profile/src/hprof.rs b/crates/profile/src/hprof.rs index b562c193e7137..ea89a89c5c5ca 100644 --- a/crates/profile/src/hprof.rs +++ b/crates/profile/src/hprof.rs @@ -133,7 +133,7 @@ static FILTER: Lazy> = Lazy::new(Default::default); fn with_profile_stack(f: impl FnOnce(&mut ProfileStack) -> T) -> T { thread_local!(static STACK: RefCell = RefCell::new(ProfileStack::new())); - STACK.with(|it| f(&mut *it.borrow_mut())) + STACK.with(|it| f(&mut it.borrow_mut())) } #[derive(Default, Clone, Debug)] @@ -238,7 +238,7 @@ impl ProfileStack { self.heartbeat(frame.heartbeats); let avg_span = duration / (frame.heartbeats + 1); if avg_span > self.filter.heartbeat_longer_than { - eprintln!("Too few heartbeats {} ({}/{:?})?", label, frame.heartbeats, duration); + eprintln!("Too few heartbeats {label} ({}/{duration:?})?", frame.heartbeats); } } @@ -275,7 +275,7 @@ fn print( out: &mut impl Write, ) { let 
current_indent = " ".repeat(level as usize); - let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {}", it)).unwrap_or_default(); + let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {it}")).unwrap_or_default(); writeln!( out, "{}{} - {}{}", @@ -302,13 +302,13 @@ fn print( } for (child_msg, (duration, count)) in &short_children { - writeln!(out, " {}{} - {} ({} calls)", current_indent, ms(*duration), child_msg, count) + writeln!(out, " {current_indent}{} - {child_msg} ({count} calls)", ms(*duration)) .expect("printing profiling info"); } let unaccounted = tree[curr].duration - accounted_for; if tree.children(curr).next().is_some() && unaccounted > longer_than { - writeln!(out, " {}{} - ???", current_indent, ms(unaccounted)) + writeln!(out, " {current_indent}{} - ???", ms(unaccounted)) .expect("printing profiling info"); } } @@ -320,7 +320,7 @@ impl fmt::Display for ms { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self.0.as_millis() { 0 => f.write_str(" 0 "), - n => write!(f, "{:5}ms", n), + n => write!(f, "{n:5}ms"), } } } diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs index 00f7952e80725..7ca3c7d629570 100644 --- a/crates/profile/src/lib.rs +++ b/crates/profile/src/lib.rs @@ -114,11 +114,11 @@ impl Drop for CpuSpan { match out { Ok(out) if out.status.success() => { let svg = profile_data.with_extension("svg"); - std::fs::write(&svg, &out.stdout).unwrap(); + std::fs::write(&svg, out.stdout).unwrap(); eprintln!("Profile rendered to:\n\n {}\n", svg.display()); } _ => { - eprintln!("Failed to run:\n\n {:?}\n", cmd); + eprintln!("Failed to run:\n\n {cmd:?}\n"); } } } diff --git a/crates/profile/src/memory_usage.rs b/crates/profile/src/memory_usage.rs index ee882b4cb4c68..8017f865792b3 100644 --- a/crates/profile/src/memory_usage.rs +++ b/crates/profile/src/memory_usage.rs @@ -109,7 +109,7 @@ impl fmt::Display for Bytes { suffix = "mb"; } } - f.pad(&format!("{}{}", value, suffix)) + f.pad(&format!("{value}{suffix}")) } } diff --git a/crates/profile/src/stop_watch.rs b/crates/profile/src/stop_watch.rs index 6258328482962..71303d5a63162 100644 --- a/crates/profile/src/stop_watch.rs +++ b/crates/profile/src/stop_watch.rs @@ -33,11 +33,11 @@ impl StopWatch { if *PERF_ENABLED { let mut counter = perf_event::Builder::new() .build() - .map_err(|err| eprintln!("Failed to create perf counter: {}", err)) + .map_err(|err| eprintln!("Failed to create perf counter: {err}")) .ok(); if let Some(counter) = &mut counter { if let Err(err) = counter.enable() { - eprintln!("Failed to start perf counter: {}", err) + eprintln!("Failed to start perf counter: {err}") } } counter @@ -64,7 +64,7 @@ impl StopWatch { #[cfg(target_os = "linux")] let instructions = self.counter.as_mut().and_then(|it| { - it.read().map_err(|err| eprintln!("Failed to read perf counter: {}", err)).ok() + it.read().map_err(|err| eprintln!("Failed to read perf counter: {err}")).ok() }); #[cfg(not(target_os = "linux"))] let instructions = None; @@ -91,10 +91,10 @@ impl fmt::Display for StopWatchSpan { instructions /= 1000; prefix = "g"; } - write!(f, ", {}{}instr", instructions, prefix)?; + write!(f, ", {instructions}{prefix}instr")?; } if let Some(memory) = self.memory { - write!(f, ", {}", memory)?; + write!(f, ", {memory}")?; } Ok(()) } diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index ae2b41f27d58e..6550cf27e9916 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -66,7 +66,7 
@@ impl WorkspaceBuildScripts { _ => { let mut cmd = Command::new(toolchain::cargo()); - cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]); + cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); // --all-targets includes tests, benches and examples in addition to the // default lib and bins. This is an independent concept from the --target @@ -74,7 +74,7 @@ impl WorkspaceBuildScripts { cmd.arg("--all-targets"); if let Some(target) = &config.target { - cmd.args(&["--target", target]); + cmd.args(["--target", target]); } match &config.features { @@ -122,7 +122,7 @@ impl WorkspaceBuildScripts { InvocationLocation::Root(root) if config.run_build_script_command.is_some() => { root.as_path() } - _ => &workspace.workspace_root(), + _ => workspace.workspace_root(), } .as_ref(); @@ -133,7 +133,7 @@ impl WorkspaceBuildScripts { // building build scripts failed, attempt to build with --keep-going so // that we potentially get more build data let mut cmd = Self::build_command(config)?; - cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); + cmd.args(["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?; res.error = Some(error); Ok(res) @@ -295,7 +295,7 @@ impl WorkspaceBuildScripts { match message { Message::BuildScriptExecuted(mut message) => { with_output_for(&message.package_id.repr, &mut |name, data| { - progress(format!("running build-script: {}", name)); + progress(format!("running build-script: {name}")); let cfgs = { let mut acc = Vec::new(); for cfg in &message.cfgs { @@ -303,8 +303,7 @@ impl WorkspaceBuildScripts { Ok(it) => acc.push(it), Err(err) => { push_err(&format!( - "invalid cfg from cargo-metadata: {}", - err + "invalid cfg from cargo-metadata: {err}" )); return; } @@ -334,7 +333,7 @@ impl WorkspaceBuildScripts { } Message::CompilerArtifact(message) => { with_output_for(&message.package_id.repr, &mut |name, data| { - progress(format!("building proc-macros: {}", name)); + progress(format!("building proc-macros: {name}")); if message.target.kind.iter().any(|k| k == "proc-macro") { // Skip rmeta file if let Some(filename) = diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index 02ec7a4f6f992..467cf0917875a 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -411,7 +411,7 @@ impl CargoWorkspace { CargoWorkspace { packages, targets, workspace_root } } - pub fn packages<'a>(&'a self) -> impl Iterator + ExactSizeIterator + 'a { + pub fn packages(&self) -> impl Iterator + ExactSizeIterator + '_ { self.packages.iter().map(|(id, _pkg)| id) } @@ -427,7 +427,7 @@ impl CargoWorkspace { } pub fn package_flag(&self, package: &PackageData) -> String { - if self.is_unique(&*package.name) { + if self.is_unique(&package.name) { package.name.clone() } else { format!("{}:{}", package.name, package.version) @@ -517,7 +517,7 @@ fn cargo_config_build_target( cargo_config.envs(extra_env); cargo_config .current_dir(cargo_toml.parent()) - .args(&["-Z", "unstable-options", "config", "get", "build.target"]) + .args(["-Z", "unstable-options", "config", "get", "build.target"]) .env("RUSTC_BOOTSTRAP", "1"); // if successful we receive `build.target = "target-triple"` // or `build.target = ["", ..]` diff --git a/crates/project-model/src/cfg_flag.rs b/crates/project-model/src/cfg_flag.rs index f3dd8f51333be..c134b78ab3a2e 100644 --- 
a/crates/project-model/src/cfg_flag.rs +++ b/crates/project-model/src/cfg_flag.rs @@ -17,7 +17,7 @@ impl FromStr for CfgFlag { let res = match s.split_once('=') { Some((key, value)) => { if !(value.starts_with('"') && value.ends_with('"')) { - return Err(format!("Invalid cfg ({:?}), value should be in quotes", s)); + return Err(format!("Invalid cfg ({s:?}), value should be in quotes")); } let key = key.to_string(); let value = value[1..value.len() - 1].to_string(); diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs index 575581fa543a3..e2f09bad2ded5 100644 --- a/crates/project-model/src/lib.rs +++ b/crates/project-model/src/lib.rs @@ -25,6 +25,7 @@ mod sysroot; mod workspace; mod rustc_cfg; mod build_scripts; +mod target_data_layout; #[cfg(test)] mod tests; @@ -145,7 +146,7 @@ impl ProjectManifest { } fn utf8_stdout(mut cmd: Command) -> Result { - let output = cmd.output().with_context(|| format!("{:?} failed", cmd))?; + let output = cmd.output().with_context(|| format!("{cmd:?} failed"))?; if !output.status.success() { match String::from_utf8(output.stderr) { Ok(stderr) if !stderr.is_empty() => { diff --git a/crates/project-model/src/manifest_path.rs b/crates/project-model/src/manifest_path.rs index 4910fd3d11ccc..980d92d3df9db 100644 --- a/crates/project-model/src/manifest_path.rs +++ b/crates/project-model/src/manifest_path.rs @@ -40,7 +40,7 @@ impl ops::Deref for ManifestPath { type Target = AbsPath; fn deref(&self) -> &Self::Target { - &*self.file + &self.file } } diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index 5133a14d532bb..9af0eafe9fddb 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -197,5 +197,5 @@ where D: de::Deserializer<'de>, { let name = String::deserialize(de)?; - CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {:?}", err))) + CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {err:?}"))) } diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs index 3231361836634..0066f6717efe6 100644 --- a/crates/project-model/src/rustc_cfg.rs +++ b/crates/project-model/src/rustc_cfg.rs @@ -50,10 +50,10 @@ fn get_rust_cfgs( cargo_config.envs(extra_env); cargo_config .current_dir(cargo_toml.parent()) - .args(&["-Z", "unstable-options", "rustc", "--print", "cfg"]) + .args(["rustc", "-Z", "unstable-options", "--print", "cfg"]) .env("RUSTC_BOOTSTRAP", "1"); if let Some(target) = target { - cargo_config.args(&["--target", target]); + cargo_config.args(["--target", target]); } match utf8_stdout(cargo_config) { Ok(it) => return Ok(it), @@ -63,9 +63,9 @@ fn get_rust_cfgs( // using unstable cargo features failed, fall back to using plain rustc let mut cmd = Command::new(toolchain::rustc()); cmd.envs(extra_env); - cmd.args(&["--print", "cfg", "-O"]); + cmd.args(["--print", "cfg", "-O"]); if let Some(target) = target { - cmd.args(&["--target", target]); + cmd.args(["--target", target]); } utf8_stdout(cmd) } diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index f6c09a27c9d7e..8d5ab0061e518 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -104,7 +104,7 @@ impl Sysroot { for path in SYSROOT_CRATES.trim().lines() { let name = path.split('/').last().unwrap(); - let root = [format!("{}/src/lib.rs", path), format!("lib{}/lib.rs", path)] + let root = [format!("{path}/src/lib.rs"), 
format!("lib{path}/lib.rs")] .into_iter() .map(|it| sysroot.src_root.join(it)) .filter_map(|it| ManifestPath::try_from(it).ok()) @@ -171,7 +171,7 @@ fn discover_sysroot_dir( ) -> Result { let mut rustc = Command::new(toolchain::rustc()); rustc.envs(extra_env); - rustc.current_dir(current_dir).args(&["--print", "sysroot"]); + rustc.current_dir(current_dir).args(["--print", "sysroot"]); tracing::debug!("Discovering sysroot by {:?}", rustc); let stdout = utf8_stdout(rustc)?; Ok(AbsPathBuf::assert(PathBuf::from(stdout))) @@ -203,7 +203,7 @@ fn discover_sysroot_src_dir_or_add_component( .or_else(|| { let mut rustup = Command::new(toolchain::rustup()); rustup.envs(extra_env); - rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]); + rustup.current_dir(current_dir).args(["component", "add", "rust-src"]); tracing::info!("adding rust-src component by {:?}", rustup); utf8_stdout(rustup).ok()?; get_rust_src(sysroot_path) diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs new file mode 100644 index 0000000000000..40cf47c3f5597 --- /dev/null +++ b/crates/project-model/src/target_data_layout.rs @@ -0,0 +1,40 @@ +//! Runs `rustc --print target-spec-json` to get the target_data_layout. +use std::process::Command; + +use rustc_hash::FxHashMap; + +use crate::{utf8_stdout, ManifestPath}; + +pub(super) fn get( + cargo_toml: Option<&ManifestPath>, + target: Option<&str>, + extra_env: &FxHashMap, +) -> Option { + let output = (|| { + if let Some(cargo_toml) = cargo_toml { + let mut cmd = Command::new(toolchain::rustc()); + cmd.envs(extra_env); + cmd.current_dir(cargo_toml.parent()) + .args(["-Z", "unstable-options", "rustc", "--print", "target-spec-json"]) + .env("RUSTC_BOOTSTRAP", "1"); + if let Some(target) = target { + cmd.args(["--target", target]); + } + match utf8_stdout(cmd) { + Ok(it) => return Ok(it), + Err(e) => tracing::debug!("{e:?}: falling back to querying rustc for cfgs"), + } + } + // using unstable cargo features failed, fall back to using plain rustc + let mut cmd = Command::new(toolchain::rustc()); + cmd.envs(extra_env) + .args(["-Z", "unstable-options", "rustc", "--print", "target-spec-json"]) + .env("RUSTC_BOOTSTRAP", "1"); + if let Some(target) = target { + cmd.args(["--target", target]); + } + utf8_stdout(cmd) + })() + .ok()?; + Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()) +} diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index a1cb438bddc4c..2bb9ebf998bdb 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -29,6 +29,7 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr rustc_cfg: Vec::new(), cfg_overrides, toolchain: None, + target_layout: None, }; to_crate_graph(project_workspace) } @@ -106,7 +107,7 @@ fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph { } fn check_crate_graph(crate_graph: CrateGraph, expect: Expect) { - let mut crate_graph = format!("{:#?}", crate_graph); + let mut crate_graph = format!("{crate_graph:#?}"); replace_root(&mut crate_graph, false); expect.assert_eq(&crate_graph); } @@ -150,6 +151,7 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -219,6 +221,7 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ 
-297,6 +300,7 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -375,6 +379,7 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -462,6 +467,7 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "feature=use_std", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -547,6 +553,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -618,6 +625,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -698,6 +706,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -778,6 +787,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -865,6 +875,7 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "feature=use_std", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -941,6 +952,7 @@ fn cargo_hello_world_project_model() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1012,6 +1024,7 @@ fn cargo_hello_world_project_model() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1092,6 +1105,7 @@ fn cargo_hello_world_project_model() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1172,6 +1186,7 @@ fn cargo_hello_world_project_model() { "test", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1259,6 +1274,7 @@ fn cargo_hello_world_project_model() { "feature=use_std", ], ), + target_layout: None, env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1327,6 +1343,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1371,6 +1388,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1405,6 +1423,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1439,6 +1458,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1473,6 +1493,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1517,6 +1538,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1551,6 +1573,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1658,6 +1681,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1692,6 +1716,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, 
}, @@ -1726,6 +1751,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, @@ -1760,6 +1786,7 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), + target_layout: None, env: Env { entries: {}, }, diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 3d199ed24afe7..e2382aa37e8e5 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -21,8 +21,8 @@ use crate::{ cfg_flag::CfgFlag, rustc_cfg, sysroot::SysrootCrate, - utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package, - ProjectJson, ProjectManifest, Sysroot, TargetKind, WorkspaceBuildScripts, + target_data_layout, utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, + Package, ProjectJson, ProjectManifest, Sysroot, TargetKind, WorkspaceBuildScripts, }; /// A set of cfg-overrides per crate. @@ -79,6 +79,7 @@ pub enum ProjectWorkspace { rustc_cfg: Vec, cfg_overrides: CfgOverrides, toolchain: Option, + target_layout: Option, }, /// Project workspace was manually specified using a `rust-project.json` file. Json { project: ProjectJson, sysroot: Option, rustc_cfg: Vec }, @@ -93,7 +94,7 @@ pub enum ProjectWorkspace { // // /// Project with a set of disjoint files, not belonging to any particular workspace. /// Backed by basic sysroot crates for basic completion and highlighting. - DetachedFiles { files: Vec, sysroot: Sysroot, rustc_cfg: Vec }, + DetachedFiles { files: Vec, sysroot: Option, rustc_cfg: Vec }, } impl fmt::Debug for ProjectWorkspace { @@ -108,6 +109,7 @@ impl fmt::Debug for ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, + target_layout: data_layout, } => f .debug_struct("Cargo") .field("root", &cargo.workspace_root().file_name()) @@ -120,6 +122,7 @@ impl fmt::Debug for ProjectWorkspace { .field("n_rustc_cfg", &rustc_cfg.len()) .field("n_cfg_overrides", &cfg_overrides.len()) .field("toolchain", &toolchain) + .field("data_layout", &data_layout) .finish(), ProjectWorkspace::Json { project, sysroot, rustc_cfg } => { let mut debug_struct = f.debug_struct("Json"); @@ -133,7 +136,7 @@ impl fmt::Debug for ProjectWorkspace { ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f .debug_struct("DetachedFiles") .field("n_files", &files.len()) - .field("n_sysroot_crates", &sysroot.crates().len()) + .field("sysroot", &sysroot.is_some()) .field("n_rustc_cfg", &rustc_cfg.len()) .finish(), } @@ -191,10 +194,7 @@ impl ProjectWorkspace { let sysroot = match &config.sysroot { Some(RustcSource::Path(path)) => { Some(Sysroot::with_sysroot_dir(path.clone()).with_context(|| { - format!( - "Failed to find sysroot for Cargo.toml file {}.", - cargo_toml.display() - ) + format!("Failed to find sysroot at {}.", path.display()) })?) 
} Some(RustcSource::Discover) => Some( @@ -244,6 +244,11 @@ impl ProjectWorkspace { rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), &config.extra_env); let cfg_overrides = config.cfg_overrides(); + let data_layout = target_data_layout::get( + Some(&cargo_toml), + config.target.as_deref(), + &config.extra_env, + ); ProjectWorkspace::Cargo { cargo, build_scripts: WorkspaceBuildScripts::default(), @@ -252,6 +257,7 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, + target_layout: data_layout, } } }; @@ -291,14 +297,29 @@ impl ProjectWorkspace { Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg }) } - pub fn load_detached_files(detached_files: Vec) -> Result { - let sysroot = Sysroot::discover( - detached_files - .first() - .and_then(|it| it.parent()) - .ok_or_else(|| format_err!("No detached files to load"))?, - &Default::default(), - )?; + pub fn load_detached_files( + detached_files: Vec, + config: &CargoConfig, + ) -> Result { + let sysroot = match &config.sysroot { + Some(RustcSource::Path(path)) => Some( + Sysroot::with_sysroot_dir(path.clone()) + .with_context(|| format!("Failed to find sysroot at {}.", path.display()))?, + ), + Some(RustcSource::Discover) => { + let dir = &detached_files + .first() + .and_then(|it| it.parent()) + .ok_or_else(|| format_err!("No detached files to load"))?; + Some(Sysroot::discover(dir, &config.extra_env).with_context(|| { + format!("Failed to find sysroot in {}. Is rust-src installed?", dir.display()) + })?) + } + None => None, + }; + if let Some(sysroot) = &sysroot { + tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); + } let rustc_cfg = rustc_cfg::get(None, None, &Default::default()); Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) } @@ -386,7 +407,7 @@ impl ProjectWorkspace { ["libexec", "lib"] .into_iter() .map(|segment| sysroot.root().join(segment).join(&standalone_server_name)) - .find(|server_path| std::fs::metadata(&server_path).is_ok()) + .find(|server_path| std::fs::metadata(server_path).is_ok()) } _ => None, } @@ -423,6 +444,7 @@ impl ProjectWorkspace { cfg_overrides: _, build_scripts, toolchain: _, + target_layout: _, } => { cargo .packages() @@ -479,21 +501,25 @@ impl ProjectWorkspace { include: vec![detached_file.clone()], exclude: Vec::new(), }) - .chain(mk_sysroot(Some(sysroot))) + .chain(mk_sysroot(sysroot.as_ref())) .collect(), } } pub fn n_packages(&self) -> usize { match self { - ProjectWorkspace::Json { project, .. } => project.n_crates(), + ProjectWorkspace::Json { project, sysroot, .. } => { + let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len()); + sysroot_package_len + project.n_crates() + } ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => { let rustc_package_len = rustc.as_ref().map_or(0, |it| it.packages().len()); let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len()); cargo.packages().len() + sysroot_package_len + rustc_package_len } ProjectWorkspace::DetachedFiles { sysroot, files, .. 
} => { - sysroot.crates().len() + files.len() + let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len()); + sysroot_package_len + files.len() } } } @@ -514,6 +540,7 @@ impl ProjectWorkspace { project, sysroot, extra_env, + None, ), ProjectWorkspace::Cargo { cargo, @@ -523,6 +550,7 @@ impl ProjectWorkspace { cfg_overrides, build_scripts, toolchain: _, + target_layout, } => cargo_to_crate_graph( load_proc_macro, load, @@ -532,9 +560,10 @@ impl ProjectWorkspace { rustc_cfg.clone(), cfg_overrides, build_scripts, + target_layout.as_deref().map(Arc::from), ), ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { - detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot) + detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot, None) } }; if crate_graph.patch_cfg_if() { @@ -553,11 +582,18 @@ fn project_json_to_crate_graph( project: &ProjectJson, sysroot: &Option, extra_env: &FxHashMap, + target_layout: Option>, ) -> CrateGraph { let mut crate_graph = CrateGraph::default(); - let sysroot_deps = sysroot - .as_ref() - .map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load)); + let sysroot_deps = sysroot.as_ref().map(|sysroot| { + sysroot_to_crate_graph( + &mut crate_graph, + sysroot, + rustc_cfg.clone(), + target_layout.clone(), + load, + ) + }); let mut cfg_cache: FxHashMap<&str, Vec> = FxHashMap::default(); let crates: NoHashHashMap = project @@ -609,6 +645,7 @@ fn project_json_to_crate_graph( } else { CrateOrigin::CratesIo { repo: None, name: None } }, + target_layout.clone(), ), ) }) @@ -649,11 +686,18 @@ fn cargo_to_crate_graph( rustc_cfg: Vec, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, + target_layout: Option>, ) -> CrateGraph { let _p = profile::span("cargo_to_crate_graph"); let mut crate_graph = CrateGraph::default(); let (public_deps, libproc_macro) = match sysroot { - Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load), + Some(sysroot) => sysroot_to_crate_graph( + &mut crate_graph, + sysroot, + rustc_cfg.clone(), + target_layout.clone(), + load, + ), None => (SysrootPublicDeps::default(), None), }; @@ -716,6 +760,7 @@ fn cargo_to_crate_graph( file_id, &cargo[tgt].name, cargo[tgt].is_proc_macro, + target_layout.clone(), ); if cargo[tgt].kind == TargetKind::Lib { lib_tgt = Some((crate_id, cargo[tgt].name.clone())); @@ -795,6 +840,7 @@ fn cargo_to_crate_graph( &cfg_options, override_cfg, build_scripts, + target_layout, ); } } @@ -805,12 +851,21 @@ fn detached_files_to_crate_graph( rustc_cfg: Vec, load: &mut dyn FnMut(&AbsPath) -> Option, detached_files: &[AbsPathBuf], - sysroot: &Sysroot, + sysroot: &Option, + target_layout: Option>, ) -> CrateGraph { let _p = profile::span("detached_files_to_crate_graph"); let mut crate_graph = CrateGraph::default(); - let (public_deps, _libproc_macro) = - sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load); + let (public_deps, _libproc_macro) = match sysroot { + Some(sysroot) => sysroot_to_crate_graph( + &mut crate_graph, + sysroot, + rustc_cfg.clone(), + target_layout.clone(), + load, + ), + None => (SysrootPublicDeps::default(), None), + }; let mut cfg_options = CfgOptions::default(); cfg_options.extend(rustc_cfg); @@ -841,6 +896,7 @@ fn detached_files_to_crate_graph( repo: None, name: display_name.map(|n| n.canonical_name().to_string()), }, + target_layout.clone(), ); public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate); @@ -861,6 +917,7 @@ fn 
handle_rustc_crates( cfg_options: &CfgOptions, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, + target_layout: Option>, ) { let mut rustc_pkg_crates = FxHashMap::default(); // The root package of the rustc-dev component is rustc_driver, so we match that @@ -917,6 +974,7 @@ fn handle_rustc_crates( file_id, &rustc_workspace[tgt].name, rustc_workspace[tgt].is_proc_macro, + target_layout.clone(), ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate @@ -981,6 +1039,7 @@ fn add_target_crate_root( file_id: FileId, cargo_name: &str, is_proc_macro: bool, + target_layout: Option>, ) -> CrateId { let edition = pkg.edition; let mut potential_cfg_options = cfg_options.clone(); @@ -1027,6 +1086,7 @@ fn add_target_crate_root( proc_macro, is_proc_macro, CrateOrigin::CratesIo { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) }, + target_layout, ) } @@ -1048,6 +1108,7 @@ fn sysroot_to_crate_graph( crate_graph: &mut CrateGraph, sysroot: &Sysroot, rustc_cfg: Vec, + target_layout: Option>, load: &mut dyn FnMut(&AbsPath) -> Option, ) -> (SysrootPublicDeps, Option) { let _p = profile::span("sysroot_to_crate_graph"); @@ -1071,6 +1132,7 @@ fn sysroot_to_crate_graph( Err("no proc macro loaded for sysroot crate".into()), false, CrateOrigin::Lang(LangCrateOrigin::from(&*sysroot[krate].name)), + target_layout.clone(), ); Some((krate, crate_id)) }) diff --git a/crates/rust-analyzer/src/bin/logger.rs b/crates/rust-analyzer/src/bin/logger.rs index ac10721d95514..8caadecd850b3 100644 --- a/crates/rust-analyzer/src/bin/logger.rs +++ b/crates/rust-analyzer/src/bin/logger.rs @@ -81,9 +81,9 @@ impl Logger { Registry::default() .with( self.filter - .add_directive(format!("chalk_solve={}", val).parse()?) - .add_directive(format!("chalk_ir={}", val).parse()?) - .add_directive(format!("chalk_recursive={}", val).parse()?), + .add_directive(format!("chalk_solve={val}").parse()?) + .add_directive(format!("chalk_ir={val}").parse()?) 
+ .add_directive(format!("chalk_recursive={val}").parse()?), ) .with(ra_fmt_layer) .with(chalk_layer) @@ -124,7 +124,7 @@ where Some(log) => log.target(), None => event.metadata().target(), }; - write!(writer, "[{} {}] ", level, target)?; + write!(writer, "[{level} {target}] ")?; // Write spans and fields of each span ctx.visit_spans(|span| { @@ -140,7 +140,7 @@ where let fields = &ext.get::>().expect("will never be `None`"); if !fields.is_empty() { - write!(writer, "{{{}}}", fields)?; + write!(writer, "{{{fields}}}")?; } write!(writer, ": ")?; diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 7bf595d2a45f5..53710749de3d4 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -30,7 +30,7 @@ fn main() { let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) { Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102), Err(err) => { - eprintln!("{}", err); + eprintln!("{err}"); 101 } }; @@ -40,7 +40,7 @@ fn main() { let flags = flags::RustAnalyzer::from_env_or_exit(); if let Err(err) = try_main(flags) { tracing::error!("Unexpected error: {}", err); - eprintln!("{}", err); + eprintln!("{err}"); process::exit(101); } } @@ -183,6 +183,8 @@ fn run_server() -> Result<()> { } } + config.client_specific_adjustments(&initialize_params.client_info); + let server_capabilities = rust_analyzer::server_capabilities(&config); let initialize_result = lsp_types::InitializeResult { diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs index 723b888d9abc8..122d2e6ff1b76 100644 --- a/crates/rust-analyzer/src/caps.rs +++ b/crates/rust-analyzer/src/caps.rs @@ -42,7 +42,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { "(".to_string(), ]), all_commit_characters: None, - completion_item: completion_item(&config), + completion_item: completion_item(config), work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, }), signature_help_provider: Some(SignatureHelpOptions { @@ -67,7 +67,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { }, document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions { first_trigger_character: "=".to_string(), - more_trigger_character: Some(more_trigger_character(&config)), + more_trigger_character: Some(more_trigger_character(config)), }), selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)), folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)), diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs index 60ba67e25f93b..d5d877680a09a 100644 --- a/crates/rust-analyzer/src/cli.rs +++ b/crates/rust-analyzer/src/cli.rs @@ -46,7 +46,7 @@ fn report_metric(metric: &str, value: u64, unit: &str) { if std::env::var("RA_METRICS").is_err() { return; } - println!("METRIC:{}:{}:{}", metric, value, unit) + println!("METRIC:{metric}:{value}:{unit}") } fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) { @@ -65,6 +65,6 @@ fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) { for (name, bytes) in mem { // NOTE: Not a debug print, so avoid going through the `eprintln` defined above. 
- eprintln!("{:>8} {}", bytes, name); + eprintln!("{bytes:>8} {name}"); } } diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 01fccc83e8227..053db5fc5331d 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -87,9 +87,9 @@ impl flags::AnalysisStats { load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?; let db = host.raw_database(); eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed()); - eprint!(" (metadata {}", metadata_time); + eprint!(" (metadata {metadata_time}"); if let Some(build_scripts_time) = build_scripts_time { - eprint!("; build {}", build_scripts_time); + eprint!("; build {build_scripts_time}"); } eprintln!(")"); @@ -118,7 +118,7 @@ impl flags::AnalysisStats { shuffle(&mut rng, &mut visit_queue); } - eprint!(" crates: {}", num_crates); + eprint!(" crates: {num_crates}"); let mut num_decls = 0; let mut funcs = Vec::new(); while let Some(module) = visit_queue.pop() { @@ -142,7 +142,7 @@ impl flags::AnalysisStats { } } } - eprintln!(", mods: {}, decls: {}, fns: {}", visited_modules.len(), num_decls, funcs.len()); + eprintln!(", mods: {}, decls: {num_decls}, fns: {}", visited_modules.len(), funcs.len()); eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed()); if self.randomize { @@ -154,7 +154,7 @@ impl flags::AnalysisStats { } let total_span = analysis_sw.elapsed(); - eprintln!("{:<20} {}", "Total:", total_span); + eprintln!("{:<20} {total_span}", "Total:"); report_metric("total time", total_span.time.as_millis() as u64, "ms"); if let Some(instructions) = total_span.instructions { report_metric("total instructions", instructions, "#instr"); @@ -179,7 +179,7 @@ impl flags::AnalysisStats { total_macro_file_size += syntax_len(val.syntax_node()) } } - eprintln!("source files: {}, macro files: {}", total_file_size, total_macro_file_size); + eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}"); } if self.memory_usage && verbosity.is_verbose() { @@ -239,7 +239,7 @@ impl flags::AnalysisStats { continue; } } - let mut msg = format!("processing: {}", full_name); + let mut msg = format!("processing: {full_name}"); if verbosity.is_verbose() { if let Some(src) = f.source(db) { let original_file = src.file_id.original_file(db); @@ -275,7 +275,7 @@ impl flags::AnalysisStats { end.col, )); } else { - bar.println(format!("{}: Unknown type", name,)); + bar.println(format!("{name}: Unknown type",)); } } true @@ -402,7 +402,7 @@ fn location_csv( let text_range = original_range.range; let (start, end) = (line_index.line_col(text_range.start()), line_index.line_col(text_range.end())); - format!("{},{}:{},{}:{}", path, start.line + 1, start.col, end.line + 1, end.col) + format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col) } fn expr_syntax_range( diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index 247007db0a788..fd5b3ce61f78d 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -40,7 +40,7 @@ impl flags::Diagnostics { if !visited_files.contains(&file_id) { let crate_name = module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string(); - println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id)); + println!("processing crate: {crate_name}, module: {}", _vfs.file_path(file_id)); for diagnostic in analysis .diagnostics( 
&DiagnosticsConfig::test_sample(), @@ -53,7 +53,7 @@ impl flags::Diagnostics { found_error = true; } - println!("{:?}", diagnostic); + println!("{diagnostic:?}"); } visited_files.insert(file_id); diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index 5bcc97e226122..770612cc9478d 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -255,7 +255,7 @@ impl FromStr for OutputFormat { fn from_str(s: &str) -> Result { match s { "csv" => Ok(Self::Csv), - _ => Err(format!("unknown output format `{}`", s)), + _ => Err(format!("unknown output format `{s}`")), } } } diff --git a/crates/rust-analyzer/src/cli/highlight.rs b/crates/rust-analyzer/src/cli/highlight.rs index 4f9b362f1bec4..84607b9fd5d52 100644 --- a/crates/rust-analyzer/src/cli/highlight.rs +++ b/crates/rust-analyzer/src/cli/highlight.rs @@ -8,7 +8,7 @@ impl flags::Highlight { pub fn run(self) -> anyhow::Result<()> { let (analysis, file_id) = Analysis::from_single_file(read_stdin()?); let html = analysis.highlight_as_html(file_id, self.rainbow).unwrap(); - println!("{}", html); + println!("{html}"); Ok(()) } } diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs index c74ddabb17770..af8356d041f83 100644 --- a/crates/rust-analyzer/src/cli/lsif.rs +++ b/crates/rust-analyzer/src/cli/lsif.rs @@ -83,7 +83,7 @@ impl LsifManager<'_> { // FIXME: support file in addition to stdout here fn emit(&self, data: &str) { - println!("{}", data); + println!("{data}"); } fn get_token_id(&mut self, id: TokenId) -> Id { @@ -253,7 +253,7 @@ impl LsifManager<'_> { }; let result = folds .into_iter() - .map(|it| to_proto::folding_range(&*text, &line_index, false, it)) + .map(|it| to_proto::folding_range(&text, &line_index, false, it)) .collect(); let folding_id = self.add_vertex(lsif::Vertex::FoldingRangeResult { result }); self.add_edge(lsif::Edge::FoldingRange(lsif::EdgeData { diff --git a/crates/rust-analyzer/src/cli/progress_report.rs b/crates/rust-analyzer/src/cli/progress_report.rs index 5a2dc39d52b39..d459dd115cebe 100644 --- a/crates/rust-analyzer/src/cli/progress_report.rs +++ b/crates/rust-analyzer/src/cli/progress_report.rs @@ -67,7 +67,7 @@ impl ProgressReport { return; } let percent = (self.curr * 100.0) as u32; - let text = format!("{}/{} {:3>}% {}", self.pos, self.len, percent, self.msg); + let text = format!("{}/{} {percent:3>}% {}", self.pos, self.len, self.msg); self.update_text(&text); } @@ -114,7 +114,7 @@ impl ProgressReport { // Fill all last text to space and return the cursor let spaces = " ".repeat(self.text.len()); let backspaces = "\x08".repeat(self.text.len()); - print!("{}{}{}", backspaces, spaces, backspaces); + print!("{backspaces}{spaces}{backspaces}"); let _ = io::stdout().flush(); self.text = String::new(); diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index 9edd045ab0716..b050d1e95ac1f 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -28,7 +28,7 @@ impl flags::Scip { let now = Instant::now(); let cargo_config = CargoConfig::default(); - let no_progress = &|s| (eprintln!("rust-analyzer: Loading {}", s)); + let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}")); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro: true, @@ -102,7 +102,7 @@ impl flags::Scip { let symbol = tokens_to_symbol .entry(id) .or_insert_with(|| { - let symbol = token_to_symbol(&token).unwrap_or_else(&mut 
new_local_symbol); + let symbol = token_to_symbol(token).unwrap_or_else(&mut new_local_symbol); scip::symbol::format_symbol(symbol) }) .clone(); @@ -176,7 +176,7 @@ fn get_relative_filepath( rootpath: &vfs::AbsPathBuf, file_id: ide::FileId, ) -> Option { - Some(vfs.file_path(file_id).as_path()?.strip_prefix(&rootpath)?.as_ref().to_str()?.to_string()) + Some(vfs.file_path(file_id).as_path()?.strip_prefix(rootpath)?.as_ref().to_str()?.to_string()) } // SCIP Ranges have a (very large) optimization that ranges if they are on the same line @@ -209,7 +209,7 @@ fn new_descriptor_str( fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_types::Descriptor { let mut name = name.to_string(); if name.contains("'") { - name = format!("`{}`", name); + name = format!("`{name}`"); } new_descriptor_str(name.as_str(), suffix) @@ -303,11 +303,11 @@ mod test { } if expected == "" { - assert!(found_symbol.is_none(), "must have no symbols {:?}", found_symbol); + assert!(found_symbol.is_none(), "must have no symbols {found_symbol:?}"); return; } - assert!(found_symbol.is_some(), "must have one symbol {:?}", found_symbol); + assert!(found_symbol.is_some(), "must have one symbol {found_symbol:?}"); let res = found_symbol.unwrap(); let formatted = format_symbol(res); assert_eq!(formatted, expected); diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs index e8291782b7ac1..84c48917167b6 100644 --- a/crates/rust-analyzer/src/cli/ssr.rs +++ b/crates/rust-analyzer/src/cli/ssr.rs @@ -70,7 +70,7 @@ impl flags::Search { let sr = db.source_root(root); for file_id in sr.iter() { for debug_info in match_finder.debug_where_text_equal(file_id, debug_snippet) { - println!("{:#?}", debug_info); + println!("{debug_info:#?}"); } } } diff --git a/crates/rust-analyzer/src/cli/symbols.rs b/crates/rust-analyzer/src/cli/symbols.rs index 84659b5ea9cd5..9fad6723afcd9 100644 --- a/crates/rust-analyzer/src/cli/symbols.rs +++ b/crates/rust-analyzer/src/cli/symbols.rs @@ -9,7 +9,7 @@ impl flags::Symbols { let (analysis, file_id) = Analysis::from_single_file(text); let structure = analysis.file_structure(file_id).unwrap(); for s in structure { - println!("{:?}", s); + println!("{s:?}"); } Ok(()) } diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 6b2f22faa7178..b0afbdc9a4265 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -20,7 +20,7 @@ use ide_db::{ SnippetCap, }; use itertools::Itertools; -use lsp_types::{ClientCapabilities, MarkupKind}; +use lsp_types::{ClientCapabilities, ClientInfo, MarkupKind}; use project_model::{ CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource, UnsetTestCrates, @@ -124,22 +124,23 @@ config_data! { /// Unsets `#[cfg(test)]` for the specified crates. cargo_unsetTest: Vec = "[\"core\"]", + /// Run the check command for diagnostics on save. + checkOnSave | checkOnSave_enable: bool = "true", + /// Check all targets and tests (`--all-targets`). - checkOnSave_allTargets: bool = "true", + check_allTargets | checkOnSave_allTargets: bool = "true", /// Cargo command to use for `cargo check`. - checkOnSave_command: String = "\"check\"", - /// Run specified `cargo check` command for diagnostics on save. - checkOnSave_enable: bool = "true", + check_command | checkOnSave_command: String = "\"check\"", /// Extra arguments for `cargo check`. 
- checkOnSave_extraArgs: Vec = "[]", + check_extraArgs | checkOnSave_extraArgs: Vec = "[]", /// Extra environment variables that will be set when running `cargo check`. /// Extends `#rust-analyzer.cargo.extraEnv#`. - checkOnSave_extraEnv: FxHashMap = "{}", + check_extraEnv | checkOnSave_extraEnv: FxHashMap = "{}", /// List of features to activate. Defaults to /// `#rust-analyzer.cargo.features#`. /// /// Set to `"all"` to pass `--all-features` to Cargo. - checkOnSave_features: Option = "null", + check_features | checkOnSave_features: Option = "null", /// Specifies the working directory for running checks. /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories. // FIXME: Ideally we would support this in some way @@ -147,19 +148,21 @@ config_data! { /// - "root": run checks in the project's root directory. /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` /// is set. - checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", + check_invocationLocation | checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"", /// Specifies the invocation strategy to use when running the checkOnSave command. /// If `per_workspace` is set, the command will be executed for each workspace. /// If `once` is set, the command will be executed once. /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#` /// is set. - checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", + check_invocationStrategy | checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"", /// Whether to pass `--no-default-features` to Cargo. Defaults to /// `#rust-analyzer.cargo.noDefaultFeatures#`. - checkOnSave_noDefaultFeatures: Option = "null", + check_noDefaultFeatures | checkOnSave_noDefaultFeatures: Option = "null", /// Override the command rust-analyzer uses instead of `cargo check` for /// diagnostics on save. The command is required to output json and - /// should therefore include `--message-format=json` or a similar option. + /// should therefore include `--message-format=json` or a similar option + /// (if your client supports the `colorDiagnosticOutput` experimental + /// capability, you can use `--message-format=json-diagnostic-rendered-ansi`). /// /// If you're changing this because you're using some tool wrapping /// Cargo, you might also want to change @@ -175,14 +178,14 @@ config_data! { /// cargo check --workspace --message-format=json --all-targets /// ``` /// . - checkOnSave_overrideCommand: Option> = "null", + check_overrideCommand | checkOnSave_overrideCommand: Option> = "null", /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty. /// /// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g. /// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`. /// /// Aliased as `"checkOnSave.targets"`. - checkOnSave_target | checkOnSave_targets: CheckOnSaveTargets = "[]", + check_targets | checkOnSave_targets | checkOnSave_target: Option = "null", /// Toggles the additional completions that automatically add imports when completed. /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled. @@ -327,12 +330,20 @@ config_data! { inlayHints_closingBraceHints_minLines: usize = "25", /// Whether to show inlay type hints for return types of closures. 
inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"", + /// Whether to show enum variant discriminant hints. + inlayHints_discriminantHints_enable: DiscriminantHintsDef = "\"never\"", /// Whether to show inlay hints for type adjustments. inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = "\"never\"", + /// Whether to hide inlay hints for type adjustments outside of `unsafe` blocks. + inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = "false", + /// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc). + inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = "\"prefix\"", /// Whether to show inlay type hints for elided lifetimes in function signatures. inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"", /// Whether to prefer using parameter names as the name for elided lifetime hints if possible. inlayHints_lifetimeElisionHints_useParameterNames: bool = "false", + /// Whether to use location links for parts of type mentioned in inlay hints. + inlayHints_locationLinks: bool = "true", /// Maximum length for inlay hints. Set to null to have an unlimited length. inlayHints_maxLength: Option = "25", /// Whether to show function parameter name inlay hints at the call @@ -403,6 +414,9 @@ config_data! { /// Whether to show `can't find Cargo.toml` error message. notifications_cargoTomlNotFound: bool = "true", + /// How many worker threads in the main loop. The default `null` means to pick automatically. + numThreads: Option = "null", + /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set. procMacro_attributes_enable: bool = "true", /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`. @@ -714,6 +728,19 @@ impl Config { } } + pub fn client_specific_adjustments(&mut self, client_info: &Option) { + // FIXME: remove this when we drop support for vscode 1.65 and below + if let Some(client) = client_info { + if client.name.contains("Code") || client.name.contains("Codium") { + if let Some(version) = &client.version { + if version.as_str() < "1.76" { + self.data.inlayHints_locationLinks = false; + } + } + } + } + } + pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> { tracing::info!("updating config from JSON: {:#}", json); if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) { @@ -767,9 +794,9 @@ impl Config { fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) { use serde::de::Error; - if self.data.checkOnSave_command.is_empty() { + if self.data.check_command.is_empty() { error_sink.push(( - "/checkOnSave/command".to_string(), + "/check/command".to_string(), serde_json::Error::custom("expected a non-empty string"), )); } @@ -981,6 +1008,11 @@ impl Config { self.experimental("serverStatusNotification") } + /// Whether the client supports colored output for full diagnostics from `checkOnSave`. 
+ pub fn color_diagnostic_output(&self) -> bool { + self.experimental("colorDiagnosticOutput") + } + pub fn publish_diagnostics(&self) -> bool { self.data.diagnostics_enable } @@ -1014,7 +1046,7 @@ impl Config { pub fn check_on_save_extra_env(&self) -> FxHashMap { let mut extra_env = self.data.cargo_extraEnv.clone(); - extra_env.extend(self.data.checkOnSave_extraEnv.clone()); + extra_env.extend(self.data.check_extraEnv.clone()); extra_env } @@ -1125,11 +1157,8 @@ impl Config { } } - pub fn flycheck(&self) -> Option { - if !self.data.checkOnSave_enable { - return None; - } - let flycheck_config = match &self.data.checkOnSave_overrideCommand { + pub fn flycheck(&self) -> FlycheckConfig { + match &self.data.check_overrideCommand { Some(args) if !args.is_empty() => { let mut args = args.clone(); let command = args.remove(0); @@ -1137,13 +1166,13 @@ impl Config { command, args, extra_env: self.check_on_save_extra_env(), - invocation_strategy: match self.data.checkOnSave_invocationStrategy { + invocation_strategy: match self.data.check_invocationStrategy { InvocationStrategy::Once => flycheck::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => { flycheck::InvocationStrategy::PerWorkspace } }, - invocation_location: match self.data.checkOnSave_invocationLocation { + invocation_location: match self.data.check_invocationLocation { InvocationLocation::Root => { flycheck::InvocationLocation::Root(self.root_path.clone()) } @@ -1152,34 +1181,43 @@ impl Config { } } Some(_) | None => FlycheckConfig::CargoCommand { - command: self.data.checkOnSave_command.clone(), - target_triples: match &self.data.checkOnSave_target.0[..] { - [] => self.data.cargo_target.clone().into_iter().collect(), - targets => targets.into(), - }, - all_targets: self.data.checkOnSave_allTargets, + command: self.data.check_command.clone(), + target_triples: self + .data + .check_targets + .clone() + .and_then(|targets| match &targets.0[..] 
{ + [] => None, + targets => Some(targets.into()), + }) + .unwrap_or_else(|| self.data.cargo_target.clone().into_iter().collect()), + all_targets: self.data.check_allTargets, no_default_features: self .data - .checkOnSave_noDefaultFeatures + .check_noDefaultFeatures .unwrap_or(self.data.cargo_noDefaultFeatures), all_features: matches!( - self.data.checkOnSave_features.as_ref().unwrap_or(&self.data.cargo_features), + self.data.check_features.as_ref().unwrap_or(&self.data.cargo_features), CargoFeaturesDef::All ), features: match self .data - .checkOnSave_features + .check_features .clone() .unwrap_or_else(|| self.data.cargo_features.clone()) { CargoFeaturesDef::All => vec![], CargoFeaturesDef::Selected(it) => it, }, - extra_args: self.data.checkOnSave_extraArgs.clone(), + extra_args: self.data.check_extraArgs.clone(), extra_env: self.check_on_save_extra_env(), + ansi_color_output: self.color_diagnostic_output(), }, - }; - Some(flycheck_config) + } + } + + pub fn check_on_save(&self) -> bool { + self.data.checkOnSave } pub fn runnables(&self) -> RunnablesConfig { @@ -1191,10 +1229,16 @@ impl Config { pub fn inlay_hints(&self) -> InlayHintsConfig { InlayHintsConfig { + location_links: self.data.inlayHints_locationLinks, render_colons: self.data.inlayHints_renderColons, type_hints: self.data.inlayHints_typeHints_enable, parameter_hints: self.data.inlayHints_parameterHints_enable, chaining_hints: self.data.inlayHints_chainingHints_enable, + discriminant_hints: match self.data.inlayHints_discriminantHints_enable { + DiscriminantHintsDef::Always => ide::DiscriminantHints::Always, + DiscriminantHintsDef::Never => ide::DiscriminantHints::Never, + DiscriminantHintsDef::Fieldless => ide::DiscriminantHints::Fieldless, + }, closure_return_type_hints: match self.data.inlayHints_closureReturnTypeHints_enable { ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always, ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never, @@ -1219,6 +1263,15 @@ impl Config { }, AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly, }, + adjustment_hints_mode: match self.data.inlayHints_expressionAdjustmentHints_mode { + AdjustmentHintsModeDef::Prefix => ide::AdjustmentHintsMode::Prefix, + AdjustmentHintsModeDef::Postfix => ide::AdjustmentHintsMode::Postfix, + AdjustmentHintsModeDef::PreferPrefix => ide::AdjustmentHintsMode::PreferPrefix, + AdjustmentHintsModeDef::PreferPostfix => ide::AdjustmentHintsMode::PreferPostfix, + }, + adjustment_hints_hide_outside_unsafe: self + .data + .inlayHints_expressionAdjustmentHints_hideOutsideUnsafe, binding_mode_hints: self.data.inlayHints_bindingModeHints_enable, param_names_for_lifetime_elision_hints: self .data @@ -1449,6 +1502,10 @@ impl Config { } } + pub fn main_loop_num_threads(&self) -> usize { + self.data.numThreads.unwrap_or(num_cpus::get_physical().try_into().unwrap_or(1)) + } + pub fn typing_autoclose_angle(&self) -> bool { self.data.typing_autoClosingAngleBrackets_enable } @@ -1553,6 +1610,7 @@ mod de_unit_v { named_unit_variant!(skip_trivial); named_unit_variant!(mutable); named_unit_variant!(reborrow); + named_unit_variant!(fieldless); named_unit_variant!(with_block); } @@ -1716,6 +1774,26 @@ enum AdjustmentHintsDef { Reborrow, } +#[derive(Deserialize, Debug, Clone)] +#[serde(untagged)] +enum DiscriminantHintsDef { + #[serde(deserialize_with = "true_or_always")] + Always, + #[serde(deserialize_with = "false_or_never")] + Never, + #[serde(deserialize_with = "de_unit_v::fieldless")] + Fieldless, +} + +#[derive(Deserialize, Debug, 
Clone)] +#[serde(rename_all = "snake_case")] +enum AdjustmentHintsModeDef { + Prefix, + Postfix, + PreferPrefix, + PreferPostfix, +} + #[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "snake_case")] enum FilesWatcherDef { @@ -1817,40 +1895,35 @@ fn get_field( alias: Option<&'static str>, default: &str, ) -> T { - let default = serde_json::from_str(default).unwrap(); // XXX: check alias first, to work-around the VS Code where it pre-fills the // defaults instead of sending an empty object. alias .into_iter() .chain(iter::once(field)) - .find_map(move |field| { + .filter_map(move |field| { let mut pointer = field.replace('_', "/"); pointer.insert(0, '/'); - json.pointer_mut(&pointer).and_then(|it| match serde_json::from_value(it.take()) { - Ok(it) => Some(it), - Err(e) => { - tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e); - error_sink.push((pointer, e)); - None - } - }) + json.pointer_mut(&pointer) + .map(|it| serde_json::from_value(it.take()).map_err(|e| (e, pointer))) + }) + .find(Result::is_ok) + .and_then(|res| match res { + Ok(it) => Some(it), + Err((e, pointer)) => { + tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e); + error_sink.push((pointer, e)); + None + } }) - .unwrap_or(default) + .unwrap_or_else(|| serde_json::from_str(default).unwrap()) } fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value { - for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) { - fn key(f: &str) -> &str { - f.splitn(2, '_').next().unwrap() - } - assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2); - } - let map = fields .iter() .map(|(field, ty, doc, default)| { let name = field.replace('_', "."); - let name = format!("rust-analyzer.{}", name); + let name = format!("rust-analyzer.{name}"); let props = field_props(field, ty, doc, default); (name, props) }) @@ -1863,9 +1936,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json let doc = doc.trim_end_matches('\n'); assert!( doc.ends_with('.') && doc.starts_with(char::is_uppercase), - "bad docs for {}: {:?}", - field, - doc + "bad docs for {field}: {doc:?}" ); let default = default.parse::().unwrap(); @@ -1921,15 +1992,6 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "type": ["null", "array"], "items": { "type": "string" }, }, - "MergeBehaviorDef" => set! { - "type": "string", - "enum": ["none", "crate", "module"], - "enumDescriptions": [ - "Do not merge imports at all.", - "Merge imports from the same crate into a single `use` statement.", - "Merge imports from the same module into a single `use` statement." - ], - }, "ExprFillDefaultDef" => set! { "type": "string", "enum": ["todo", "default"], @@ -2038,6 +2100,34 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "Only show auto borrow and dereference adjustment hints." ] }, + "DiscriminantHintsDef" => set! { + "type": "string", + "enum": [ + "always", + "never", + "fieldless" + ], + "enumDescriptions": [ + "Always show all discriminant hints.", + "Never show discriminant hints.", + "Only show discriminant hints on fieldless enum variants." + ] + }, + "AdjustmentHintsModeDef" => set! 
{ + "type": "string", + "enum": [ + "prefix", + "postfix", + "prefer_prefix", + "prefer_postfix", + ], + "enumDescriptions": [ + "Always show adjustment hints as prefix (`*expr`).", + "Always show adjustment hints as postfix (`expr.*`).", + "Show prefix or postfix depending on which uses less parentheses, preferring prefix.", + "Show prefix or postfix depending on which uses less parentheses, preferring postfix.", + ] + }, "CargoFeaturesDef" => set! { "anyOf": [ { @@ -2126,8 +2216,11 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json "The command will be executed in the project root." ], }, - "CheckOnSaveTargets" => set! { + "Option" => set! { "anyOf": [ + { + "type": "null" + }, { "type": "string", }, @@ -2137,7 +2230,7 @@ fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json }, ], }, - _ => panic!("missing entry for {}: {}", ty, default), + _ => panic!("missing entry for {ty}: {default}"), } map.into() @@ -2149,30 +2242,29 @@ fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String { .iter() .map(|(field, _ty, doc, default)| { let name = format!("rust-analyzer.{}", field.replace('_', ".")); - let doc = doc_comment_to_string(*doc); + let doc = doc_comment_to_string(doc); if default.contains('\n') { format!( - r#"[[{}]]{}:: + r#"[[{name}]]{name}:: + -- Default: ---- -{} +{default} ---- -{} +{doc} -- -"#, - name, name, default, doc +"# ) } else { - format!("[[{}]]{} (default: `{}`)::\n+\n--\n{}--\n", name, name, default, doc) + format!("[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n") } }) .collect::() } fn doc_comment_to_string(doc: &[&str]) -> String { - doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{}\n", it)).collect() + doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{it}\n")).collect() } #[cfg(test)] @@ -2186,7 +2278,7 @@ mod tests { #[test] fn generate_package_json_config() { let s = Config::json_schema(); - let schema = format!("{:#}", s); + let schema = format!("{s:#}"); let mut schema = schema .trim_start_matches('{') .trim_end_matches('}') diff --git a/crates/rust-analyzer/src/config/patch_old_style.rs b/crates/rust-analyzer/src/config/patch_old_style.rs index 472e2e0eeeabf..de6ac946a682f 100644 --- a/crates/rust-analyzer/src/config/patch_old_style.rs +++ b/crates/rust-analyzer/src/config/patch_old_style.rs @@ -4,6 +4,9 @@ use serde_json::{json, Value}; /// This function patches the json config to the new expected keys. /// That is we try to load old known config keys here and convert them to the new ones. /// See https://github.com/rust-lang/rust-analyzer/pull/12010 +/// +/// We already have an alias system for simple cases, but if we make structural changes +/// the alias infra falls down.
pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { let copy = json.clone(); @@ -105,9 +108,9 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { merge(json, json!({ "cargo": { "features": "all" } })); } - // checkOnSave_allFeatures, checkOnSave_features -> checkOnSave_features + // checkOnSave_allFeatures, checkOnSave_features -> check_features if let Some(Value::Bool(true)) = copy.pointer("/checkOnSave/allFeatures") { - merge(json, json!({ "checkOnSave": { "features": "all" } })); + merge(json, json!({ "check": { "features": "all" } })); } // completion_addCallArgumentSnippets completion_addCallParenthesis -> completion_callable_snippets @@ -116,11 +119,21 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { copy.pointer("/completion/addCallParenthesis"), ) { (Some(Value::Bool(true)), Some(Value::Bool(true))) => json!("fill_arguments"), - (Some(Value::Bool(true)), _) => json!("add_parentheses"), + (_, Some(Value::Bool(true))) => json!("add_parentheses"), (Some(Value::Bool(false)), Some(Value::Bool(false))) => json!("none"), (_, _) => return, }; merge(json, json!({ "completion": { "callable": {"snippets": res }} })); + + // We need to do this due to the checkOnSave_enable -> checkOnSave change, as that key now can either be an object or a bool + // checkOnSave_* -> check_* + if let Some(Value::Object(obj)) = copy.pointer("/checkOnSave") { + // checkOnSave_enable -> checkOnSave + if let Some(b @ Value::Bool(_)) = obj.get("enable") { + merge(json, json!({ "checkOnSave": b })); + } + merge(json, json!({ "check": obj })); + } } fn merge(dst: &mut Value, src: Value) { diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index f516c194da467..83b03fe473621 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs @@ -101,8 +101,7 @@ impl DiagnosticCollection { file_id: FileId, ) -> impl Iterator { let native = self.native.get(&file_id).into_iter().flatten(); - let check = - self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten(); + let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten(); native.chain(check) } diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index beb23c54c9f0f..acb416a068932 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -161,7 +161,7 @@ fn resolve_path( .iter() .find_map(|(from, to)| file_name.strip_prefix(from).map(|file_name| (to, file_name))) { - Some((to, file_name)) => workspace_root.join(format!("{}{}", to, file_name)), + Some((to, file_name)) => workspace_root.join(format!("{to}{file_name}")), None => workspace_root.join(file_name), } } @@ -191,6 +191,7 @@ fn map_rust_child_diagnostic( let mut edit_map: HashMap> = HashMap::new(); let mut suggested_replacements = Vec::new(); + let mut is_preferred = true; for &span in &spans { if let Some(suggested_replacement) = &span.suggested_replacement { if !suggested_replacement.is_empty() { @@ -209,6 +210,8 @@ fn map_rust_child_diagnostic( ) { edit_map.entry(location.uri).or_default().push(edit); } + is_preferred &= + matches!(span.suggestion_applicability, Some(Applicability::MachineApplicable)); } } @@ -218,7 +221,7 @@ fn map_rust_child_diagnostic( if !suggested_replacements.is_empty() { message.push_str(": "); let suggestions = - suggested_replacements.iter().map(|suggestion| format!("`{}`", suggestion)).join(", "); + 
suggested_replacements.iter().map(|suggestion| format!("`{suggestion}`")).join(", "); message.push_str(&suggestions); } @@ -251,7 +254,7 @@ fn map_rust_child_diagnostic( document_changes: None, change_annotations: None, }), - is_preferred: Some(true), + is_preferred: Some(is_preferred), data: None, command: None, }, @@ -493,7 +496,7 @@ fn rustc_code_description(code: Option<&str>) -> Option) -> Option) -> Option { code.and_then(|code| { lsp_types::Url::parse(&format!( - "https://rust-lang.github.io/rust-clippy/master/index.html#{}", - code + "https://rust-lang.github.io/rust-clippy/master/index.html#{code}" )) .ok() .map(|href| lsp_types::CodeDescription { href }) diff --git a/crates/rust-analyzer/src/dispatch.rs b/crates/rust-analyzer/src/dispatch.rs index 57899b5991466..715804449a045 100644 --- a/crates/rust-analyzer/src/dispatch.rs +++ b/crates/rust-analyzer/src/dispatch.rs @@ -145,7 +145,7 @@ impl<'a> RequestDispatcher<'a> { match res { Ok(params) => { let panic_context = - format!("\nversion: {}\nrequest: {} {:#?}", version(), R::METHOD, params); + format!("\nversion: {}\nrequest: {} {params:#?}", version(), R::METHOD); Some((req, params, panic_context)) } Err(err) => { diff --git a/crates/rust-analyzer/src/from_proto.rs b/crates/rust-analyzer/src/from_proto.rs index dd433b0f4d31c..2dbb14fcd9a67 100644 --- a/crates/rust-analyzer/src/from_proto.rs +++ b/crates/rust-analyzer/src/from_proto.rs @@ -25,12 +25,9 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result { pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result { let line_col = match line_index.encoding { - PositionEncoding::Utf8 => { - LineCol { line: position.line as u32, col: position.character as u32 } - } + PositionEncoding::Utf8 => LineCol { line: position.line, col: position.character }, PositionEncoding::Utf16 => { - let line_col = - LineColUtf16 { line: position.line as u32, col: position.character as u32 }; + let line_col = LineColUtf16 { line: position.line, col: position.character }; line_index.index.to_utf8(line_col) } }; @@ -67,7 +64,15 @@ pub(crate) fn file_range( text_document_identifier: lsp_types::TextDocumentIdentifier, range: lsp_types::Range, ) -> Result { - let file_id = file_id(snap, &text_document_identifier.uri)?; + file_range_uri(snap, &text_document_identifier.uri, range) +} + +pub(crate) fn file_range_uri( + snap: &GlobalStateSnapshot, + document: &lsp_types::Url, + range: lsp_types::Range, +) -> Result { + let file_id = file_id(snap, document)?; let line_index = snap.file_line_index(file_id)?; let range = text_range(&line_index, range)?; Ok(FileRange { file_id, range }) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 4e8bc8d6462ce..c6f4e9ce07f07 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -134,7 +134,7 @@ impl GlobalState { let task_pool = { let (sender, receiver) = unbounded(); - let handle = TaskPool::new(sender); + let handle = TaskPool::new_with_threads(sender, config.main_loop_num_threads()); Handle { handle, receiver } }; @@ -429,6 +429,6 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result { let path = from_proto::vfs_path(url)?; - let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?; + let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {path}"))?; Ok(res) } diff --git a/crates/rust-analyzer/src/handlers.rs 
b/crates/rust-analyzer/src/handlers.rs index d190a9f4e2ca9..59bdd3061272c 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -28,7 +28,8 @@ use lsp_types::{ use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; use stdx::{format_to, never}; -use syntax::{algo, ast, AstNode, TextRange, TextSize, T}; +use syntax::{algo, ast, AstNode, TextRange, TextSize}; +use tracing::error; use vfs::AbsPathBuf; use crate::{ @@ -729,7 +730,7 @@ pub(crate) fn handle_runnables( Some(spec) => { for cmd in ["check", "test"] { res.push(lsp_ext::Runnable { - label: format!("cargo {} -p {} --all-targets", cmd, spec.package), + label: format!("cargo {cmd} -p {} --all-targets", spec.package), location: None, kind: lsp_ext::RunnableKind::Cargo, args: lsp_ext::CargoRunnable { @@ -812,18 +813,6 @@ pub(crate) fn handle_completion( let completion_trigger_character = params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next()); - if Some(':') == completion_trigger_character { - let source_file = snap.analysis.parse(position.file_id)?; - let left_token = source_file.syntax().token_at_offset(position.offset).left_biased(); - let completion_triggered_after_single_colon = match left_token { - Some(left_token) => left_token.kind() == T![:], - None => true, - }; - if completion_triggered_after_single_colon { - return Ok(None); - } - } - let completion_config = &snap.config.completion(); let items = match snap.analysis.completions( completion_config, @@ -910,7 +899,7 @@ pub(crate) fn handle_folding_range( let line_folding_only = snap.config.line_folding_only(); let res = folds .into_iter() - .map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it)) + .map(|it| to_proto::folding_range(&text, &line_index, line_folding_only, it)) .collect(); Ok(Some(res)) } @@ -990,7 +979,7 @@ pub(crate) fn handle_rename( let position = from_proto::file_position(&snap, params.text_document_position)?; let mut change = - snap.analysis.rename(position, &*params.new_name)?.map_err(to_proto::rename_error)?; + snap.analysis.rename(position, ¶ms.new_name)?.map_err(to_proto::rename_error)?; // this is kind of a hack to prevent double edits from happening when moving files // When a module gets renamed by renaming the mod declaration this causes the file to move @@ -1112,9 +1101,7 @@ pub(crate) fn handle_code_action( } // Fixes from `cargo check`. - for fix in - snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten() - { + for fix in snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).flatten() { // FIXME: this mapping is awkward and shouldn't exist. Refactor // `snap.check_fixes` to not convert to LSP prematurely. 
let intersect_fix_range = fix @@ -1157,8 +1144,8 @@ pub(crate) fn handle_code_action_resolve( Ok(parsed_data) => parsed_data, Err(e) => { return Err(invalid_params_error(format!( - "Failed to parse action id string '{}': {}", - params.id, e + "Failed to parse action id string '{}': {e}", + params.id )) .into()) } @@ -1202,7 +1189,7 @@ fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> { let assist_kind: AssistKind = assist_kind_string.parse()?; let index: usize = match index_string.parse() { Ok(index) => index, - Err(e) => return Err(format!("Incorrect index string: {}", e)), + Err(e) => return Err(format!("Incorrect index string: {e}")), }; Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind })) } @@ -1384,9 +1371,26 @@ pub(crate) fn handle_inlay_hints_resolve( let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?; - let file_range = from_proto::file_range( + match snap.url_file_version(&resolve_data.text_document.uri) { + Some(version) if version == resolve_data.text_document.version => {} + Some(version) => { + error!( + "attempted inlayHints/resolve of '{}' at version {} while server version is {}", + resolve_data.text_document.uri, resolve_data.text_document.version, version, + ); + return Ok(hint); + } + None => { + error!( + "attempted inlayHints/resolve of unknown file '{}' at version {}", + resolve_data.text_document.uri, resolve_data.text_document.version, + ); + return Ok(hint); + } + } + let file_range = from_proto::file_range_uri( &snap, - resolve_data.text_document, + &resolve_data.text_document.uri, match resolve_data.position { PositionOrRange::Position(pos) => Range::new(pos, pos), PositionOrRange::Range(range) => range, @@ -1782,14 +1786,15 @@ fn run_rustfmt( let file_id = from_proto::file_id(snap, &text_document.uri)?; let file = snap.analysis.file_text(file_id)?; - // find the edition of the package the file belongs to - // (if it belongs to multiple we'll just pick the first one and pray) - let edition = snap + // Determine the edition of the crate the file belongs to (if there's multiple, we pick the + // highest edition). + let editions = snap .analysis .relevant_crates_for(file_id)? 
.into_iter() - .find_map(|crate_id| snap.cargo_target_for_crate_root(crate_id)) - .map(|(ws, target)| ws[ws[target].package].edition); + .map(|crate_id| snap.analysis.crate_edition(crate_id)) + .collect::, _>>()?; + let edition = editions.iter().copied().max(); let line_index = snap.file_line_index(file_id)?; @@ -1863,7 +1868,7 @@ fn run_rustfmt( .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() - .context(format!("Failed to spawn {:?}", command))?; + .context(format!("Failed to spawn {command:?}"))?; rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; @@ -1896,9 +1901,9 @@ fn run_rustfmt( format!( r#"rustfmt exited with: Status: {} - stdout: {} - stderr: {}"#, - output.status, captured_stdout, captured_stderr, + stdout: {captured_stdout} + stderr: {captured_stderr}"#, + output.status, ), ) .into()) diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 96b1cb6b12713..405d261db6fb4 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -48,7 +48,7 @@ fn integrated_highlighting_benchmark() { let file_id = { let file = workspace_to_load.join(file); let path = VfsPath::from(AbsPathBuf::assert(file)); - vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path)) + vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; { @@ -102,7 +102,7 @@ fn integrated_completion_benchmark() { let file_id = { let file = workspace_to_load.join(file); let path = VfsPath::from(AbsPathBuf::assert(file)); - vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path)) + vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}")) }; { diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index 552379752fa64..32dc3750fdf6b 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -55,7 +55,7 @@ pub type Result = std::result::Result; pub fn from_json(what: &'static str, json: &serde_json::Value) -> Result { let res = serde_json::from_value(json.clone()) - .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?; + .map_err(|e| format!("Failed to deserialize {what}: {e}; {json}"))?; Ok(res) } diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index 8cc5648f3ce0c..65620b4209b40 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -3,11 +3,11 @@ use std::{collections::HashMap, path::PathBuf}; use lsp_types::request::Request; -use lsp_types::PositionEncodingKind; use lsp_types::{ notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams, PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams, }; +use lsp_types::{PositionEncodingKind, VersionedTextDocumentIdentifier}; use serde::{Deserialize, Serialize}; pub enum AnalyzerStatus {} @@ -132,12 +132,31 @@ pub struct ExpandedMacro { pub enum CancelFlycheck {} -impl Request for CancelFlycheck { +impl Notification for CancelFlycheck { type Params = (); - type Result = (); const METHOD: &'static str = "rust-analyzer/cancelFlycheck"; } +pub enum RunFlycheck {} + +impl Notification for RunFlycheck { + type Params = RunFlycheckParams; + const METHOD: &'static str = "rust-analyzer/runFlycheck"; +} + +pub enum ClearFlycheck {} + +impl Notification for ClearFlycheck { + type Params = (); + const METHOD: &'static str = "rust-analyzer/clearFlycheck"; 
+} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct RunFlycheckParams { + pub text_document: Option, +} + pub enum MatchingBrace {} impl Request for MatchingBrace { @@ -550,7 +569,7 @@ pub struct CompletionResolveData { #[derive(Debug, Serialize, Deserialize)] pub struct InlayHintResolveData { - pub text_document: TextDocumentIdentifier, + pub text_document: VersionedTextDocumentIdentifier, pub position: PositionOrRange, } diff --git a/crates/rust-analyzer/src/lsp_utils.rs b/crates/rust-analyzer/src/lsp_utils.rs index 0971dc36f3a5c..dcaee92857abd 100644 --- a/crates/rust-analyzer/src/lsp_utils.rs +++ b/crates/rust-analyzer/src/lsp_utils.rs @@ -98,7 +98,7 @@ impl GlobalState { }); let cancellable = Some(cancel_token.is_some()); let token = lsp_types::ProgressToken::String( - cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{}", title)), + cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{title}")), ); let work_done_progress = match state { Progress::Begin => { diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 274588ce0e076..0bc940dfe8dab 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -229,8 +229,8 @@ impl GlobalState { message = match &report.crates_currently_indexing[..] { [crate_name] => Some(format!( - "{}/{} ({})", - report.crates_done, report.crates_total, crate_name + "{}/{} ({crate_name})", + report.crates_done, report.crates_total )), [crate_name, rest @ ..] => Some(format!( "{}/{} ({} + {} more)", @@ -414,10 +414,7 @@ impl GlobalState { let loop_duration = loop_start.elapsed(); if loop_duration > Duration::from_millis(100) && was_quiescent { tracing::warn!("overly long loop turn: {:?}", loop_duration); - self.poke_rust_analyzer_developer(format!( - "overly long loop turn: {:?}", - loop_duration - )); + self.poke_rust_analyzer_developer(format!("overly long loop turn: {loop_duration:?}")); } Ok(()) } @@ -516,7 +513,7 @@ impl GlobalState { self.report_progress( "Roots Scanned", state, - Some(format!("{}/{}", n_done, n_total)), + Some(format!("{n_done}/{n_total}")), Some(Progress::fraction(n_done, n_total)), None, ) @@ -561,10 +558,7 @@ impl GlobalState { flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)), flycheck::Progress::DidCancel => (Progress::End, None), flycheck::Progress::DidFailToRestart(err) => { - self.show_and_log_error( - "cargo check failed".to_string(), - Some(err.to_string()), - ); + self.show_and_log_error("cargo check failed".to_string(), Some(err)); return; } flycheck::Progress::DidFinish(result) => { @@ -581,10 +575,7 @@ impl GlobalState { // When we're running multiple flychecks, we have to include a disambiguator in // the title, or the editor complains. Note that this is a user-facing string. 
let title = if self.flycheck.len() == 1 { - match self.config.flycheck() { - Some(config) => format!("{}", config), - None => "cargo check".to_string(), - } + format!("{}", self.config.flycheck()) } else { format!("cargo check (#{})", id + 1) }; @@ -593,7 +584,7 @@ impl GlobalState { state, message, None, - Some(format!("rust-analyzer/checkOnSave/{}", id)), + Some(format!("rust-analyzer/flycheck/{id}")), ); } } @@ -638,7 +629,6 @@ impl GlobalState { .on_sync_mut::(handlers::handle_workspace_reload) .on_sync_mut::(handlers::handle_memory_usage) .on_sync_mut::(handlers::handle_shuffle_crate_graph) - .on_sync_mut::(handlers::handle_cancel_flycheck) .on_sync::(handlers::handle_join_lines) .on_sync::(handlers::handle_on_enter) .on_sync::(handlers::handle_selection_range) @@ -703,6 +693,88 @@ impl GlobalState { /// Handles an incoming notification. fn on_notification(&mut self, not: Notification) -> Result<()> { + // FIXME: Move these implementations out into a module similar to on_request + fn run_flycheck(this: &mut GlobalState, vfs_path: VfsPath) -> bool { + let file_id = this.vfs.read().0.file_id(&vfs_path); + if let Some(file_id) = file_id { + let world = this.snapshot(); + let mut updated = false; + let task = move || -> std::result::Result<(), ide::Cancelled> { + // Trigger flychecks for all workspaces that depend on the saved file + // Crates containing or depending on the saved file + let crate_ids: Vec<_> = world + .analysis + .crates_for(file_id)? + .into_iter() + .flat_map(|id| world.analysis.transitive_rev_deps(id)) + .flatten() + .sorted() + .unique() + .collect(); + + let crate_root_paths: Vec<_> = crate_ids + .iter() + .filter_map(|&crate_id| { + world + .analysis + .crate_root(crate_id) + .map(|file_id| { + world + .file_id_to_file_path(file_id) + .as_path() + .map(ToOwned::to_owned) + }) + .transpose() + }) + .collect::>()?; + let crate_root_paths: Vec<_> = + crate_root_paths.iter().map(Deref::deref).collect(); + + // Find all workspaces that have at least one target containing the saved file + let workspace_ids = + world.workspaces.iter().enumerate().filter(|(_, ws)| match ws { + project_model::ProjectWorkspace::Cargo { cargo, .. } => { + cargo.packages().any(|pkg| { + cargo[pkg].targets.iter().any(|&it| { + crate_root_paths.contains(&cargo[it].root.as_path()) + }) + }) + } + project_model::ProjectWorkspace::Json { project, .. } => project + .crates() + .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)), + project_model::ProjectWorkspace::DetachedFiles { .. } => false, + }); + + // Find and trigger corresponding flychecks + for flycheck in world.flycheck.iter() { + for (id, _) in workspace_ids.clone() { + if id == flycheck.id() { + updated = true; + flycheck.restart(); + continue; + } + } + } + // No specific flycheck was triggered, so let's trigger all of them. + if !updated { + for flycheck in world.flycheck.iter() { + flycheck.restart(); + } + } + Ok(()) + }; + this.task_pool.handle.spawn_with_sender(move |_| { + if let Err(e) = std::panic::catch_unwind(task) { + tracing::error!("flycheck task panicked: {e:?}") + } + }); + true + } else { + false + } + } + NotificationDispatcher { not: Some(not), global_state: self } .on::(|this, params| { let id: lsp_server::RequestId = match params.id { @@ -714,7 +786,7 @@ impl GlobalState { })? 
.on::(|this, params| { if let lsp_types::NumberOrString::String(s) = ¶ms.token { - if let Some(id) = s.strip_prefix("rust-analyzer/checkOnSave/") { + if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") { if let Ok(id) = u32::from_str_radix(id, 10) { if let Some(flycheck) = this.flycheck.get(id as usize) { flycheck.cancel(); @@ -743,6 +815,7 @@ impl GlobalState { } Ok(()) })? + .on::(handlers::handle_cancel_flycheck)? .on::(|this, params| { if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { match this.mem_docs.get_mut(&path) { @@ -782,99 +855,42 @@ impl GlobalState { } Ok(()) })? + .on::(|this, ()| { + this.diagnostics.clear_check_all(); + Ok(()) + })? + .on::(|this, params| { + if let Some(text_document) = params.text_document { + if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) { + if run_flycheck(this, vfs_path) { + return Ok(()); + } + } + } + // No specific flycheck was triggered, so let's trigger all of them. + for flycheck in this.flycheck.iter() { + flycheck.restart(); + } + Ok(()) + })? .on::(|this, params| { if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { // Re-fetch workspaces if a workspace related file has changed if let Some(abs_path) = vfs_path.as_path() { - if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) { + if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) { this.fetch_workspaces_queue .request_op(format!("DidSaveTextDocument {}", abs_path.display())); } } - let file_id = this.vfs.read().0.file_id(&vfs_path); - if let Some(file_id) = file_id { - let world = this.snapshot(); - let mut updated = false; - let task = move || -> std::result::Result<(), ide::Cancelled> { - // Trigger flychecks for all workspaces that depend on the saved file - // Crates containing or depending on the saved file - let crate_ids: Vec<_> = world - .analysis - .crates_for(file_id)? - .into_iter() - .flat_map(|id| world.analysis.transitive_rev_deps(id)) - .flatten() - .sorted() - .unique() - .collect(); - - let crate_root_paths: Vec<_> = crate_ids - .iter() - .filter_map(|&crate_id| { - world - .analysis - .crate_root(crate_id) - .map(|file_id| { - world - .file_id_to_file_path(file_id) - .as_path() - .map(ToOwned::to_owned) - }) - .transpose() - }) - .collect::>()?; - let crate_root_paths: Vec<_> = - crate_root_paths.iter().map(Deref::deref).collect(); - - // Find all workspaces that have at least one target containing the saved file - let workspace_ids = - world.workspaces.iter().enumerate().filter(|(_, ws)| match ws { - project_model::ProjectWorkspace::Cargo { cargo, .. } => { - cargo.packages().any(|pkg| { - cargo[pkg].targets.iter().any(|&it| { - crate_root_paths.contains(&cargo[it].root.as_path()) - }) - }) - } - project_model::ProjectWorkspace::Json { project, .. } => { - project.crates().any(|(c, _)| { - crate_ids.iter().any(|&crate_id| crate_id == c) - }) - } - project_model::ProjectWorkspace::DetachedFiles { .. } => false, - }); - - // Find and trigger corresponding flychecks - for flycheck in world.flycheck.iter() { - for (id, _) in workspace_ids.clone() { - if id == flycheck.id() { - updated = true; - flycheck.restart(); - continue; - } - } - } - // No specific flycheck was triggered, so let's trigger all of them. 
- if !updated { - for flycheck in world.flycheck.iter() { - flycheck.restart(); - } - } - Ok(()) - }; - this.task_pool.handle.spawn_with_sender(move |_| { - if let Err(e) = std::panic::catch_unwind(task) { - tracing::error!("DidSaveTextDocument flycheck task panicked: {e:?}") - } - }); + if !this.config.check_on_save() || run_flycheck(this, vfs_path) { return Ok(()); } - } - - // No specific flycheck was triggered, so let's trigger all of them. - for flycheck in this.flycheck.iter() { - flycheck.restart(); + } else if this.config.check_on_save() { + // No specific flycheck was triggered, so let's trigger all of them. + for flycheck in this.flycheck.iter() { + flycheck.restart(); + } } Ok(()) })? diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index fcfe4be0b8cec..9bbce70ec0a8f 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -158,8 +158,10 @@ impl GlobalState { .collect::>(); if !detached_files.is_empty() { - workspaces - .push(project_model::ProjectWorkspace::load_detached_files(detached_files)); + workspaces.push(project_model::ProjectWorkspace::load_detached_files( + detached_files, + &cargo_config, + )); } tracing::info!("did fetch workspaces {:?}", workspaces); @@ -224,6 +226,7 @@ impl GlobalState { build_scripts: _, toolchain: _, + target_layout: _, } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)), _ => None, }; @@ -447,15 +450,7 @@ impl GlobalState { fn reload_flycheck(&mut self) { let _p = profile::span("GlobalState::reload_flycheck"); - let config = match self.config.flycheck() { - Some(it) => it, - None => { - self.flycheck = Arc::new([]); - self.diagnostics.clear_check_all(); - return; - } - }; - + let config = self.config.flycheck(); let sender = self.flycheck_sender.clone(); let invocation_strategy = match config { FlycheckConfig::CargoCommand { .. 
} => flycheck::InvocationStrategy::PerWorkspace, @@ -466,7 +461,7 @@ impl GlobalState { flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn( 0, Box::new(move |msg| sender.send(msg).unwrap()), - config.clone(), + config, self.config.root_path().clone(), )], flycheck::InvocationStrategy::PerWorkspace => { diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs index c48410ed55e98..c2cc3f422d206 100644 --- a/crates/rust-analyzer/src/semantic_tokens.rs +++ b/crates/rust-analyzer/src/semantic_tokens.rs @@ -161,8 +161,8 @@ impl SemanticTokensBuilder { /// Push a new token onto the builder pub(crate) fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) { - let mut push_line = range.start.line as u32; - let mut push_char = range.start.character as u32; + let mut push_line = range.start.line; + let mut push_char = range.start.character; if !self.data.is_empty() { push_line -= self.prev_line; @@ -177,15 +177,15 @@ impl SemanticTokensBuilder { let token = SemanticToken { delta_line: push_line, delta_start: push_char, - length: token_len as u32, + length: token_len, token_type: token_index, token_modifiers_bitset: modifier_bitset, }; self.data.push(token); - self.prev_line = range.start.line as u32; - self.prev_char = range.start.character as u32; + self.prev_line = range.start.line; + self.prev_char = range.start.character; } pub(crate) fn build(self) -> SemanticTokens { diff --git a/crates/rust-analyzer/src/task_pool.rs b/crates/rust-analyzer/src/task_pool.rs index aeeb3b7c582b1..616e449984ae6 100644 --- a/crates/rust-analyzer/src/task_pool.rs +++ b/crates/rust-analyzer/src/task_pool.rs @@ -8,12 +8,13 @@ pub(crate) struct TaskPool { } impl TaskPool { - pub(crate) fn new(sender: Sender) -> TaskPool { + pub(crate) fn new_with_threads(sender: Sender, threads: usize) -> TaskPool { const STACK_SIZE: usize = 8 * 1024 * 1024; let inner = threadpool::Builder::new() .thread_name("Worker".into()) .thread_stack_size(STACK_SIZE) + .num_threads(threads) .build(); TaskPool { sender, inner } } diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 81cc1952ba5ca..e736b2ff9a3be 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -228,7 +228,7 @@ fn completion_item( max_relevance: u32, item: CompletionItem, ) { - let insert_replace_support = config.insert_replace_support().then(|| tdpp.position); + let insert_replace_support = config.insert_replace_support().then_some(tdpp.position); let mut additional_text_edits = Vec::new(); // LSP does not allow arbitrary edits in completion, so we have to do a @@ -258,7 +258,7 @@ fn completion_item( text_edit.unwrap() }; - let insert_text_format = item.is_snippet().then(|| lsp_types::InsertTextFormat::SNIPPET); + let insert_text_format = item.is_snippet().then_some(lsp_types::InsertTextFormat::SNIPPET); let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]); let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints { Some(command::trigger_parameter_hints()) @@ -342,7 +342,7 @@ fn completion_item( // by the client. Hex format is used because it is easier to // visually compare very large values, which the sort text // tends to be since it is the opposite of the score. 
- res.sort_text = Some(format!("{:08x}", sort_score)); + res.sort_text = Some(format!("{sort_score:08x}")); } } @@ -434,42 +434,52 @@ pub(crate) fn inlay_hint( InlayKind::ParameterHint if render_colons => inlay_hint.label.append_str(":"), InlayKind::TypeHint if render_colons => inlay_hint.label.prepend_str(": "), InlayKind::ClosureReturnTypeHint => inlay_hint.label.prepend_str(" -> "), + InlayKind::DiscriminantHint => inlay_hint.label.prepend_str(" = "), _ => {} } Ok(lsp_types::InlayHint { position: match inlay_hint.kind { // before annotated thing - InlayKind::ParameterHint | InlayKind::AdjustmentHint | InlayKind::BindingModeHint => { - position(line_index, inlay_hint.range.start()) - } + InlayKind::OpeningParenthesis + | InlayKind::ParameterHint + | InlayKind::AdjustmentHint + | InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()), // after annotated thing InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint + | InlayKind::DiscriminantHint | InlayKind::ChainingHint | InlayKind::GenericParamListHint - | InlayKind::AdjustmentHintClosingParenthesis + | InlayKind::ClosingParenthesis + | InlayKind::AdjustmentHintPostfix | InlayKind::LifetimeHint | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()), }, padding_left: Some(match inlay_hint.kind { InlayKind::TypeHint => !render_colons, InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true, - InlayKind::AdjustmentHintClosingParenthesis + InlayKind::ClosingParenthesis + | InlayKind::DiscriminantHint + | InlayKind::OpeningParenthesis | InlayKind::BindingModeHint | InlayKind::ClosureReturnTypeHint | InlayKind::GenericParamListHint | InlayKind::AdjustmentHint + | InlayKind::AdjustmentHintPostfix | InlayKind::LifetimeHint | InlayKind::ParameterHint => false, }), padding_right: Some(match inlay_hint.kind { - InlayKind::AdjustmentHintClosingParenthesis + InlayKind::ClosingParenthesis + | InlayKind::OpeningParenthesis | InlayKind::ChainingHint | InlayKind::ClosureReturnTypeHint | InlayKind::GenericParamListHint | InlayKind::AdjustmentHint + | InlayKind::AdjustmentHintPostfix | InlayKind::TypeHint + | InlayKind::DiscriminantHint | InlayKind::ClosingBraceHint => false, InlayKind::BindingModeHint => inlay_hint.label.as_simple_str() != Some("&"), InlayKind::ParameterHint | InlayKind::LifetimeHint => true, @@ -479,11 +489,14 @@ pub(crate) fn inlay_hint( InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => { Some(lsp_types::InlayHintKind::TYPE) } - InlayKind::AdjustmentHintClosingParenthesis + InlayKind::ClosingParenthesis + | InlayKind::DiscriminantHint + | InlayKind::OpeningParenthesis | InlayKind::BindingModeHint | InlayKind::GenericParamListHint | InlayKind::LifetimeHint | InlayKind::AdjustmentHint + | InlayKind::AdjustmentHintPostfix | InlayKind::ClosingBraceHint => None, }, text_edits: None, @@ -492,7 +505,10 @@ pub(crate) fn inlay_hint( let uri = url(snap, file_id); let line_index = snap.file_line_index(file_id).ok()?; - let text_document = lsp_types::TextDocumentIdentifier { uri }; + let text_document = lsp_types::VersionedTextDocumentIdentifier { + version: snap.url_file_version(&uri)?, + uri, + }; to_value(lsp_ext::InlayHintResolveData { text_document, position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)), @@ -501,7 +517,10 @@ pub(crate) fn inlay_hint( } Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => { let uri = url(snap, file_id); - let text_document = lsp_types::TextDocumentIdentifier { uri }; + let text_document = 
lsp_types::VersionedTextDocumentIdentifier { + version: snap.url_file_version(&uri)?, + uri, + }; let line_index = snap.file_line_index(file_id).ok()?; to_value(lsp_ext::InlayHintResolveData { text_document, @@ -1103,7 +1122,7 @@ pub(crate) fn code_action( (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?), (None, Some((index, code_action_params))) => { res.data = Some(lsp_ext::CodeActionData { - id: format!("{}:{}:{}", assist.id.0, assist.id.1.name(), index), + id: format!("{}:{}:{index}", assist.id.0, assist.id.1.name()), code_action_params, }); } @@ -1164,7 +1183,10 @@ pub(crate) fn code_lens( let r = runnable(snap, run)?; let lens_config = snap.config.lens(); - if lens_config.run && client_commands_config.run_single { + if lens_config.run + && client_commands_config.run_single + && r.args.workspace_root.is_some() + { let command = command::run_single(&r, &title); acc.push(lsp_types::CodeLens { range: annotation_range, @@ -1339,7 +1361,7 @@ pub(crate) fn implementation_title(count: usize) -> String { if count == 1 { "1 implementation".into() } else { - format!("{} implementations", count) + format!("{count} implementations") } } @@ -1347,7 +1369,7 @@ pub(crate) fn reference_title(count: usize) -> String { if count == 1 { "1 reference".into() } else { - format!("{} references", count) + format!("{count} references") } } diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index fa55f7d90c49d..5e3e19d44d738 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -263,7 +263,7 @@ mod tests { for runnable in ["consumer", "dependency", "devdependency"] { server.request::( RunnablesParams { - text_document: server.doc_id(&format!("{}/src/lib.rs", runnable)), + text_document: server.doc_id(&format!("{runnable}/src/lib.rs")), position: None, }, json!([ @@ -528,14 +528,13 @@ fn test_missing_module_code_action_in_json_project() { let code = format!( r#" //- /rust-project.json -{PROJECT} +{project} //- /src/lib.rs mod bar; fn main() {{}} "#, - PROJECT = project, ); let server = @@ -595,8 +594,8 @@ fn diagnostics_dont_block_typing() { return; } - let librs: String = (0..10).map(|i| format!("mod m{};", i)).collect(); - let libs: String = (0..10).map(|i| format!("//- /src/m{}.rs\nfn foo() {{}}\n\n", i)).collect(); + let librs: String = (0..10).map(|i| format!("mod m{i};")).collect(); + let libs: String = (0..10).map(|i| format!("//- /src/m{i}.rs\nfn foo() {{}}\n\n")).collect(); let server = Project::with_fixture(&format!( r#" //- /Cargo.toml @@ -605,13 +604,12 @@ name = "foo" version = "0.0.0" //- /src/lib.rs -{} +{librs} -{} +{libs} fn main() {{}} -"#, - librs, libs +"# )) .with_config(serde_json::json!({ "cargo": { "sysroot": "discover" } @@ -622,7 +620,7 @@ fn main() {{}} for i in 0..10 { server.notification::(DidOpenTextDocumentParams { text_document: TextDocumentItem { - uri: server.doc_id(&format!("src/m{}.rs", i)).uri, + uri: server.doc_id(&format!("src/m{i}.rs")).uri, language_id: "rust".to_string(), version: 0, text: "/// Docs\nfn foo() {}".to_string(), @@ -645,7 +643,7 @@ fn main() {{}} }]), ); let elapsed = start.elapsed(); - assert!(elapsed.as_millis() < 2000, "typing enter took {:?}", elapsed); + assert!(elapsed.as_millis() < 2000, "typing enter took {elapsed:?}"); } #[test] @@ -942,7 +940,7 @@ fn test_will_rename_files_same_level() { let tmp_dir = TestDir::new(); let tmp_dir_path = tmp_dir.path().to_owned(); let tmp_dir_str = tmp_dir_path.to_str().unwrap(); - 
let base_path = PathBuf::from(format!("file://{}", tmp_dir_str)); + let base_path = PathBuf::from(format!("file://{tmp_dir_str}")); let code = r#" //- /Cargo.toml diff --git a/crates/rust-analyzer/tests/slow-tests/sourcegen.rs b/crates/rust-analyzer/tests/slow-tests/sourcegen.rs index e6ac018a05fea..2eafb0da69210 100644 --- a/crates/rust-analyzer/tests/slow-tests/sourcegen.rs +++ b/crates/rust-analyzer/tests/slow-tests/sourcegen.rs @@ -14,7 +14,7 @@ fn sourcegen_feature_docs() { contents.trim() ); let dst = sourcegen::project_root().join("docs/user/generated_features.adoc"); - fs::write(&dst, &contents).unwrap(); + fs::write(dst, contents).unwrap(); } #[derive(Debug)] @@ -42,7 +42,7 @@ impl Feature { for block in comment_blocks { let id = block.id; if let Err(msg) = is_valid_feature_name(&id) { - panic!("invalid feature name: {:?}:\n {}", id, msg) + panic!("invalid feature name: {id:?}:\n {msg}") } let doc = block.contents.join("\n"); let location = sourcegen::Location { file: path.clone(), line: block.line }; @@ -63,11 +63,11 @@ fn is_valid_feature_name(feature: &str) -> Result<(), String> { } for short in ["To", "And"] { if word == short { - return Err(format!("Don't capitalize {:?}", word)); + return Err(format!("Don't capitalize {word:?}")); } } if !word.starts_with(char::is_uppercase) { - return Err(format!("Capitalize {:?}", word)); + return Err(format!("Capitalize {word:?}")); } } Ok(()) diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index 7257445dabe05..269212ebb99c1 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -216,7 +216,7 @@ impl Server { fn send_request_(&self, r: Request) -> Value { let id = r.id.clone(); self.client.sender.send(r.clone().into()).unwrap(); - while let Some(msg) = self.recv().unwrap_or_else(|Timeout| panic!("timeout: {:?}", r)) { + while let Some(msg) = self.recv().unwrap_or_else(|Timeout| panic!("timeout: {r:?}")) { match msg { Message::Request(req) => { if req.method == "client/registerCapability" { @@ -228,19 +228,19 @@ impl Server { continue; } } - panic!("unexpected request: {:?}", req) + panic!("unexpected request: {req:?}") } Message::Notification(_) => (), Message::Response(res) => { assert_eq!(res.id, id); if let Some(err) = res.error { - panic!("error response: {:#?}", err); + panic!("error response: {err:#?}"); } return res.result.unwrap(); } } } - panic!("no response for {:?}", r); + panic!("no response for {r:?}"); } pub(crate) fn wait_until_workspace_is_loaded(self) -> Server { self.wait_for_message_cond(1, &|msg: &Message| match msg { diff --git a/crates/rust-analyzer/tests/slow-tests/testdir.rs b/crates/rust-analyzer/tests/slow-tests/testdir.rs index 3bec23a911750..f7fceb5888696 100644 --- a/crates/rust-analyzer/tests/slow-tests/testdir.rs +++ b/crates/rust-analyzer/tests/slow-tests/testdir.rs @@ -28,7 +28,7 @@ impl TestDir { static CNT: AtomicUsize = AtomicUsize::new(0); for _ in 0..100 { let cnt = CNT.fetch_add(1, Ordering::Relaxed); - let path = base.join(format!("{}_{}", pid, cnt)); + let path = base.join(format!("{pid}_{cnt}")); if path.is_dir() { continue; } @@ -53,7 +53,7 @@ impl Drop for TestDir { return; } remove_dir_all(&self.path).unwrap_or_else(|err| { - panic!("failed to remove temporary directory {}: {}", self.path.display(), err) + panic!("failed to remove temporary directory {}: {err}", self.path.display()) }) } } diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs 
b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 24e68eca676d7..35b5af731925e 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -56,12 +56,11 @@ fn check_lsp_extensions_docs() { " lsp_ext.rs was changed without touching lsp-extensions.md. -Expected hash: {:x} -Actual hash: {:x} +Expected hash: {expected_hash:x} +Actual hash: {actual_hash:x} Please adjust docs/dev/lsp-extensions.md. -", - expected_hash, actual_hash +" ) } } @@ -194,6 +193,7 @@ MIT OR Apache-2.0 MIT OR Apache-2.0 OR Zlib MIT OR Zlib OR Apache-2.0 MIT/Apache-2.0 +Unlicense OR MIT Unlicense/MIT Zlib OR Apache-2.0 OR MIT " @@ -216,18 +216,18 @@ Zlib OR Apache-2.0 OR MIT diff.push_str("New Licenses:\n"); for &l in licenses.iter() { if !expected.contains(&l) { - diff += &format!(" {}\n", l) + diff += &format!(" {l}\n") } } diff.push_str("\nMissing Licenses:\n"); for &l in expected.iter() { if !licenses.contains(&l) { - diff += &format!(" {}\n", l) + diff += &format!(" {l}\n") } } - panic!("different set of licenses!\n{}", diff); + panic!("different set of licenses!\n{diff}"); } assert_eq!(licenses, expected); } @@ -316,7 +316,7 @@ fn check_test_attrs(path: &Path, text: &str) { "ide-assists/src/tests/generated.rs", ]; if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) { - panic!("\ndon't `#[ignore]` tests, see:\n\n {}\n\n {}\n", ignore_rule, path.display(),) + panic!("\ndon't `#[ignore]` tests, see:\n\n {ignore_rule}\n\n {}\n", path.display(),) } let panic_rule = @@ -438,7 +438,7 @@ impl TidyMarks { self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect(); if !diff.is_empty() { - panic!("unpaired marks: {:?}", diff) + panic!("unpaired marks: {diff:?}") } } } diff --git a/crates/sourcegen/src/lib.rs b/crates/sourcegen/src/lib.rs index 4e0ee63f32f26..72d26635c3369 100644 --- a/crates/sourcegen/src/lib.rs +++ b/crates/sourcegen/src/lib.rs @@ -57,7 +57,7 @@ impl CommentBlock { pub fn extract(tag: &str, text: &str) -> Vec { assert!(tag.starts_with(char::is_uppercase)); - let tag = format!("{}:", tag); + let tag = format!("{tag}:"); // Would be nice if we had `.retain_mut` here! CommentBlock::extract_untagged(text) .into_iter() @@ -65,10 +65,7 @@ impl CommentBlock { let first = block.contents.remove(0); first.strip_prefix(&tag).map(|id| { if block.is_doc { - panic!( - "Use plain (non-doc) comments with tags like {}:\n {}", - tag, first - ); + panic!("Use plain (non-doc) comments with tags like {tag}:\n {first}"); } block.id = id.trim().to_string(); @@ -122,7 +119,7 @@ pub struct Location { impl fmt::Display for Location { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string(); + let path = self.file.strip_prefix(project_root()).unwrap().display().to_string(); let path = path.replace('\\', "/"); let name = self.file.file_name().unwrap(); write!( @@ -163,7 +160,7 @@ pub fn reformat(text: String) -> String { } pub fn add_preamble(generator: &'static str, mut text: String) -> String { - let preamble = format!("//! Generated by `{}`, do not edit by hand.\n\n", generator); + let preamble = format!("//! 
Generated by `{generator}`, do not edit by hand.\n\n"); text.insert_str(0, &preamble); text } @@ -178,7 +175,7 @@ pub fn ensure_file_contents(file: &Path, contents: &str) { } } - let display_path = file.strip_prefix(&project_root()).unwrap_or(file); + let display_path = file.strip_prefix(project_root()).unwrap_or(file); eprintln!( "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n", display_path.display() diff --git a/crates/stdx/src/hash.rs b/crates/stdx/src/hash.rs index 9909d71bdf066..0c21d2674b1a0 100644 --- a/crates/stdx/src/hash.rs +++ b/crates/stdx/src/hash.rs @@ -51,7 +51,7 @@ impl Hasher for NoHashHasher { } fn write_u64(&mut self, i: u64) { - self.0 = i as u64; + self.0 = i; } fn write_usize(&mut self, i: usize) { diff --git a/crates/stdx/src/panic_context.rs b/crates/stdx/src/panic_context.rs index f8fafc5a67724..c3e8813b0e813 100644 --- a/crates/stdx/src/panic_context.rs +++ b/crates/stdx/src/panic_context.rs @@ -25,7 +25,7 @@ impl PanicContext { if !ctx.is_empty() { eprintln!("Panic context:"); for frame in ctx.iter() { - eprintln!("> {}\n", frame); + eprintln!("> {frame}\n"); } } default_hook(panic_info); @@ -45,5 +45,5 @@ fn with_ctx(f: impl FnOnce(&mut Vec)) { thread_local! { static CTX: RefCell> = RefCell::new(Vec::new()); } - CTX.with(|ctx| f(&mut *ctx.borrow_mut())); + CTX.with(|ctx| f(&mut ctx.borrow_mut())); } diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index 0a0cb0290d6cb..2c67586a3905d 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -359,6 +359,7 @@ Expr = | TupleExpr | WhileExpr | YieldExpr +| YeetExpr | LetExpr | UnderscoreExpr @@ -503,6 +504,9 @@ ReturnExpr = YieldExpr = Attr* 'yield' Expr? +YeetExpr = + Attr* 'do' 'yeet' Expr? + LetExpr = Attr* 'let' Pat '=' Expr diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs index 8b14789dd917d..c402a7bceaebc 100644 --- a/crates/syntax/src/algo.rs +++ b/crates/syntax/src/algo.rs @@ -616,7 +616,7 @@ fn main() { let fmt_syntax = |syn: &SyntaxElement| match syn.kind() { SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()), - _ => format!("{}", syn), + _ => format!("{syn}"), }; let insertions = @@ -637,7 +637,7 @@ fn main() { .iter() .sorted_by_key(|(syntax, _)| syntax.text_range().start()) .format_with("\n", |(k, v), f| { - f(&format!("Line {}: {:?} -> {}", line_number(k), k, fmt_syntax(v))) + f(&format!("Line {}: {k:?} -> {}", line_number(k), fmt_syntax(v))) }); let deletions = diff @@ -646,8 +646,7 @@ fn main() { .format_with("\n", |v, f| f(&format!("Line {}: {}", line_number(v), &fmt_syntax(v)))); let actual = format!( - "insertions:\n\n{}\n\nreplacements:\n\n{}\n\ndeletions:\n\n{}\n", - insertions, replacements, deletions + "insertions:\n\n{insertions}\n\nreplacements:\n\n{replacements}\n\ndeletions:\n\n{deletions}\n" ); expected_diff.assert_eq(&actual); diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs index 4aa64d0d6e8a6..10c04575833c8 100644 --- a/crates/syntax/src/ast.rs +++ b/crates/syntax/src/ast.rs @@ -9,6 +9,7 @@ mod operators; pub mod edit; pub mod edit_in_place; pub mod make; +pub mod prec; use std::marker::PhantomData; diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs index 15805dfc8608f..5bc6b780e47fa 100644 --- a/crates/syntax/src/ast/edit.rs +++ b/crates/syntax/src/ast/edit.rs @@ -87,7 +87,7 @@ impl IndentLevel { for token in tokens { if let Some(ws) = ast::Whitespace::cast(token) { if ws.text().contains('\n') { - let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self)); + 
let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax())); ted::replace(ws.syntax(), &new_ws); } } @@ -103,7 +103,7 @@ impl IndentLevel { if let Some(ws) = ast::Whitespace::cast(token) { if ws.text().contains('\n') { let new_ws = make::tokens::whitespace( - &ws.syntax().text().replace(&format!("\n{}", self), "\n"), + &ws.syntax().text().replace(&format!("\n{self}"), "\n"), ); ted::replace(ws.syntax(), &new_ws); } diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index 660c057e99c56..a493c92e7dae9 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -481,7 +481,7 @@ impl ast::AssocItemList { }, }; let elements: Vec> = vec![ - make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(), + make::tokens::whitespace(&format!("{whitespace}{indent}")).into(), item.syntax().clone().into(), ]; ted::insert_all(position, elements); @@ -537,7 +537,7 @@ impl ast::MatchArmList { }, }; let indent = IndentLevel::from_node(self.syntax()) + 1; - elements.push(make::tokens::whitespace(&format!("\n{}", indent)).into()); + elements.push(make::tokens::whitespace(&format!("\n{indent}")).into()); elements.push(arm.syntax().clone().into()); if needs_comma(&arm) { ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA)); @@ -555,7 +555,7 @@ impl ast::RecordExprFieldList { let is_multiline = self.syntax().text().contains_char('\n'); let whitespace = if is_multiline { let indent = IndentLevel::from_node(self.syntax()) + 1; - make::tokens::whitespace(&format!("\n{}", indent)) + make::tokens::whitespace(&format!("\n{indent}")) } else { make::tokens::single_space() }; @@ -616,7 +616,7 @@ impl ast::RecordPatFieldList { let is_multiline = self.syntax().text().contains_char('\n'); let whitespace = if is_multiline { let indent = IndentLevel::from_node(self.syntax()) + 1; - make::tokens::whitespace(&format!("\n{}", indent)) + make::tokens::whitespace(&format!("\n{indent}")) } else { make::tokens::single_space() }; @@ -681,7 +681,7 @@ impl ast::VariantList { }, }; let elements: Vec> = vec![ - make::tokens::whitespace(&format!("{}{}", "\n", indent)).into(), + make::tokens::whitespace(&format!("{}{indent}", "\n")).into(), variant.syntax().clone().into(), ast::make::token(T![,]).into(), ]; @@ -704,11 +704,11 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> { match l.next_sibling_or_token() { Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => { if ws.next_sibling_or_token()?.into_token()? 
== r { - ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent))); + ted::replace(ws, make::tokens::whitespace(&format!("\n{indent}"))); } } Some(ws) if ws.kind() == T!['}'] => { - ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent))); + ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{indent}"))); } _ => (), } @@ -888,6 +888,6 @@ enum Foo { let enum_ = ast_mut_from_text::(before); enum_.variant_list().map(|it| it.add_variant(variant)); let after = enum_.to_string(); - assert_eq_text!(&trim_indent(expected.trim()), &trim_indent(&after.trim())); + assert_eq_text!(&trim_indent(expected.trim()), &trim_indent(after.trim())); } } diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 2ea715f47fb23..a214a5e4462cf 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1063,6 +1063,17 @@ impl YieldExpr { pub fn expr(&self) -> Option { support::child(&self.syntax) } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct YeetExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for YeetExpr {} +impl YeetExpr { + pub fn do_token(&self) -> Option { support::token(&self.syntax, T![do]) } + pub fn yeet_token(&self) -> Option { support::token(&self.syntax, T![yeet]) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct LetExpr { pub(crate) syntax: SyntaxNode, @@ -1541,6 +1552,7 @@ pub enum Expr { TupleExpr(TupleExpr), WhileExpr(WhileExpr), YieldExpr(YieldExpr), + YeetExpr(YeetExpr), LetExpr(LetExpr), UnderscoreExpr(UnderscoreExpr), } @@ -2694,6 +2706,17 @@ impl AstNode for YieldExpr { } fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl AstNode for YeetExpr { + fn can_cast(kind: SyntaxKind) -> bool { kind == YEET_EXPR } + fn cast(syntax: SyntaxNode) -> Option { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} impl AstNode for LetExpr { fn can_cast(kind: SyntaxKind) -> bool { kind == LET_EXPR } fn cast(syntax: SyntaxNode) -> Option { @@ -3382,6 +3405,9 @@ impl From for Expr { impl From for Expr { fn from(node: YieldExpr) -> Expr { Expr::YieldExpr(node) } } +impl From for Expr { + fn from(node: YeetExpr) -> Expr { Expr::YeetExpr(node) } +} impl From for Expr { fn from(node: LetExpr) -> Expr { Expr::LetExpr(node) } } @@ -3422,6 +3448,7 @@ impl AstNode for Expr { | TUPLE_EXPR | WHILE_EXPR | YIELD_EXPR + | YEET_EXPR | LET_EXPR | UNDERSCORE_EXPR ) @@ -3458,6 +3485,7 @@ impl AstNode for Expr { TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }), WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }), YIELD_EXPR => Expr::YieldExpr(YieldExpr { syntax }), + YEET_EXPR => Expr::YeetExpr(YeetExpr { syntax }), LET_EXPR => Expr::LetExpr(LetExpr { syntax }), UNDERSCORE_EXPR => Expr::UnderscoreExpr(UnderscoreExpr { syntax }), _ => return None, @@ -3496,6 +3524,7 @@ impl AstNode for Expr { Expr::TupleExpr(it) => &it.syntax, Expr::WhileExpr(it) => &it.syntax, Expr::YieldExpr(it) => &it.syntax, + Expr::YeetExpr(it) => &it.syntax, Expr::LetExpr(it) => &it.syntax, Expr::UnderscoreExpr(it) => &it.syntax, } @@ -3892,7 +3921,7 @@ impl AnyHasArgList { impl AstNode for AnyHasArgList { fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CALL_EXPR | METHOD_CALL_EXPR) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasArgList { syntax }) + 
Self::can_cast(syntax.kind()).then_some(AnyHasArgList { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -3963,6 +3992,7 @@ impl AstNode for AnyHasAttrs { | TUPLE_EXPR | WHILE_EXPR | YIELD_EXPR + | YEET_EXPR | LET_EXPR | UNDERSCORE_EXPR | STMT_LIST @@ -3976,7 +4006,7 @@ impl AstNode for AnyHasAttrs { ) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasAttrs { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasAttrs { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4013,7 +4043,7 @@ impl AstNode for AnyHasDocComments { ) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasDocComments { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasDocComments { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4028,7 +4058,7 @@ impl AstNode for AnyHasGenericParams { matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasGenericParams { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasGenericParams { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4041,7 +4071,7 @@ impl AnyHasLoopBody { impl AstNode for AnyHasLoopBody { fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FOR_EXPR | LOOP_EXPR | WHILE_EXPR) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasLoopBody { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasLoopBody { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4054,7 +4084,7 @@ impl AnyHasModuleItem { impl AstNode for AnyHasModuleItem { fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, MACRO_ITEMS | SOURCE_FILE | ITEM_LIST) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasModuleItem { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasModuleItem { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4089,7 +4119,7 @@ impl AstNode for AnyHasName { ) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasName { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasName { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4107,7 +4137,7 @@ impl AstNode for AnyHasTypeBounds { ) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasTypeBounds { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasTypeBounds { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4141,7 +4171,7 @@ impl AstNode for AnyHasVisibility { ) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| AnyHasVisibility { syntax }) + Self::can_cast(syntax.kind()).then_some(AnyHasVisibility { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax } } @@ -4655,6 +4685,11 @@ impl std::fmt::Display for YieldExpr { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for YeetExpr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for LetExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 8c26009add2bb..d5b3296980c91 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -339,10 +339,10 @@ pub fn 
tail_only_block_expr(tail_expr: ast::Expr) -> ast::BlockExpr { } /// Ideally this function wouldn't exist since it involves manual indenting. -/// It differs from `make::block_expr` by also supporting comments. +/// It differs from `make::block_expr` by also supporting comments and whitespace. /// /// FIXME: replace usages of this with the mutable syntax tree API -pub fn hacky_block_expr_with_comments( +pub fn hacky_block_expr( elements: impl IntoIterator, tail_expr: Option, ) -> ast::BlockExpr { @@ -350,10 +350,17 @@ pub fn hacky_block_expr_with_comments( for node_or_token in elements.into_iter() { match node_or_token { rowan::NodeOrToken::Node(n) => format_to!(buf, " {n}\n"), - rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::COMMENT => { - format_to!(buf, " {t}\n") + rowan::NodeOrToken::Token(t) => { + let kind = t.kind(); + if kind == SyntaxKind::COMMENT { + format_to!(buf, " {t}\n") + } else if kind == SyntaxKind::WHITESPACE { + let content = t.text().trim_matches(|c| c != '\n'); + if content.len() >= 1 { + format_to!(buf, "{}", &content[1..]) + } + } } - _ => (), } } if let Some(tail_expr) = tail_expr { @@ -719,12 +726,23 @@ pub fn param_list( ast_from_text(&list) } -pub fn type_param(name: ast::Name, ty: Option) -> ast::TypeParam { - let bound = match ty { - Some(it) => format!(": {it}"), - None => String::new(), - }; - ast_from_text(&format!("fn f<{name}{bound}>() {{ }}")) +pub fn type_bound(bound: &str) -> ast::TypeBound { + ast_from_text(&format!("fn f() {{ }}")) +} + +pub fn type_bound_list( + bounds: impl IntoIterator, +) -> Option { + let bounds = bounds.into_iter().map(|it| it.to_string()).unique().join(" + "); + if bounds.is_empty() { + return None; + } + Some(ast_from_text(&format!("fn f() {{ }}"))) +} + +pub fn type_param(name: ast::Name, bounds: Option) -> ast::TypeParam { + let bounds = bounds.map_or_else(String::new, |it| format!(": {it}")); + ast_from_text(&format!("fn f<{name}{bounds}>() {{ }}")) } pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam { diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs new file mode 100644 index 0000000000000..4ec388914e604 --- /dev/null +++ b/crates/syntax/src/ast/prec.rs @@ -0,0 +1,328 @@ +//! Precedence representation. + +use crate::{ + ast::{self, BinaryOp, Expr, HasArgList}, + match_ast, AstNode, SyntaxNode, +}; + +impl Expr { + // Implementation is based on + // - https://doc.rust-lang.org/reference/expressions.html#expression-precedence + // - https://matklad.github.io/2020/04/13/simple-but-powerful-pratt-parsing.html + // - rustc source, including, but not limited to + // - https://github.com/rust-lang/rust/blob/b6852428a8ea9728369b64b9964cad8e258403d3/compiler/rustc_ast/src/util/parser.rs#L296 + + /// Returns `true` if `self` would need to be wrapped in parentheses given that its parent is `parent`. + pub fn needs_parens_in(&self, parent: SyntaxNode) -> bool { + match_ast! 
{ + match parent { + ast::Expr(e) => self.needs_parens_in_expr(&e), + ast::Stmt(e) => self.needs_parens_in_stmt(Some(&e)), + ast::StmtList(_) => self.needs_parens_in_stmt(None), + ast::ArgList(_) => false, + ast::MatchArm(_) => false, + _ => false, + } + } + } + + fn needs_parens_in_expr(&self, parent: &Expr) -> bool { + // Special-case block weirdness + if parent.child_is_followed_by_a_block() { + use Expr::*; + match self { + // Cases like `if return {}` (need parens or else `{}` is returned, instead of being `if`'s body) + ReturnExpr(e) if e.expr().is_none() => return true, + BreakExpr(e) if e.expr().is_none() => return true, + YieldExpr(e) if e.expr().is_none() => return true, + + // Same but with `..{}` + RangeExpr(e) if matches!(e.end(), Some(BlockExpr(..))) => return true, + + // Similarly with struct literals, e.g. `if S{} == 1 {}` + _ if self.contains_exterior_struct_lit() => return true, + _ => {} + } + } + + // Special-case `return.f()` + if self.is_ret_like_with_no_value() && parent.is_postfix() { + return false; + } + + if self.is_paren_like() + || parent.is_paren_like() + || self.is_prefix() && (parent.is_prefix() || !self.is_ordered_before(parent)) + || self.is_postfix() && (parent.is_postfix() || self.is_ordered_before(parent)) + { + return false; + } + + let (left, right, inv) = match self.is_ordered_before(parent) { + true => (self, parent, false), + false => (parent, self, true), + }; + + let (_, left_right_bp) = left.binding_power(); + let (right_left_bp, _) = right.binding_power(); + + (left_right_bp < right_left_bp) ^ inv + } + + fn needs_parens_in_stmt(&self, stmt: Option<&ast::Stmt>) -> bool { + use Expr::*; + + // Prevent false-positives in cases like `fn x() -> u8 { ({ 0 } + 1) }`, + // `{ { 0 } + 1 }` won't parse -- `{ 0 }` would be parsed as a self-contained stmt, + // leaving `+ 1` as a parse error. + let mut innermost = self.clone(); + loop { + let next = match &innermost { + BinExpr(e) => e.lhs(), + CallExpr(e) => e.expr(), + CastExpr(e) => e.expr(), + IndexExpr(e) => e.base(), + _ => break, + }; + + if let Some(next) = next { + innermost = next; + if !innermost.requires_semi_to_be_stmt() { + return true; + } + } else { + break; + } + } + + // Not every expression can be followed by `else` in the `let-else` + if let Some(ast::Stmt::LetStmt(e)) = stmt { + if e.let_else().is_some() { + match self { + BinExpr(e) + if e.op_kind() + .map(|op| matches!(op, BinaryOp::LogicOp(_))) + .unwrap_or(false) => + { + return true + } + _ if self.clone().trailing_brace().is_some() => return true, + _ => {} + } + } + } + + false + } + + /// Returns left and right so-called "binding powers" of this expression. + fn binding_power(&self) -> (u8, u8) { + use ast::{ArithOp::*, BinaryOp::*, Expr::*, LogicOp::*}; + + match self { + // (0, 0) -- paren-like/nullary + // (0, N) -- prefix + // (N, 0) -- postfix + // (N, N) -- infix, requires parens + // (N, N+1) -- infix, left to right associative + // (N+1, N) -- infix, right to left associative + // N is odd + // + ContinueExpr(_) => (0, 0), + + ClosureExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) | BreakExpr(_) => (0, 1), + + RangeExpr(_) => (5, 5), + + BinExpr(e) => { + // Return a dummy value if we don't know the op + let Some(op) = e.op_kind() else { return (0, 0) }; + match op { + Assignment { .. 
} => (4, 3), + // + // Ranges are here in order :) + // + LogicOp(op) => match op { + Or => (7, 8), + And => (9, 10), + }, + CmpOp(_) => (11, 11), + ArithOp(op) => match op { + BitOr => (13, 14), + BitXor => (15, 16), + BitAnd => (17, 18), + Shl | Shr => (19, 20), + Add | Sub => (21, 22), + Mul | Div | Rem => (23, 24), + }, + } + } + + CastExpr(_) => (25, 26), + + BoxExpr(_) | RefExpr(_) | LetExpr(_) | PrefixExpr(_) => (0, 27), + + AwaitExpr(_) | CallExpr(_) | MethodCallExpr(_) | IndexExpr(_) | TryExpr(_) + | MacroExpr(_) => (29, 0), + + FieldExpr(_) => (31, 32), + + ArrayExpr(_) | TupleExpr(_) | Literal(_) | PathExpr(_) | ParenExpr(_) | IfExpr(_) + | WhileExpr(_) | ForExpr(_) | LoopExpr(_) | MatchExpr(_) | BlockExpr(_) + | RecordExpr(_) | UnderscoreExpr(_) => (0, 0), + } + } + + fn is_paren_like(&self) -> bool { + matches!(self.binding_power(), (0, 0)) + } + + fn is_prefix(&self) -> bool { + matches!(self.binding_power(), (0, 1..)) + } + + fn is_postfix(&self) -> bool { + matches!(self.binding_power(), (1.., 0)) + } + + /// Returns `true` if this expression can't be a standalone statement. + fn requires_semi_to_be_stmt(&self) -> bool { + use Expr::*; + !matches!( + self, + IfExpr(..) | MatchExpr(..) | BlockExpr(..) | WhileExpr(..) | LoopExpr(..) | ForExpr(..) + ) + } + + /// If an expression ends with `}`, returns the innermost expression ending in this `}`. + fn trailing_brace(mut self) -> Option { + use Expr::*; + + loop { + let rhs = match self { + RefExpr(e) => e.expr(), + BinExpr(e) => e.rhs(), + BoxExpr(e) => e.expr(), + BreakExpr(e) => e.expr(), + LetExpr(e) => e.expr(), + RangeExpr(e) => e.end(), + ReturnExpr(e) => e.expr(), + PrefixExpr(e) => e.expr(), + YieldExpr(e) => e.expr(), + ClosureExpr(e) => e.body(), + + BlockExpr(..) | ForExpr(..) | IfExpr(..) | LoopExpr(..) | MatchExpr(..) + | RecordExpr(..) | WhileExpr(..) => break Some(self), + _ => break None, + }; + + self = rhs?; + } + } + + /// Expressions that syntactically contain an "exterior" struct literal i.e., not surrounded by any + /// parens or other delimiters, e.g., `X { y: 1 }`, `X { y: 1 }.method()`, `foo == X { y: 1 }` and + /// `X { y: 1 } == foo` all do, but `(X { y: 1 }) == foo` does not. + fn contains_exterior_struct_lit(&self) -> bool { + return contains_exterior_struct_lit_inner(self).is_some(); + + fn contains_exterior_struct_lit_inner(expr: &Expr) -> Option<()> { + use Expr::*; + + match expr { + RecordExpr(..) => Some(()), + + // X { y: 1 } + X { y: 2 } + BinExpr(e) => e + .lhs() + .as_ref() + .and_then(contains_exterior_struct_lit_inner) + .or_else(|| e.rhs().as_ref().and_then(contains_exterior_struct_lit_inner)), + + // `&X { y: 1 }`, `X { y: 1 }.y`, `X { y: 1 }.bar(...)`, etc + IndexExpr(e) => contains_exterior_struct_lit_inner(&e.base()?), + AwaitExpr(e) => contains_exterior_struct_lit_inner(&e.expr()?), + PrefixExpr(e) => contains_exterior_struct_lit_inner(&e.expr()?), + CastExpr(e) => contains_exterior_struct_lit_inner(&e.expr()?), + FieldExpr(e) => contains_exterior_struct_lit_inner(&e.expr()?), + MethodCallExpr(e) => contains_exterior_struct_lit_inner(&e.receiver()?), + + _ => None, + } + } + } + + /// Returns true if self is one of `return`, `break`, `continue` or `yield` with **no associated value**. 
+ fn is_ret_like_with_no_value(&self) -> bool { + use Expr::*; + + match self { + ReturnExpr(e) => e.expr().is_none(), + BreakExpr(e) => e.expr().is_none(), + ContinueExpr(_) => true, + YieldExpr(e) => e.expr().is_none(), + _ => false, + } + } + + fn is_ordered_before(&self, other: &Expr) -> bool { + use Expr::*; + + return order(self) < order(other); + + /// Returns text range that can be used to compare two expression for order (which goes first). + fn order(this: &Expr) -> rowan::TextSize { + // For non-paren-like operators: get the operator itself + let token = match this { + RangeExpr(e) => e.op_token(), + BinExpr(e) => e.op_token(), + CastExpr(e) => e.as_token(), + FieldExpr(e) => e.dot_token(), + AwaitExpr(e) => e.dot_token(), + BoxExpr(e) => e.box_token(), + BreakExpr(e) => e.break_token(), + CallExpr(e) => e.arg_list().and_then(|args| args.l_paren_token()), + ClosureExpr(e) => e.param_list().and_then(|params| params.l_paren_token()), + ContinueExpr(e) => e.continue_token(), + IndexExpr(e) => e.l_brack_token(), + MethodCallExpr(e) => e.dot_token(), + PrefixExpr(e) => e.op_token(), + RefExpr(e) => e.amp_token(), + ReturnExpr(e) => e.return_token(), + TryExpr(e) => e.question_mark_token(), + YieldExpr(e) => e.yield_token(), + YeetExpr(e) => e.do_token(), + LetExpr(e) => e.let_token(), + + ArrayExpr(_) | TupleExpr(_) | Literal(_) | PathExpr(_) | ParenExpr(_) + | IfExpr(_) | WhileExpr(_) | ForExpr(_) | LoopExpr(_) | MatchExpr(_) + | BlockExpr(_) | RecordExpr(_) | UnderscoreExpr(_) | MacroExpr(_) => None, + }; + + token.map(|t| t.text_range()).unwrap_or_else(|| this.syntax().text_range()).start() + } + } + + fn child_is_followed_by_a_block(&self) -> bool { + use Expr::*; + + match self { + ArrayExpr(_) | AwaitExpr(_) | BlockExpr(_) | CallExpr(_) | CastExpr(_) + | ClosureExpr(_) | FieldExpr(_) | IndexExpr(_) | Literal(_) | LoopExpr(_) + | MacroExpr(_) | MethodCallExpr(_) | ParenExpr(_) | PathExpr(_) | RecordExpr(_) + | TryExpr(_) | TupleExpr(_) | UnderscoreExpr(_) => false, + + // For BinExpr and RangeExpr this is technically wrong -- the child can be on the left... 
+ BinExpr(_) | RangeExpr(_) | BoxExpr(_) | BreakExpr(_) | ContinueExpr(_) + | PrefixExpr(_) | RefExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) + | LetExpr(_) => self + .syntax() + .parent() + .and_then(Expr::cast) + .map(|e| e.child_is_followed_by_a_block()) + .unwrap_or(false), + + ForExpr(_) | IfExpr(_) | MatchExpr(_) | WhileExpr(_) => true, + } + } +} diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs index 8990f7a7d4e8e..2cd312e7f4f8c 100644 --- a/crates/syntax/src/ast/token_ext.rs +++ b/crates/syntax/src/ast/token_ext.rs @@ -436,9 +436,7 @@ mod tests { fn check_string_value<'a>(lit: &str, expected: impl Into>) { assert_eq!( - ast::String { syntax: make::tokens::literal(&format!("\"{}\"", lit)) } - .value() - .as_deref(), + ast::String { syntax: make::tokens::literal(&format!("\"{lit}\"")) }.value().as_deref(), expected.into() ); } @@ -461,7 +459,7 @@ bcde", "abcde", expected: impl Into>, ) { assert_eq!( - ast::ByteString { syntax: make::tokens::literal(&format!("b\"{}\"", lit)) } + ast::ByteString { syntax: make::tokens::literal(&format!("b\"{lit}\"")) } .value() .as_deref(), expected.into().map(|value| &value[..]) @@ -483,7 +481,7 @@ bcde", b"abcde", #[test] fn test_value_underscores() { - check_float_value("3.141592653589793_f64", 3.141592653589793_f64); + check_float_value("1.234567891011121_f64", 1.234567891011121_f64); check_float_value("1__0.__0__f32", 10.0); check_int_value("0b__1_0_", 2); check_int_value("1_1_1_1_1_1", 111111); diff --git a/crates/syntax/src/fuzz.rs b/crates/syntax/src/fuzz.rs index 7c7a60d62994e..239a89f9b2d52 100644 --- a/crates/syntax/src/fuzz.rs +++ b/crates/syntax/src/fuzz.rs @@ -36,7 +36,7 @@ impl CheckReparse { let delete_len = usize::from_str(lines.next()?).ok()?; let insert = lines.next()?.to_string(); let text = lines.collect::>().join("\n"); - let text = format!("{}{}{}", PREFIX, text, SUFFIX); + let text = format!("{PREFIX}{text}{SUFFIX}"); text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range let delete = TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap()); @@ -60,8 +60,8 @@ impl CheckReparse { eprint!("reparsed:\n{:#?}", new_parse.tree().syntax()); eprint!("full reparse:\n{:#?}", full_reparse.tree().syntax()); assert_eq!( - format!("{:?}", a), - format!("{:?}", b), + format!("{a:?}"), + format!("{b:?}"), "different syntax tree produced by the full reparse" ); } diff --git a/crates/syntax/src/hacks.rs b/crates/syntax/src/hacks.rs index ec3d3d444c365..a3023c3195f3f 100644 --- a/crates/syntax/src/hacks.rs +++ b/crates/syntax/src/hacks.rs @@ -6,7 +6,7 @@ use crate::{ast, AstNode}; pub fn parse_expr_from_str(s: &str) -> Option { let s = s.trim(); - let file = ast::SourceFile::parse(&format!("const _: () = {};", s)); + let file = ast::SourceFile::parse(&format!("const _: () = {s};")); let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?; if expr.syntax().text() != s { return None; diff --git a/crates/syntax/src/ptr.rs b/crates/syntax/src/ptr.rs index a886972fff961..1d4a89201ae42 100644 --- a/crates/syntax/src/ptr.rs +++ b/crates/syntax/src/ptr.rs @@ -82,7 +82,7 @@ impl AstPtr { /// Like `SyntaxNodePtr::cast` but the trait bounds work out. 
pub fn try_from_raw(raw: SyntaxNodePtr) -> Option> { - N::can_cast(raw.kind()).then(|| AstPtr { raw, _ty: PhantomData }) + N::can_cast(raw.kind()).then_some(AstPtr { raw, _ty: PhantomData }) } } diff --git a/crates/syntax/src/ted.rs b/crates/syntax/src/ted.rs index a47b4b11c0ae6..29788d05e845f 100644 --- a/crates/syntax/src/ted.rs +++ b/crates/syntax/src/ted.rs @@ -157,7 +157,7 @@ fn ws_before(position: &Position, new: &SyntaxElement) -> Option { if let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) { let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into()); indent.0 += 1; - return Some(make::tokens::whitespace(&format!("\n{}", indent))); + return Some(make::tokens::whitespace(&format!("\n{indent}"))); } } @@ -165,7 +165,7 @@ fn ws_before(position: &Position, new: &SyntaxElement) -> Option { if let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) { let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into()); indent.0 += 1; - return Some(make::tokens::whitespace(&format!("\n{}", indent))); + return Some(make::tokens::whitespace(&format!("\n{indent}"))); } } @@ -200,7 +200,7 @@ fn ws_between(left: &SyntaxElement, right: &SyntaxElement) -> Option(); - panic!("Parsing errors:\n{}\n", errors); + panic!("Parsing errors:\n{errors}\n"); } } @@ -157,7 +157,7 @@ fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> /// Collects paths to all `.rs` files from `dir` in a sorted `Vec`. fn rust_files_in_dir(dir: &Path) -> Vec { let mut acc = Vec::new(); - for file in fs::read_dir(&dir).unwrap() { + for file in fs::read_dir(dir).unwrap() { let file = file.unwrap(); let path = file.path(); if path.extension().unwrap_or_default() == "rs" { @@ -181,6 +181,6 @@ fn rust_files_in_dir(dir: &Path) -> Vec { /// so this should always be correct. 
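/// Any Windows-style `\r\n` line endings are normalized to `\n` after reading.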
fn read_text(path: &Path) -> String { fs::read_to_string(path) - .unwrap_or_else(|_| panic!("File at {:?} should be valid", path)) + .unwrap_or_else(|_| panic!("File at {path:?} should be valid")) .replace("\r\n", "\n") } diff --git a/crates/syntax/src/tests/ast_src.rs b/crates/syntax/src/tests/ast_src.rs index cf5be1c30fba2..3ff6e03006b5a 100644 --- a/crates/syntax/src/tests/ast_src.rs +++ b/crates/syntax/src/tests/ast_src.rs @@ -65,12 +65,12 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { (">>=", "SHREQ"), ], keywords: &[ - "as", "async", "await", "box", "break", "const", "continue", "crate", "dyn", "else", + "as", "async", "await", "box", "break", "const", "continue", "crate", "do", "dyn", "else", "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro", "match", "mod", "move", "mut", "pub", "ref", "return", "self", "Self", "static", "struct", "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield", ], - contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules"], + contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules", "yeet"], literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"], tokens: &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"], nodes: &[ @@ -142,6 +142,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { "STMT_LIST", "RETURN_EXPR", "YIELD_EXPR", + "YEET_EXPR", "LET_EXPR", "UNDERSCORE_EXPR", "MACRO_EXPR", diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs index 712ef5f63b651..03aa2c451e848 100644 --- a/crates/syntax/src/tests/sourcegen_ast.rs +++ b/crates/syntax/src/tests/sourcegen_ast.rs @@ -253,7 +253,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { matches!(kind, #(#kinds)|*) } fn cast(syntax: SyntaxNode) -> Option { - Self::can_cast(syntax.kind()).then(|| #name { syntax }) + Self::can_cast(syntax.kind()).then_some(#name { syntax }) } fn syntax(&self) -> &SyntaxNode { &self.syntax @@ -328,7 +328,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String { fn write_doc_comment(contents: &[String], dest: &mut String) { for line in contents { - writeln!(dest, "///{}", line).unwrap(); + writeln!(dest, "///{line}").unwrap(); } } @@ -501,7 +501,7 @@ fn to_pascal_case(s: &str) -> String { } fn pluralize(s: &str) -> String { - format!("{}s", s) + format!("{s}s") } impl Field { @@ -637,7 +637,7 @@ fn lower_rule(acc: &mut Vec, grammar: &Grammar, label: Option<&String>, r let mut name = grammar[*token].name.clone(); if name != "int_number" && name != "string" { if "[]{}()".contains(&name) { - name = format!("'{}'", name); + name = format!("'{name}'"); } let field = Field::Token(name); acc.push(field); @@ -651,7 +651,7 @@ fn lower_rule(acc: &mut Vec, grammar: &Grammar, label: Option<&String>, r acc.push(field); return; } - panic!("unhandled rule: {:?}", rule) + panic!("unhandled rule: {rule:?}") } Rule::Labeled { label: l, rule } => { assert!(label.is_none()); diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs index 1eea2346451dd..fb2381110bfe2 100644 --- a/crates/syntax/src/validation.rs +++ b/crates/syntax/src/validation.rs @@ -196,7 +196,7 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) { fn validate_numeric_name(name_ref: Option, errors: &mut Vec) { if let Some(int_token) = int_token(name_ref) { - if int_token.text().chars().any(|c| !c.is_digit(10)) { + if 
int_token.text().chars().any(|c| !c.is_ascii_digit()) { errors.push(SyntaxError::new( "Tuple (struct) field access is only allowed through \ decimal integers with no underscores or suffix", diff --git a/crates/test-utils/src/assert_linear.rs b/crates/test-utils/src/assert_linear.rs index 24502ddb41aff..15c30c52a5486 100644 --- a/crates/test-utils/src/assert_linear.rs +++ b/crates/test-utils/src/assert_linear.rs @@ -83,7 +83,7 @@ impl Round { let a = mean_y - b * mean_x; - self.plot = format!("y_pred = {:.3} + {:.3} * x\n\nx y y_pred\n", a, b); + self.plot = format!("y_pred = {a:.3} + {b:.3} * x\n\nx y y_pred\n"); let mut se = 0.0; let mut max_error = 0.0f64; @@ -100,7 +100,7 @@ impl Round { self.linear = rmse < 0.05 && max_error < 0.1 && a > -0.1; - fn normalize(xs: &mut Vec) { + fn normalize(xs: &mut [f64]) { let max = xs.iter().copied().max_by(|a, b| a.partial_cmp(b).unwrap()).unwrap(); xs.iter_mut().for_each(|it| *it /= max); } diff --git a/crates/test-utils/src/bench_fixture.rs b/crates/test-utils/src/bench_fixture.rs index 979156263de1b..9296fd2e68353 100644 --- a/crates/test-utils/src/bench_fixture.rs +++ b/crates/test-utils/src/bench_fixture.rs @@ -36,10 +36,10 @@ struct S{} {{ pub fn glorious_old_parser() -> String { let path = project_root().join("bench_data/glorious_old_parser"); - fs::read_to_string(&path).unwrap() + fs::read_to_string(path).unwrap() } pub fn numerous_macro_rules() -> String { let path = project_root().join("bench_data/numerous_macro_rules"); - fs::read_to_string(&path).unwrap() + fs::read_to_string(path).unwrap() } diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs index c824f5af72584..d1afd0039aa4b 100644 --- a/crates/test-utils/src/fixture.rs +++ b/crates/test-utils/src/fixture.rs @@ -78,6 +78,7 @@ pub struct Fixture { pub edition: Option, pub env: FxHashMap, pub introduce_new_source_root: Option, + pub target_data_layout: Option, } pub struct MiniCore { @@ -134,11 +135,9 @@ impl Fixture { if line.contains("//-") { assert!( line.starts_with("//-"), - "Metadata line {} has invalid indentation. \ + "Metadata line {ix} has invalid indentation. 
\ All metadata lines need to have the same indentation.\n\ - The offending line: {:?}", - ix, - line + The offending line: {line:?}" ); } @@ -152,7 +151,7 @@ impl Fixture { && !line.contains('.') && line.chars().all(|it| !it.is_uppercase()) { - panic!("looks like invalid metadata line: {:?}", line); + panic!("looks like invalid metadata line: {line:?}"); } if let Some(entry) = res.last_mut() { @@ -171,7 +170,7 @@ impl Fixture { let components = meta.split_ascii_whitespace().collect::>(); let path = components[0].to_string(); - assert!(path.starts_with('/'), "fixture path does not start with `/`: {:?}", path); + assert!(path.starts_with('/'), "fixture path does not start with `/`: {path:?}"); let mut krate = None; let mut deps = Vec::new(); @@ -181,10 +180,10 @@ impl Fixture { let mut cfg_key_values = Vec::new(); let mut env = FxHashMap::default(); let mut introduce_new_source_root = None; + let mut target_data_layout = None; for component in components[1..].iter() { - let (key, value) = component - .split_once(':') - .unwrap_or_else(|| panic!("invalid meta line: {:?}", meta)); + let (key, value) = + component.split_once(':').unwrap_or_else(|| panic!("invalid meta line: {meta:?}")); match key { "crate" => krate = Some(value.to_string()), "deps" => deps = value.split(',').map(|it| it.to_string()).collect(), @@ -213,16 +212,15 @@ impl Fixture { } } "new_source_root" => introduce_new_source_root = Some(value.to_string()), - _ => panic!("bad component: {:?}", component), + "target_data_layout" => target_data_layout = Some(value.to_string()), + _ => panic!("bad component: {component:?}"), } } for prelude_dep in extern_prelude.iter().flatten() { assert!( deps.contains(prelude_dep), - "extern-prelude {:?} must be a subset of deps {:?}", - extern_prelude, - deps + "extern-prelude {extern_prelude:?} must be a subset of deps {deps:?}" ); } @@ -237,6 +235,7 @@ impl Fixture { edition, env, introduce_new_source_root, + target_data_layout, } } } @@ -249,7 +248,7 @@ impl MiniCore { #[track_caller] fn assert_valid_flag(&self, flag: &str) { if !self.valid_flags.iter().any(|it| it == flag) { - panic!("invalid flag: {:?}, valid flags: {:?}", flag, self.valid_flags); + panic!("invalid flag: {flag:?}, valid flags: {:?}", self.valid_flags); } } @@ -259,7 +258,7 @@ impl MiniCore { let line = line.strip_prefix("//- minicore:").unwrap().trim(); for entry in line.split(", ") { if res.has_flag(entry) { - panic!("duplicate minicore flag: {:?}", entry); + panic!("duplicate minicore flag: {entry:?}"); } res.activated_flags.push(entry.to_owned()); } @@ -345,11 +344,7 @@ impl MiniCore { let mut keep = true; for ®ion in &active_regions { - assert!( - !region.starts_with(' '), - "region marker starts with a space: {:?}", - region - ); + assert!(!region.starts_with(' '), "region marker starts with a space: {region:?}"); self.assert_valid_flag(region); seen_regions.push(region); keep &= self.has_flag(region); @@ -365,7 +360,7 @@ impl MiniCore { for flag in &self.valid_flags { if !seen_regions.iter().any(|it| it == flag) { - panic!("unused minicore flag: {:?}", flag); + panic!("unused minicore flag: {flag:?}"); } } buf diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs index 8a9cfb6c22e45..a7a52e08e75a8 100644 --- a/crates/test-utils/src/lib.rs +++ b/crates/test-utils/src/lib.rs @@ -146,8 +146,8 @@ pub fn extract_range_or_offset(text: &str) -> (RangeOrOffset, String) { /// Extracts ranges, marked with ` ` pairs from the `text` pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option)>, 
String) { - let open = format!("<{}", tag); - let close = format!("", tag); + let open = format!("<{tag}"); + let close = format!(""); let mut ranges = Vec::new(); let mut res = String::new(); let mut stack = Vec::new(); @@ -169,8 +169,7 @@ pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option", tag)); + let (from, attr) = stack.pop().unwrap_or_else(|| panic!("unmatched ")); let to = TextSize::of(&res); ranges.push((TextRange::new(from, to), attr)); } else { @@ -180,7 +179,7 @@ pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option", tag); + assert!(stack.is_empty(), "unmatched <{tag}>"); ranges.sort_by_key(|r| (r.0.start(), r.0.end())); (ranges, res) } @@ -397,7 +396,7 @@ pub fn skip_slow_tests() -> bool { eprintln!("ignoring slow test"); } else { let path = project_root().join("./target/.slow_tests_cookie"); - fs::write(&path, ".").unwrap(); + fs::write(path, ".").unwrap(); } should_skip } @@ -413,8 +412,8 @@ pub fn format_diff(chunks: Vec>) -> String { for chunk in chunks { let formatted = match chunk { dissimilar::Chunk::Equal(text) => text.into(), - dissimilar::Chunk::Delete(text) => format!("\x1b[41m{}\x1b[0m", text), - dissimilar::Chunk::Insert(text) => format!("\x1b[42m{}\x1b[0m", text), + dissimilar::Chunk::Delete(text) => format!("\x1b[41m{text}\x1b[0m"), + dissimilar::Chunk::Insert(text) => format!("\x1b[42m{text}\x1b[0m"), }; buf.push_str(&formatted); } @@ -480,7 +479,7 @@ pub fn try_ensure_file_contents(file: &Path, contents: &str) -> Result<(), ()> { } _ => (), } - let display_path = file.strip_prefix(&project_root()).unwrap_or(file); + let display_path = file.strip_prefix(project_root()).unwrap_or(file); eprintln!( "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n", display_path.display() diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 69d2e62b25673..3ca63fcab90d6 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -20,6 +20,7 @@ //! derive: //! drop: //! eq: sized +//! error: fmt //! fmt: result //! fn: //! from: sized @@ -29,13 +30,16 @@ //! index: sized //! iterator: option //! iterators: iterator, fn +//! non_zero: //! option: //! ord: eq, option //! pin: //! range: //! result: +//! send: sized //! sized: //! slice: +//! sync: sized //! try: //! 
unsize: sized @@ -47,6 +51,24 @@ pub mod marker { pub trait Sized {} // endregion:sized + // region:send + pub unsafe auto trait Send {} + + impl !Send for *const T {} + impl !Send for *mut T {} + // region:sync + unsafe impl Send for &T {} + unsafe impl Send for &mut T {} + // endregion:sync + // endregion:send + + // region:sync + pub unsafe auto trait Sync {} + + impl !Sync for *const T {} + impl !Sync for *mut T {} + // endregion:sync + // region:unsize #[lang = "unsize"] pub trait Unsize {} @@ -91,7 +113,7 @@ pub mod default { fn default() -> Self; } // region:derive - #[rustc_builtin_macro] + #[rustc_builtin_macro(Default, attributes(default))] pub macro Default($item:item) {} // endregion:derive } @@ -360,6 +382,12 @@ pub mod ops { type Output; fn add(self, rhs: Rhs) -> Self::Output; } + + #[lang = "add_assign"] + #[const_trait] + pub trait AddAssign { + fn add_assign(&mut self, rhs: Rhs); + } // endregion:add // region:generator @@ -438,6 +466,9 @@ pub mod fmt { pub trait Debug { fn fmt(&self, f: &mut Formatter<'_>) -> Result; } + pub trait Display { + fn fmt(&self, f: &mut Formatter<'_>) -> Result; + } } // endregion:fmt @@ -680,6 +711,15 @@ mod macros { } // endregion:derive +// region:non_zero +pub mod num { + #[repr(transparent)] + #[rustc_layout_scalar_valid_range_start(1)] + #[rustc_nonnull_optimization_guaranteed] + pub struct NonZeroU8(u8); +} +// endregion:non_zero + // region:bool_impl #[lang = "bool"] impl bool { @@ -693,6 +733,17 @@ impl bool { } // endregion:bool_impl +// region:error +pub mod error { + #[rustc_has_incoherent_inherent_impls] + pub trait Error: crate::fmt::Debug + crate::fmt::Display { + fn source(&self) -> Option<&(dyn Error + 'static)> { + None + } + } +} +// endregion:error + pub mod prelude { pub mod v1 { pub use crate::{ @@ -705,7 +756,9 @@ pub mod prelude { iter::{IntoIterator, Iterator}, // :iterator macros::builtin::derive, // :derive marker::Copy, // :copy + marker::Send, // :send marker::Sized, // :sized + marker::Sync, // :sync mem::drop, // :drop ops::Drop, // :drop ops::{Fn, FnMut, FnOnce}, // :fn diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs index b05da769161e6..67bdad2aadd83 100644 --- a/crates/toolchain/src/lib.rs +++ b/crates/toolchain/src/lib.rs @@ -35,7 +35,7 @@ fn get_path_for_executable(executable_name: &'static str) -> PathBuf { // example: for cargo, this tries ~/.cargo/bin/cargo // It seems that this is a reasonable place to try for cargo, rustc, and rustup let env_var = executable_name.to_ascii_uppercase(); - if let Some(path) = env::var_os(&env_var) { + if let Some(path) = env::var_os(env_var) { return path.into(); } diff --git a/crates/tt/src/buffer.rs b/crates/tt/src/buffer.rs index 69226bd4c4805..d27a7aa0d4d38 100644 --- a/crates/tt/src/buffer.rs +++ b/crates/tt/src/buffer.rs @@ -190,7 +190,7 @@ impl<'a> Cursor<'a> { pub fn token_tree(self) -> Option> { match self.entry() { Some(Entry::Leaf(tt)) => match tt { - TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, *tt)), + TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, tt)), TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))), }, Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)), diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index a54861de9587b..353b09fd8c1ed 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -86,10 +86,20 @@ pub enum Spacing { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Ident { + /// Identifier or keyword. 
Unlike rustc, we keep "r#" prefix when it represents a raw identifier. pub text: SmolStr, pub id: TokenId, } +impl Ident { + /// Constructor intended to be used only by proc macro server. `text` should not contain raw + /// identifier prefix. + pub fn new_with_is_raw(text: SmolStr, id: TokenId, is_raw: bool) -> Self { + let text = if is_raw { SmolStr::from_iter(["r#", &text]) } else { text }; + Ident { text, id } + } +} + impl Leaf { pub fn id(&self) -> TokenId { match self { @@ -105,15 +115,15 @@ fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usi let aux = match subtree.delimiter.map(|it| (it.kind, it.id.0)) { None => "$".to_string(), - Some((DelimiterKind::Parenthesis, id)) => format!("() {}", id), - Some((DelimiterKind::Brace, id)) => format!("{{}} {}", id), - Some((DelimiterKind::Bracket, id)) => format!("[] {}", id), + Some((DelimiterKind::Parenthesis, id)) => format!("() {id}"), + Some((DelimiterKind::Brace, id)) => format!("{{}} {id}"), + Some((DelimiterKind::Bracket, id)) => format!("[] {id}"), }; if subtree.token_trees.is_empty() { - write!(f, "{}SUBTREE {}", align, aux)?; + write!(f, "{align}SUBTREE {aux}")?; } else { - writeln!(f, "{}SUBTREE {}", align, aux)?; + writeln!(f, "{align}SUBTREE {aux}")?; for (idx, child) in subtree.token_trees.iter().enumerate() { print_debug_token(f, child, level + 1)?; if idx != subtree.token_trees.len() - 1 { @@ -130,7 +140,7 @@ fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) match tkn { TokenTree::Leaf(leaf) => match leaf { - Leaf::Literal(lit) => write!(f, "{}LITERAL {} {}", align, lit.text, lit.id.0)?, + Leaf::Literal(lit) => write!(f, "{align}LITERAL {} {}", lit.text, lit.id.0)?, Leaf::Punct(punct) => write!( f, "{}PUNCH {} [{}] {}", @@ -139,7 +149,7 @@ fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) if punct.spacing == Spacing::Alone { "alone" } else { "joint" }, punct.id.0 )?, - Leaf::Ident(ident) => write!(f, "{}IDENT {} {}", align, ident.text, ident.id.0)?, + Leaf::Ident(ident) => write!(f, "{align}IDENT {} {}", ident.text, ident.id.0)?, }, TokenTree::Subtree(subtree) => { print_debug_subtree(f, subtree, level)?; @@ -302,7 +312,7 @@ pub fn pretty(tkns: &[TokenTree]) -> String { Some(DelimiterKind::Parenthesis) => ("(", ")"), Some(DelimiterKind::Bracket) => ("[", "]"), }; - format!("{}{}{}", open, content, close) + format!("{open}{content}{close}") } } } diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs index e0ef737b3fc09..700aebe0b34f1 100644 --- a/crates/vfs/src/file_set.rs +++ b/crates/vfs/src/file_set.rs @@ -140,16 +140,11 @@ impl FileSetConfig { } /// Builder for [`FileSetConfig`]. +#[derive(Default)] pub struct FileSetConfigBuilder { roots: Vec>, } -impl Default for FileSetConfigBuilder { - fn default() -> Self { - FileSetConfigBuilder { roots: Vec::new() } - } -} - impl FileSetConfigBuilder { /// Returns the number of sets currently held. pub fn len(&self) -> usize { diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index afc9a0fa6fb20..c61f30387b70c 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -59,15 +59,10 @@ pub use paths::{AbsPath, AbsPathBuf}; /// Handle to a file in [`Vfs`] /// /// Most functions in rust-analyzer use this when they need to refer to a file. 
-#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] +#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct FileId(pub u32); impl stdx::hash::NoHashHashable for FileId {} -impl std::hash::Hash for FileId { - fn hash(&self, state: &mut H) { - self.0.hash(state); - } -} /// Storage for all files read by rust-analyzer. /// diff --git a/crates/vfs/src/path_interner.rs b/crates/vfs/src/path_interner.rs index 6e049f0d40f73..64f51976053d4 100644 --- a/crates/vfs/src/path_interner.rs +++ b/crates/vfs/src/path_interner.rs @@ -9,16 +9,11 @@ use rustc_hash::FxHasher; use crate::{FileId, VfsPath}; /// Structure to map between [`VfsPath`] and [`FileId`]. +#[derive(Default)] pub(crate) struct PathInterner { map: IndexSet>, } -impl Default for PathInterner { - fn default() -> Self { - Self { map: IndexSet::default() } - } -} - impl PathInterner { /// Get the id corresponding to `path`. /// diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs index 668c7320d4ec2..b23c9f1966d5c 100644 --- a/crates/vfs/src/vfs_path.rs +++ b/crates/vfs/src/vfs_path.rs @@ -364,7 +364,7 @@ impl VirtualPath { path = &path["../".len()..]; } path = path.trim_start_matches("./"); - res.0 = format!("{}/{}", res.0, path); + res.0 = format!("{}/{path}", res.0); Some(res) } diff --git a/docs/dev/README.md b/docs/dev/README.md index 4ac75b4bbfd96..cdab6b09928cd 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md @@ -200,7 +200,7 @@ Look for `fn benchmark_xxx` tests for a quick way to reproduce performance probl ## Release Process -Release process is handled by `release`, `dist` and `promote` xtasks, `release` being the main one. +Release process is handled by `release`, `dist`, `publish-release-notes` and `promote` xtasks, `release` being the main one. `release` assumes that you have checkouts of `rust-analyzer`, `rust-analyzer.github.io`, and `rust-lang/rust` in the same directory: @@ -231,8 +231,9 @@ Release steps: * create a new changelog in `rust-analyzer.github.io` 3. While the release is in progress, fill in the changelog 4. Commit & push the changelog -5. Tweet -6. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's subtree. +5. Run `cargo xtask publish-release-notes ` -- this will convert the changelog entry in AsciiDoc to Markdown and update the body of GitHub Releases entry. +6. Tweet +7. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's subtree. Self-approve the PR. If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console. diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index fe316fcae9b88..a4780af1a2615 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ [^\n]+\n/m, ""); } + let value; + if (errorCode) { + if (typeof diag.code === "string" || typeof diag.code === "number") { + value = diag.code; + } else { + value = diag.code?.value; + } + } diag.code = { target: vscode.Uri.from({ - scheme: "rust-analyzer-diagnostics-view", - path: "/diagnostic message", + scheme: diagnostics.URI_SCHEME, + path: `/diagnostic message [${idx.toString()}]`, fragment: uri.toString(), query: idx.toString(), }), - value: "Click for full compiler diagnostic", + value: value ?? 
"Click for full compiler diagnostic", }; } }); - return next(uri, diagnostics); + return next(uri, diagnosticList); }, async provideHover( document: vscode.TextDocument, @@ -302,6 +333,7 @@ class ExperimentalFeatures implements lc.StaticFeature { caps.codeActionGroup = true; caps.hoverActions = true; caps.serverStatusNotification = true; + caps.colorDiagnosticOutput = true; caps.commands = { commands: [ "rust-analyzer.runSingle", diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index 312087e4cffef..cb4e13e2c6043 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -788,8 +788,23 @@ export function openDocs(ctx: CtxInit): Cmd { export function cancelFlycheck(ctx: CtxInit): Cmd { return async () => { + await ctx.client.sendNotification(ra.cancelFlycheck); + }; +} + +export function clearFlycheck(ctx: CtxInit): Cmd { + return async () => { + await ctx.client.sendNotification(ra.clearFlycheck); + }; +} + +export function runFlycheck(ctx: CtxInit): Cmd { + return async () => { + const editor = ctx.activeRustEditor; const client = ctx.client; - await client.sendRequest(ra.cancelFlycheck); + const params = editor ? { uri: editor.document.uri.toString() } : null; + + await client.sendNotification(ra.runFlycheck, { textDocument: params }); }; } @@ -797,12 +812,12 @@ export function resolveCodeAction(ctx: CtxInit): Cmd { return async (params: lc.CodeAction) => { const client = ctx.client; params.command = undefined; - const item = await client?.sendRequest(lc.CodeActionResolveRequest.type, params); + const item = await client.sendRequest(lc.CodeActionResolveRequest.type, params); if (!item?.edit) { return; } const itemEdit = item.edit; - const edit = await client?.protocol2CodeConverter.asWorkspaceEdit(itemEdit); + const edit = await client.protocol2CodeConverter.asWorkspaceEdit(itemEdit); // filter out all text edits and recreate the WorkspaceEdit without them so we can apply // snippet edits on our own const lcFileSystemEdit = { diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index d8dbd1df16dfb..eb4f965291fe5 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -241,6 +241,10 @@ export class Config { get previewRustcOutput() { return this.get("diagnostics.previewRustcOutput"); } + + get useRustcErrorCode() { + return this.get("diagnostics.useRustcErrorCode"); + } } const VarRegex = new RegExp(/\$\{(.+?)\}/g); diff --git a/editors/code/src/diagnostics.ts b/editors/code/src/diagnostics.ts new file mode 100644 index 0000000000000..9695d8bf26d5f --- /dev/null +++ b/editors/code/src/diagnostics.ts @@ -0,0 +1,212 @@ +import * as anser from "anser"; +import * as vscode from "vscode"; +import { ProviderResult, Range, TextEditorDecorationType, ThemeColor, window } from "vscode"; +import { Ctx } from "./ctx"; + +export const URI_SCHEME = "rust-analyzer-diagnostics-view"; + +export class TextDocumentProvider implements vscode.TextDocumentContentProvider { + private _onDidChange = new vscode.EventEmitter(); + + public constructor(private readonly ctx: Ctx) {} + + get onDidChange(): vscode.Event { + return this._onDidChange.event; + } + + triggerUpdate(uri: vscode.Uri) { + if (uri.scheme === URI_SCHEME) { + this._onDidChange.fire(uri); + } + } + + dispose() { + this._onDidChange.dispose(); + } + + async provideTextDocumentContent(uri: vscode.Uri): Promise { + const contents = getRenderedDiagnostic(this.ctx, uri); + return anser.ansiToText(contents); + } +} + +function getRenderedDiagnostic(ctx: Ctx, uri: 
vscode.Uri): string { + const diags = ctx.client?.diagnostics?.get(vscode.Uri.parse(uri.fragment, true)); + if (!diags) { + return "Unable to find original rustc diagnostic"; + } + + const diag = diags[parseInt(uri.query)]; + if (!diag) { + return "Unable to find original rustc diagnostic"; + } + const rendered = (diag as unknown as { data?: { rendered?: string } }).data?.rendered; + + if (!rendered) { + return "Unable to find original rustc diagnostic"; + } + + return rendered; +} + +interface AnserStyle { + fg: string; + bg: string; + fg_truecolor: string; + bg_truecolor: string; + decorations: Array; +} + +export class AnsiDecorationProvider implements vscode.Disposable { + private _decorationTypes = new Map(); + + public constructor(private readonly ctx: Ctx) {} + + dispose(): void { + for (const decorationType of this._decorationTypes.values()) { + decorationType.dispose(); + } + + this._decorationTypes.clear(); + } + + async provideDecorations(editor: vscode.TextEditor) { + if (editor.document.uri.scheme !== URI_SCHEME) { + return; + } + + const decorations = (await this._getDecorations(editor.document.uri)) || []; + for (const [decorationType, ranges] of decorations) { + editor.setDecorations(decorationType, ranges); + } + } + + private _getDecorations( + uri: vscode.Uri + ): ProviderResult<[TextEditorDecorationType, Range[]][]> { + const stringContents = getRenderedDiagnostic(this.ctx, uri); + const lines = stringContents.split("\n"); + + const result = new Map(); + // Populate all known decoration types in the result. This forces any + // lingering decorations to be cleared if the text content changes to + // something without ANSI codes for a given decoration type. + for (const decorationType of this._decorationTypes.values()) { + result.set(decorationType, []); + } + + for (const [lineNumber, line] of lines.entries()) { + const totalEscapeLength = 0; + + // eslint-disable-next-line camelcase + const parsed = anser.ansiToJson(line, { use_classes: true }); + + let offset = 0; + + for (const span of parsed) { + const { content, ...style } = span; + + const range = new Range( + lineNumber, + offset - totalEscapeLength, + lineNumber, + offset + content.length - totalEscapeLength + ); + + offset += content.length; + + const decorationType = this._getDecorationType(style); + + if (!result.has(decorationType)) { + result.set(decorationType, []); + } + + result.get(decorationType)!.push(range); + } + } + + return [...result]; + } + + private _getDecorationType(style: AnserStyle): TextEditorDecorationType { + let decorationType = this._decorationTypes.get(style); + + if (decorationType) { + return decorationType; + } + + const fontWeight = style.decorations.find((s) => s === "bold"); + const fontStyle = style.decorations.find((s) => s === "italic"); + const textDecoration = style.decorations.find((s) => s === "underline"); + + decorationType = window.createTextEditorDecorationType({ + backgroundColor: AnsiDecorationProvider._convertColor(style.bg, style.bg_truecolor), + color: AnsiDecorationProvider._convertColor(style.fg, style.fg_truecolor), + fontWeight, + fontStyle, + textDecoration, + }); + + this._decorationTypes.set(style, decorationType); + + return decorationType; + } + + // NOTE: This could just be a kebab-case to camelCase conversion, but I think it's + // a short enough list to just write these by hand + static readonly _anserToThemeColor: Record = { + "ansi-black": "ansiBlack", + "ansi-white": "ansiWhite", + "ansi-red": "ansiRed", + "ansi-green": "ansiGreen", + "ansi-yellow": 
"ansiYellow", + "ansi-blue": "ansiBlue", + "ansi-magenta": "ansiMagenta", + "ansi-cyan": "ansiCyan", + + "ansi-bright-black": "ansiBrightBlack", + "ansi-bright-white": "ansiBrightWhite", + "ansi-bright-red": "ansiBrightRed", + "ansi-bright-green": "ansiBrightGreen", + "ansi-bright-yellow": "ansiBrightYellow", + "ansi-bright-blue": "ansiBrightBlue", + "ansi-bright-magenta": "ansiBrightMagenta", + "ansi-bright-cyan": "ansiBrightCyan", + }; + + private static _convertColor( + color?: string, + truecolor?: string + ): ThemeColor | string | undefined { + if (!color) { + return undefined; + } + + if (color === "ansi-truecolor") { + if (!truecolor) { + return undefined; + } + return `rgb(${truecolor})`; + } + + const paletteMatch = color.match(/ansi-palette-(.+)/); + if (paletteMatch) { + const paletteColor = paletteMatch[1]; + // anser won't return both the RGB and the color name at the same time, + // so just fake a single foreground control char with the palette number: + const spans = anser.ansiToJson(`\x1b[38;5;${paletteColor}m`); + const rgb = spans[1].fg; + + if (rgb) { + return `rgb(${rgb})`; + } + } + + const themeColor = AnsiDecorationProvider._anserToThemeColor[color]; + if (themeColor) { + return new ThemeColor("terminal." + themeColor); + } + + return undefined; + } +} diff --git a/editors/code/src/lsp_ext.ts b/editors/code/src/lsp_ext.ts index 875261c48a60e..29349cc20f5b0 100644 --- a/editors/code/src/lsp_ext.ts +++ b/editors/code/src/lsp_ext.ts @@ -79,7 +79,11 @@ export const relatedTests = new lc.RequestType("rust-analyzer/cancelFlycheck"); +export const cancelFlycheck = new lc.NotificationType0("rust-analyzer/cancelFlycheck"); +export const clearFlycheck = new lc.NotificationType0("rust-analyzer/clearFlycheck"); +export const runFlycheck = new lc.NotificationType<{ + textDocument: lc.TextDocumentIdentifier | null; +}>("rust-analyzer/runFlycheck"); // Experimental extensions diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index 25f1e83d109cb..dd439317c7092 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts @@ -3,6 +3,7 @@ import * as lc from "vscode-languageclient/node"; import * as commands from "./commands"; import { CommandFactory, Ctx, fetchWorkspace } from "./ctx"; +import * as diagnostics from "./diagnostics"; import { activateTaskProvider } from "./tasks"; import { setContextValue } from "./util"; @@ -48,30 +49,52 @@ async function activateServer(ctx: Ctx): Promise { ctx.pushExtCleanup(activateTaskProvider(ctx.config)); } + const diagnosticProvider = new diagnostics.TextDocumentProvider(ctx); ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider( - "rust-analyzer-diagnostics-view", - new (class implements vscode.TextDocumentContentProvider { - async provideTextDocumentContent(uri: vscode.Uri): Promise { - const diags = ctx.client?.diagnostics?.get( - vscode.Uri.parse(uri.fragment, true) - ); - if (!diags) { - return "Unable to find original rustc diagnostic"; - } - - const diag = diags[parseInt(uri.query)]; - if (!diag) { - return "Unable to find original rustc diagnostic"; - } - const rendered = (diag as unknown as { data?: { rendered?: string } }).data - ?.rendered; - return rendered ?? 
"Unable to find original rustc diagnostic"; - } - })() + diagnostics.URI_SCHEME, + diagnosticProvider ) ); + const decorationProvider = new diagnostics.AnsiDecorationProvider(ctx); + ctx.pushExtCleanup(decorationProvider); + + async function decorateVisibleEditors(document: vscode.TextDocument) { + for (const editor of vscode.window.visibleTextEditors) { + if (document === editor.document) { + await decorationProvider.provideDecorations(editor); + } + } + } + + vscode.workspace.onDidChangeTextDocument( + async (event) => await decorateVisibleEditors(event.document), + null, + ctx.subscriptions + ); + vscode.workspace.onDidOpenTextDocument(decorateVisibleEditors, null, ctx.subscriptions); + vscode.window.onDidChangeActiveTextEditor( + async (editor) => { + if (editor) { + diagnosticProvider.triggerUpdate(editor.document.uri); + await decorateVisibleEditors(editor.document); + } + }, + null, + ctx.subscriptions + ); + vscode.window.onDidChangeVisibleTextEditors( + async (visibleEditors) => { + for (const editor of visibleEditors) { + diagnosticProvider.triggerUpdate(editor.document.uri); + await decorationProvider.provideDecorations(editor); + } + }, + null, + ctx.subscriptions + ); + vscode.workspace.onDidChangeWorkspaceFolders( async (_) => ctx.onWorkspaceFolderChanges(), null, @@ -79,7 +102,7 @@ async function activateServer(ctx: Ctx): Promise { ); vscode.workspace.onDidChangeConfiguration( async (_) => { - await ctx.client?.sendNotification("workspace/didChangeConfiguration", { + await ctx.client?.sendNotification(lc.DidChangeConfigurationNotification.type, { settings: "", }); }, @@ -150,6 +173,8 @@ function createCommands(): Record { moveItemUp: { enabled: commands.moveItemUp }, moveItemDown: { enabled: commands.moveItemDown }, cancelFlycheck: { enabled: commands.cancelFlycheck }, + clearFlycheck: { enabled: commands.clearFlycheck }, + runFlycheck: { enabled: commands.runFlycheck }, ssr: { enabled: commands.ssr }, serverVersion: { enabled: commands.serverVersion }, // Internal commands which are invoked by the server. diff --git a/lib/la-arena/src/map.rs b/lib/la-arena/src/map.rs index 5f347e274500e..b9d491da3c0cf 100644 --- a/lib/la-arena/src/map.rs +++ b/lib/la-arena/src/map.rs @@ -86,6 +86,14 @@ impl ArenaMap, V> { self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?))) } + /// Returns an iterator over the arena indexes and values in the map. + pub fn iter_mut(&mut self) -> impl Iterator, &mut V)> { + self.v + .iter_mut() + .enumerate() + .filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_mut()?))) + } + /// Gets the given key's corresponding entry in the map for in-place manipulation. pub fn entry(&mut self, idx: Idx) -> Entry<'_, Idx, V> { let idx = Self::to_idx(idx); diff --git a/lib/lsp-server/examples/goto_def.rs b/lib/lsp-server/examples/goto_def.rs index ca7ad0b536739..2f270afbbf199 100644 --- a/lib/lsp-server/examples/goto_def.rs +++ b/lib/lsp-server/examples/goto_def.rs @@ -80,32 +80,32 @@ fn main_loop( let _params: InitializeParams = serde_json::from_value(params).unwrap(); eprintln!("starting example main loop"); for msg in &connection.receiver { - eprintln!("got msg: {:?}", msg); + eprintln!("got msg: {msg:?}"); match msg { Message::Request(req) => { if connection.handle_shutdown(&req)? 
{ return Ok(()); } - eprintln!("got request: {:?}", req); + eprintln!("got request: {req:?}"); match cast::(req) { Ok((id, params)) => { - eprintln!("got gotoDefinition request #{}: {:?}", id, params); + eprintln!("got gotoDefinition request #{id}: {params:?}"); let result = Some(GotoDefinitionResponse::Array(Vec::new())); let result = serde_json::to_value(&result).unwrap(); let resp = Response { id, result: Some(result), error: None }; connection.sender.send(Message::Response(resp))?; continue; } - Err(err @ ExtractError::JsonError { .. }) => panic!("{:?}", err), + Err(err @ ExtractError::JsonError { .. }) => panic!("{err:?}"), Err(ExtractError::MethodMismatch(req)) => req, }; // ... } Message::Response(resp) => { - eprintln!("got response: {:?}", resp); + eprintln!("got response: {resp:?}"); } Message::Notification(not) => { - eprintln!("got notification: {:?}", not); + eprintln!("got notification: {not:?}"); } } } diff --git a/lib/lsp-server/src/lib.rs b/lib/lsp-server/src/lib.rs index d567077d4a4ba..beccde40a8978 100644 --- a/lib/lsp-server/src/lib.rs +++ b/lib/lsp-server/src/lib.rs @@ -114,30 +114,21 @@ impl Connection { /// ``` pub fn initialize_start(&self) -> Result<(RequestId, serde_json::Value), ProtocolError> { loop { - match self.receiver.recv() { - Ok(Message::Request(req)) if req.is_initialize() => { - return Ok((req.id, req.params)) - } + break match self.receiver.recv() { + Ok(Message::Request(req)) if req.is_initialize() => Ok((req.id, req.params)), // Respond to non-initialize requests with ServerNotInitialized Ok(Message::Request(req)) => { let resp = Response::new_err( req.id.clone(), ErrorCode::ServerNotInitialized as i32, - format!("expected initialize request, got {:?}", req), + format!("expected initialize request, got {req:?}"), ); self.sender.send(resp.into()).unwrap(); + continue; } - Ok(msg) => { - return Err(ProtocolError(format!( - "expected initialize request, got {:?}", - msg - ))) - } + Ok(msg) => Err(ProtocolError(format!("expected initialize request, got {msg:?}"))), Err(e) => { - return Err(ProtocolError(format!( - "expected initialize request, got error: {}", - e - ))) + Err(ProtocolError(format!("expected initialize request, got error: {e}"))) } }; } @@ -152,21 +143,14 @@ impl Connection { let resp = Response::new_ok(initialize_id, initialize_result); self.sender.send(resp.into()).unwrap(); match &self.receiver.recv() { - Ok(Message::Notification(n)) if n.is_initialized() => (), + Ok(Message::Notification(n)) if n.is_initialized() => Ok(()), Ok(msg) => { - return Err(ProtocolError(format!( - "expected Message::Notification, got: {:?}", - msg, - ))) + Err(ProtocolError(format!(r#"expected initialized notification, got: {msg:?}"#))) } Err(e) => { - return Err(ProtocolError(format!( - "expected initialized notification, got error: {}", - e, - ))) + Err(ProtocolError(format!("expected initialized notification, got error: {e}",))) } } - Ok(()) } /// Initialize the connection. 
Sends the server capabilities @@ -221,11 +205,9 @@ impl Connection { match &self.receiver.recv_timeout(std::time::Duration::from_secs(30)) { Ok(Message::Notification(n)) if n.is_exit() => (), Ok(msg) => { - return Err(ProtocolError(format!("unexpected message during shutdown: {:?}", msg))) - } - Err(e) => { - return Err(ProtocolError(format!("unexpected error during shutdown: {}", e))) + return Err(ProtocolError(format!("unexpected message during shutdown: {msg:?}"))) } + Err(e) => return Err(ProtocolError(format!("unexpected error during shutdown: {e}"))), } Ok(true) } diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 95e27beab5dc1..2dd01796c6e41 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -12,4 +12,5 @@ flate2 = "1.0.24" write-json = "0.1.2" xshell = "0.2.2" xflags = "0.3.0" +zip = { version = "0.6", default-features = false, features = ["deflate", "time"] } # Avoid adding more dependencies to this crate diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs index 686aec4ae5074..74715c53eaac5 100644 --- a/xtask/src/dist.rs +++ b/xtask/src/dist.rs @@ -1,12 +1,13 @@ use std::{ env, fs::File, - io, + io::{self, BufWriter}, path::{Path, PathBuf}, }; use flate2::{write::GzEncoder, Compression}; use xshell::{cmd, Shell}; +use zip::{write::FileOptions, DateTime, ZipWriter}; use crate::{date_iso, flags, project_root}; @@ -26,10 +27,10 @@ impl flags::Dist { if let Some(patch_version) = self.client_patch_version { let version = if stable { - format!("{}.{}", VERSION_STABLE, patch_version) + format!("{VERSION_STABLE}.{patch_version}") } else { // A hack to make VS Code prefer nightly over stable. - format!("{}.{}", VERSION_NIGHTLY, patch_version) + format!("{VERSION_NIGHTLY}.{patch_version}") }; dist_server(sh, &format!("{version}-standalone"), &target)?; let release_tag = if stable { date_iso(sh)? 
} else { "nightly".to_string() }; @@ -59,10 +60,10 @@ fn dist_client( let mut patch = Patch::new(sh, "./package.json")?; patch .replace( - &format!(r#""version": "{}.0-dev""#, VERSION_DEV), - &format!(r#""version": "{}""#, version), + &format!(r#""version": "{VERSION_DEV}.0-dev""#), + &format!(r#""version": "{version}""#), ) - .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{}""#, release_tag)) + .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{release_tag}""#)) .replace(r#""$generated-start": {},"#, "") .replace(",\n \"$generated-end\": {}", "") .replace(r#""enabledApiProposals": [],"#, r#""#); @@ -89,6 +90,9 @@ fn dist_server(sh: &Shell, release: &str, target: &Target) -> anyhow::Result<()> let dst = Path::new("dist").join(&target.artifact_name); gzip(&target.server_path, &dst.with_extension("gz"))?; + if target_name.contains("-windows-") { + zip(&target.server_path, target.symbols_path.as_ref(), &dst.with_extension("zip"))?; + } Ok(()) } @@ -101,6 +105,38 @@ fn gzip(src_path: &Path, dest_path: &Path) -> anyhow::Result<()> { Ok(()) } +fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> anyhow::Result<()> { + let file = File::create(dest_path)?; + let mut writer = ZipWriter::new(BufWriter::new(file)); + writer.start_file( + src_path.file_name().unwrap().to_str().unwrap(), + FileOptions::default() + .last_modified_time( + DateTime::from_time(std::fs::metadata(src_path)?.modified()?.into()).unwrap(), + ) + .unix_permissions(0o755) + .compression_method(zip::CompressionMethod::Deflated) + .compression_level(Some(9)), + )?; + let mut input = io::BufReader::new(File::open(src_path)?); + io::copy(&mut input, &mut writer)?; + if let Some(symbols_path) = symbols_path { + writer.start_file( + symbols_path.file_name().unwrap().to_str().unwrap(), + FileOptions::default() + .last_modified_time( + DateTime::from_time(std::fs::metadata(src_path)?.modified()?.into()).unwrap(), + ) + .compression_method(zip::CompressionMethod::Deflated) + .compression_level(Some(9)), + )?; + let mut input = io::BufReader::new(File::open(symbols_path)?); + io::copy(&mut input, &mut writer)?; + } + writer.finish()?; + Ok(()) +} + struct Target { name: String, server_path: PathBuf, @@ -130,8 +166,8 @@ impl Target { } else { (String::new(), None) }; - let server_path = out_path.join(format!("rust-analyzer{}", exe_suffix)); - let artifact_name = format!("rust-analyzer-{}{}", name, exe_suffix); + let server_path = out_path.join(format!("rust-analyzer{exe_suffix}")); + let artifact_name = format!("rust-analyzer-{name}{exe_suffix}"); Self { name, server_path, symbols_path, artifact_name } } } diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs index 0fce48898349a..21004797014e8 100644 --- a/xtask/src/flags.rs +++ b/xtask/src/flags.rs @@ -34,6 +34,13 @@ xflags::xflags! { cmd dist { optional --client-patch-version version: String } + /// Read a changelog AsciiDoc file and update the GitHub Releases entry in Markdown. + cmd publish-release-notes { + /// Only run conversion and show the result. + optional --dry-run + /// Target changelog file. 
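+ /// The file name must be date-prefixed, e.g. `2019-07-24-changelog-0.adoc`; the release
+ /// tag and the original changelog URL are derived from it.
+ /// Example: `cargo xtask publish-release-notes --dry-run 2019-07-24-changelog-0.adoc`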
+ required changelog: String + } cmd metrics { optional --dry-run } @@ -59,6 +66,7 @@ pub enum XtaskCmd { Release(Release), Promote(Promote), Dist(Dist), + PublishReleaseNotes(PublishReleaseNotes), Metrics(Metrics), Bb(Bb), } @@ -90,6 +98,13 @@ pub struct Dist { pub client_patch_version: Option, } +#[derive(Debug)] +pub struct PublishReleaseNotes { + pub changelog: String, + + pub dry_run: bool, +} + #[derive(Debug)] pub struct Metrics { pub dry_run: bool, diff --git a/xtask/src/install.rs b/xtask/src/install.rs index ae978d5512e7b..83223a551d130 100644 --- a/xtask/src/install.rs +++ b/xtask/src/install.rs @@ -62,7 +62,7 @@ fn fix_path_for_mac(sh: &Shell) -> Result<()> { let mut paths = env::split_paths(&vars).collect::>(); paths.append(&mut vscode_path); let new_paths = env::join_paths(paths).context("build env PATH")?; - sh.set_var("PATH", &new_paths); + sh.set_var("PATH", new_paths); } Ok(()) diff --git a/xtask/src/main.rs b/xtask/src/main.rs index a37f469adcb60..6a45033ada3ba 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -15,6 +15,7 @@ mod flags; mod install; mod release; mod dist; +mod publish; mod metrics; use anyhow::bail; @@ -36,6 +37,7 @@ fn main() -> anyhow::Result<()> { flags::XtaskCmd::Release(cmd) => cmd.run(sh), flags::XtaskCmd::Promote(cmd) => cmd.run(sh), flags::XtaskCmd::Dist(cmd) => cmd.run(sh), + flags::XtaskCmd::PublishReleaseNotes(cmd) => cmd.run(sh), flags::XtaskCmd::Metrics(cmd) => cmd.run(sh), flags::XtaskCmd::Bb(cmd) => { { diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index ebeb873463ea1..b6f730dbf1262 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -87,7 +87,7 @@ impl Metrics { self.measure_analysis_stats_path( sh, bench, - &format!("./target/rustc-perf/collector/benchmarks/{}", bench), + &format!("./target/rustc-perf/collector/benchmarks/{bench}"), ) } fn measure_analysis_stats_path( diff --git a/xtask/src/publish.rs b/xtask/src/publish.rs new file mode 100644 index 0000000000000..79b5f3d2f61b7 --- /dev/null +++ b/xtask/src/publish.rs @@ -0,0 +1,109 @@ +mod notes; + +use crate::flags; +use anyhow::{anyhow, bail, Result}; +use std::env; +use xshell::{cmd, Shell}; + +impl flags::PublishReleaseNotes { + pub(crate) fn run(self, sh: &Shell) -> Result<()> { + let asciidoc = sh.read_file(&self.changelog)?; + let mut markdown = notes::convert_asciidoc_to_markdown(std::io::Cursor::new(&asciidoc))?; + let file_name = check_file_name(self.changelog)?; + let tag_name = &file_name[0..10]; + let original_changelog_url = create_original_changelog_url(&file_name); + let additional_paragraph = + format!("\nSee also [original changelog]({original_changelog_url})."); + markdown.push_str(&additional_paragraph); + if self.dry_run { + println!("{markdown}"); + } else { + update_release(sh, tag_name, &markdown)?; + } + Ok(()) + } +} + +fn check_file_name>(path: P) -> Result { + let file_name = path + .as_ref() + .file_name() + .ok_or_else(|| anyhow!("file name is not specified as `changelog`"))? 
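+ // (lossy conversion is fine: a non-UTF-8 name cannot pass the digit checks below)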
+ .to_string_lossy(); + + let mut chars = file_name.chars(); + if file_name.len() >= 10 + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap() == '-' + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap() == '-' + && chars.next().unwrap().is_ascii_digit() + && chars.next().unwrap().is_ascii_digit() + { + Ok(file_name.to_string()) + } else { + bail!("unexpected file name format; no date information prefixed") + } +} + +fn create_original_changelog_url(file_name: &str) -> String { + let year = &file_name[0..4]; + let month = &file_name[5..7]; + let day = &file_name[8..10]; + let mut stem = &file_name[11..]; + if let Some(stripped) = stem.strip_suffix(".adoc") { + stem = stripped; + } + format!("https://rust-analyzer.github.io/thisweek/{year}/{month}/{day}/{stem}.html") +} + +fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> Result<()> { + let token = match env::var("GITHUB_TOKEN") { + Ok(token) => token, + Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."), + }; + let accept = "Accept: application/vnd.github+json"; + let authorization = format!("Authorization: Bearer {token}"); + let api_version = "X-GitHub-Api-Version: 2022-11-28"; + let release_url = "https://api.github.com/repos/rust-lang/rust-analyzer/releases"; + + let release_json = cmd!( + sh, + "curl -sf -H {accept} -H {authorization} -H {api_version} {release_url}/tags/{tag_name}" + ) + .read()?; + let release_id = cmd!(sh, "jq .id").stdin(release_json).read()?; + + let mut patch = String::new(); + write_json::object(&mut patch) + .string("tag_name", tag_name) + .string("target_commitish", "master") + .string("name", tag_name) + .string("body", release_notes) + .bool("draft", false) + .bool("prerelease", false); + let _ = cmd!( + sh, + "curl -sf -X PATCH -H {accept} -H {authorization} -H {api_version} {release_url}/{release_id} -d {patch}" + ) + .read()?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn original_changelog_url_creation() { + let input = "2019-07-24-changelog-0.adoc"; + let actual = create_original_changelog_url(input); + let expected = "https://rust-analyzer.github.io/thisweek/2019/07/24/changelog-0.html"; + assert_eq!(actual, expected); + } +} diff --git a/xtask/src/publish/notes.rs b/xtask/src/publish/notes.rs new file mode 100644 index 0000000000000..c30267295bf41 --- /dev/null +++ b/xtask/src/publish/notes.rs @@ -0,0 +1,631 @@ +use anyhow::{anyhow, bail}; +use std::{ + borrow::Cow, + io::{BufRead, Lines}, + iter::Peekable, +}; + +const LISTING_DELIMITER: &str = "----"; +const IMAGE_BLOCK_PREFIX: &str = "image::"; +const VIDEO_BLOCK_PREFIX: &str = "video::"; + +struct Converter<'a, 'b, R: BufRead> { + iter: &'a mut Peekable>, + output: &'b mut String, +} + +impl<'a, 'b, R: BufRead> Converter<'a, 'b, R> { + fn new(iter: &'a mut Peekable>, output: &'b mut String) -> Self { + Self { iter, output } + } + + fn process(&mut self) -> anyhow::Result<()> { + self.process_document_header()?; + self.skip_blank_lines()?; + self.output.push('\n'); + + loop { + let line = self.iter.peek().unwrap().as_deref().map_err(|e| anyhow!("{e}"))?; + if get_title(line).is_some() { + let line = self.iter.next().unwrap().unwrap(); + let (level, title) = get_title(&line).unwrap(); + 
self.write_title(level, title); + } else if get_list_item(line).is_some() { + self.process_list()?; + } else if line.starts_with('[') { + self.process_source_code_block(0)?; + } else if line.starts_with(LISTING_DELIMITER) { + self.process_listing_block(None, 0)?; + } else if line.starts_with('.') { + self.process_block_with_title(0)?; + } else if line.starts_with(IMAGE_BLOCK_PREFIX) { + self.process_image_block(None, 0)?; + } else if line.starts_with(VIDEO_BLOCK_PREFIX) { + self.process_video_block(None, 0)?; + } else { + self.process_paragraph(0, |line| line.is_empty())?; + } + + self.skip_blank_lines()?; + if self.iter.peek().is_none() { + break; + } + self.output.push('\n'); + } + Ok(()) + } + + fn process_document_header(&mut self) -> anyhow::Result<()> { + self.process_document_title()?; + + while let Some(line) = self.iter.next() { + let line = line?; + if line.is_empty() { + break; + } + if !line.starts_with(':') { + self.write_line(&line, 0) + } + } + + Ok(()) + } + + fn process_document_title(&mut self) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + if let Some((level, title)) = get_title(&line) { + let title = process_inline_macros(title)?; + if level == 1 { + self.write_title(level, &title); + return Ok(()); + } + } + } + bail!("document title not found") + } + + fn process_list(&mut self) -> anyhow::Result<()> { + let mut nesting = ListNesting::new(); + while let Some(line) = self.iter.peek() { + let line = line.as_deref().map_err(|e| anyhow!("{e}"))?; + + if get_list_item(line).is_some() { + let line = self.iter.next().unwrap()?; + let line = process_inline_macros(&line)?; + let (marker, item) = get_list_item(&line).unwrap(); + nesting.set_current(marker); + self.write_list_item(item, &nesting); + self.process_paragraph(nesting.indent(), |line| { + line.is_empty() || get_list_item(line).is_some() || line == "+" + })?; + } else if line == "+" { + let _ = self.iter.next().unwrap()?; + let line = self + .iter + .peek() + .ok_or_else(|| anyhow!("list continuation unexpectedly terminated"))?; + let line = line.as_deref().map_err(|e| anyhow!("{e}"))?; + + let indent = nesting.indent(); + if line.starts_with('[') { + self.write_line("", 0); + self.process_source_code_block(indent)?; + } else if line.starts_with(LISTING_DELIMITER) { + self.write_line("", 0); + self.process_listing_block(None, indent)?; + } else if line.starts_with('.') { + self.write_line("", 0); + self.process_block_with_title(indent)?; + } else if line.starts_with(IMAGE_BLOCK_PREFIX) { + self.write_line("", 0); + self.process_image_block(None, indent)?; + } else if line.starts_with(VIDEO_BLOCK_PREFIX) { + self.write_line("", 0); + self.process_video_block(None, indent)?; + } else { + self.write_line("", 0); + let current = nesting.current().unwrap(); + self.process_paragraph(indent, |line| { + line.is_empty() + || get_list_item(line).filter(|(m, _)| m == current).is_some() + || line == "+" + })?; + } + } else { + break; + } + self.skip_blank_lines()?; + } + + Ok(()) + } + + fn process_source_code_block(&mut self, level: usize) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + if let Some(styles) = line.strip_prefix("[source").and_then(|s| s.strip_suffix(']')) { + let mut styles = styles.split(','); + if !styles.next().unwrap().is_empty() { + bail!("not a source code block"); + } + let language = styles.next(); + return self.process_listing_block(language, level); + } + } + bail!("not a source code block") + } + + fn process_listing_block(&mut self, style: Option<&str>, 
level: usize) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + if line == LISTING_DELIMITER { + self.write_indent(level); + self.output.push_str("```"); + if let Some(style) = style { + self.output.push_str(style); + } + self.output.push('\n'); + while let Some(line) = self.iter.next() { + let line = line?; + if line == LISTING_DELIMITER { + self.write_line("```", level); + return Ok(()); + } else { + self.write_line(&line, level); + } + } + bail!("listing block is not terminated") + } + } + bail!("not a listing block") + } + + fn process_block_with_title(&mut self, level: usize) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + let title = + line.strip_prefix('.').ok_or_else(|| anyhow!("extraction of the title failed"))?; + + let line = self + .iter + .peek() + .ok_or_else(|| anyhow!("target block for the title is not found"))?; + let line = line.as_deref().map_err(|e| anyhow!("{e}"))?; + if line.starts_with(IMAGE_BLOCK_PREFIX) { + return self.process_image_block(Some(title), level); + } else if line.starts_with(VIDEO_BLOCK_PREFIX) { + return self.process_video_block(Some(title), level); + } else { + bail!("title for that block type is not supported"); + } + } + bail!("not a title") + } + + fn process_image_block(&mut self, caption: Option<&str>, level: usize) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + if let Some((url, attrs)) = parse_media_block(&line, IMAGE_BLOCK_PREFIX) { + let alt = if let Some(stripped) = + attrs.strip_prefix('"').and_then(|s| s.strip_suffix('"')) + { + stripped + } else { + attrs + }; + if let Some(caption) = caption { + self.write_caption_line(caption, level); + } + self.write_indent(level); + self.output.push_str("!["); + self.output.push_str(alt); + self.output.push_str("]("); + self.output.push_str(url); + self.output.push_str(")\n"); + return Ok(()); + } + } + bail!("not a image block") + } + + fn process_video_block(&mut self, caption: Option<&str>, level: usize) -> anyhow::Result<()> { + if let Some(Ok(line)) = self.iter.next() { + if let Some((url, attrs)) = parse_media_block(&line, VIDEO_BLOCK_PREFIX) { + let html_attrs = match attrs { + "options=loop" => "controls loop", + r#"options="autoplay,loop""# => "autoplay controls loop", + _ => bail!("unsupported video syntax"), + }; + if let Some(caption) = caption { + self.write_caption_line(caption, level); + } + self.write_indent(level); + self.output.push_str(r#"\n"); + return Ok(()); + } + } + bail!("not a video block") + } + + fn process_paragraph
<P>
(&mut self, level: usize, predicate: P) -> anyhow::Result<()> + where + P: Fn(&str) -> bool, + { + while let Some(line) = self.iter.peek() { + let line = line.as_deref().map_err(|e| anyhow!("{e}"))?; + if predicate(line) { + break; + } + + self.write_indent(level); + let line = self.iter.next().unwrap()?; + let line = line.trim_start(); + let line = process_inline_macros(line)?; + if let Some(stripped) = line.strip_suffix('+') { + self.output.push_str(stripped); + self.output.push('\\'); + } else { + self.output.push_str(&line); + } + self.output.push('\n'); + } + + Ok(()) + } + + fn skip_blank_lines(&mut self) -> anyhow::Result<()> { + while let Some(line) = self.iter.peek() { + if !line.as_deref().unwrap().is_empty() { + break; + } + self.iter.next().unwrap()?; + } + Ok(()) + } + + fn write_title(&mut self, indent: usize, title: &str) { + for _ in 0..indent { + self.output.push('#'); + } + self.output.push(' '); + self.output.push_str(title); + self.output.push('\n'); + } + + fn write_list_item(&mut self, item: &str, nesting: &ListNesting) { + let (marker, indent) = nesting.marker(); + self.write_indent(indent); + self.output.push_str(marker); + self.output.push_str(item); + self.output.push('\n'); + } + + fn write_caption_line(&mut self, caption: &str, indent: usize) { + self.write_indent(indent); + self.output.push('_'); + self.output.push_str(caption); + self.output.push_str("_\\\n"); + } + + fn write_indent(&mut self, indent: usize) { + for _ in 0..indent { + self.output.push(' '); + } + } + + fn write_line(&mut self, line: &str, indent: usize) { + self.write_indent(indent); + self.output.push_str(line); + self.output.push('\n'); + } +} + +pub(crate) fn convert_asciidoc_to_markdown(input: R) -> anyhow::Result +where + R: BufRead, +{ + let mut output = String::new(); + let mut iter = input.lines().peekable(); + + let mut converter = Converter::new(&mut iter, &mut output); + converter.process()?; + + Ok(output) +} + +fn get_title(line: &str) -> Option<(usize, &str)> { + strip_prefix_symbol(line, '=') +} + +fn get_list_item(line: &str) -> Option<(ListMarker, &str)> { + const HYPHEN_MARKER: &str = "- "; + if let Some(text) = line.strip_prefix(HYPHEN_MARKER) { + Some((ListMarker::Hyphen, text)) + } else if let Some((count, text)) = strip_prefix_symbol(line, '*') { + Some((ListMarker::Asterisk(count), text)) + } else if let Some((count, text)) = strip_prefix_symbol(line, '.') { + Some((ListMarker::Dot(count), text)) + } else { + None + } +} + +fn strip_prefix_symbol(line: &str, symbol: char) -> Option<(usize, &str)> { + let mut iter = line.chars(); + if iter.next()? 
!= symbol { + return None; + } + let mut count = 1; + loop { + match iter.next() { + Some(ch) if ch == symbol => { + count += 1; + } + Some(' ') => { + break; + } + _ => return None, + } + } + Some((count, iter.as_str())) +} + +fn parse_media_block<'a>(line: &'a str, prefix: &str) -> Option<(&'a str, &'a str)> { + if let Some(line) = line.strip_prefix(prefix) { + if let Some((url, rest)) = line.split_once('[') { + if let Some(attrs) = rest.strip_suffix(']') { + return Some((url, attrs)); + } + } + } + None +} + +#[derive(Debug)] +struct ListNesting(Vec); + +impl ListNesting { + fn new() -> Self { + Self(Vec::::with_capacity(6)) + } + + fn current(&mut self) -> Option<&ListMarker> { + self.0.last() + } + + fn set_current(&mut self, marker: ListMarker) { + let Self(markers) = self; + if let Some(index) = markers.iter().position(|m| *m == marker) { + markers.truncate(index + 1); + } else { + markers.push(marker); + } + } + + fn indent(&self) -> usize { + self.0.iter().map(|m| m.in_markdown().len()).sum() + } + + fn marker(&self) -> (&str, usize) { + let Self(markers) = self; + let indent = markers.iter().take(markers.len() - 1).map(|m| m.in_markdown().len()).sum(); + let marker = match markers.last() { + None => "", + Some(marker) => marker.in_markdown(), + }; + (marker, indent) + } +} + +#[derive(Debug, PartialEq, Eq)] +enum ListMarker { + Asterisk(usize), + Hyphen, + Dot(usize), +} + +impl ListMarker { + fn in_markdown(&self) -> &str { + match self { + ListMarker::Asterisk(_) => "- ", + ListMarker::Hyphen => "- ", + ListMarker::Dot(_) => "1. ", + } + } +} + +fn process_inline_macros(line: &str) -> anyhow::Result> { + let mut chars = line.char_indices(); + loop { + let (start, end, a_macro) = match get_next_line_component(&mut chars) { + Component::None => break, + Component::Text => continue, + Component::Macro(s, e, m) => (s, e, m), + }; + let mut src = line.chars(); + let mut processed = String::new(); + for _ in 0..start { + processed.push(src.next().unwrap()); + } + processed.push_str(a_macro.process()?.as_str()); + for _ in start..end { + let _ = src.next().unwrap(); + } + let mut pos = end; + + loop { + let (start, end, a_macro) = match get_next_line_component(&mut chars) { + Component::None => break, + Component::Text => continue, + Component::Macro(s, e, m) => (s, e, m), + }; + for _ in pos..start { + processed.push(src.next().unwrap()); + } + processed.push_str(a_macro.process()?.as_str()); + for _ in start..end { + let _ = src.next().unwrap(); + } + pos = end; + } + for ch in src { + processed.push(ch); + } + return Ok(Cow::Owned(processed)); + } + Ok(Cow::Borrowed(line)) +} + +fn get_next_line_component(chars: &mut std::str::CharIndices<'_>) -> Component { + let (start, mut macro_name) = match chars.next() { + None => return Component::None, + Some((_, ch)) if ch == ' ' || !ch.is_ascii() => return Component::Text, + Some((pos, ch)) => (pos, String::from(ch)), + }; + loop { + match chars.next() { + None => return Component::None, + Some((_, ch)) if ch == ' ' || !ch.is_ascii() => return Component::Text, + Some((_, ':')) => break, + Some((_, ch)) => macro_name.push(ch), + } + } + + let mut macro_target = String::new(); + loop { + match chars.next() { + None => return Component::None, + Some((_, ' ')) => return Component::Text, + Some((_, '[')) => break, + Some((_, ch)) => macro_target.push(ch), + } + } + + let mut attr_value = String::new(); + let end = loop { + match chars.next() { + None => return Component::None, + Some((pos, ']')) => break pos + 1, + Some((_, ch)) => 
attr_value.push(ch), + } + }; + + Component::Macro(start, end, Macro::new(macro_name, macro_target, attr_value)) +} + +enum Component { + None, + Text, + Macro(usize, usize, Macro), +} + +struct Macro { + name: String, + target: String, + attrs: String, +} + +impl Macro { + fn new(name: String, target: String, attrs: String) -> Self { + Self { name, target, attrs } + } + + fn process(&self) -> anyhow::Result { + let name = &self.name; + let text = match name.as_str() { + "https" => { + let url = &self.target; + let anchor_text = &self.attrs; + format!("[{anchor_text}](https:{url})") + } + "image" => { + let url = &self.target; + let alt = &self.attrs; + format!("![{alt}]({url})") + } + "kbd" => { + let keys = self.attrs.split('+').map(|k| Cow::Owned(format!("{k}"))); + keys.collect::>().join("+") + } + "pr" => { + let pr = &self.target; + let url = format!("https://github.com/rust-analyzer/rust-analyzer/pull/{pr}"); + format!("[`#{pr}`]({url})") + } + "commit" => { + let hash = &self.target; + let short = &hash[0..7]; + let url = format!("https://github.com/rust-analyzer/rust-analyzer/commit/{hash}"); + format!("[`{short}`]({url})") + } + "release" => { + let date = &self.target; + let url = format!("https://github.com/rust-analyzer/rust-analyzer/releases/{date}"); + format!("[`{date}`]({url})") + } + _ => bail!("macro not supported: {name}"), + }; + Ok(text) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs::read_to_string; + + #[test] + fn test_asciidoc_to_markdown_conversion() { + let input = read_to_string("test_data/input.adoc").unwrap(); + let expected = read_to_string("test_data/expected.md").unwrap(); + let actual = convert_asciidoc_to_markdown(std::io::Cursor::new(&input)).unwrap(); + + assert_eq!(actual, expected); + } + + macro_rules! test_inline_macro_processing { + ($(( + $name:ident, + $input:expr, + $expected:expr + ),)*) => ($( + #[test] + fn $name() { + let input = $input; + let actual = process_inline_macros(&input).unwrap(); + let expected = $expected; + assert_eq!(actual, expected) + } + )*); + } + + test_inline_macro_processing! 
{ + (inline_macro_processing_for_empty_line, "", ""), + (inline_macro_processing_for_line_with_no_macro, "foo bar", "foo bar"), + ( + inline_macro_processing_for_macro_in_line_start, + "kbd::[Ctrl+T] foo", + "Ctrl+T foo" + ), + ( + inline_macro_processing_for_macro_in_line_end, + "foo kbd::[Ctrl+T]", + "foo Ctrl+T" + ), + ( + inline_macro_processing_for_macro_in_the_middle_of_line, + "foo kbd::[Ctrl+T] foo", + "foo Ctrl+T foo" + ), + ( + inline_macro_processing_for_several_macros, + "foo kbd::[Ctrl+T] foo kbd::[Enter] foo", + "foo Ctrl+T foo Enter foo" + ), + ( + inline_macro_processing_for_several_macros_without_text_in_between, + "foo kbd::[Ctrl+T]kbd::[Enter] foo", + "foo Ctrl+TEnter foo" + ), + } +} diff --git a/xtask/src/release.rs b/xtask/src/release.rs index eda8fceef05ba..4a30691477857 100644 --- a/xtask/src/release.rs +++ b/xtask/src/release.rs @@ -64,8 +64,8 @@ impl flags::Release { let prev_tag = tags.lines().filter(|line| is_release_tag(line)).last().unwrap(); let contents = changelog::get_changelog(sh, changelog_n, &commit, prev_tag, &today)?; - let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n)); - sh.write_file(&path, &contents)?; + let path = changelog_dir.join(format!("{today}-changelog-{changelog_n}.adoc")); + sh.write_file(path, contents)?; Ok(()) } diff --git a/xtask/src/release/changelog.rs b/xtask/src/release/changelog.rs index 2647f7794f2c6..d2a1483e3873a 100644 --- a/xtask/src/release/changelog.rs +++ b/xtask/src/release/changelog.rs @@ -23,9 +23,9 @@ pub(crate) fn get_changelog( let mut others = String::new(); for line in git_log.lines() { let line = line.trim_start(); - if let Some(pr_num) = parse_pr_number(&line) { + if let Some(pr_num) = parse_pr_number(line) { let accept = "Accept: application/vnd.github.v3+json"; - let authorization = format!("Authorization: token {}", token); + let authorization = format!("Authorization: token {token}"); let pr_url = "https://api.github.com/repos/rust-lang/rust-analyzer/issues"; // we don't use an HTTPS client or JSON parser to keep the build times low @@ -57,36 +57,36 @@ pub(crate) fn get_changelog( PrKind::Other => &mut others, PrKind::Skip => continue, }; - writeln!(s, "* pr:{}[] {}", pr_num, l.message.as_deref().unwrap_or(&pr_title)).unwrap(); + writeln!(s, "* pr:{pr_num}[] {}", l.message.as_deref().unwrap_or(&pr_title)).unwrap(); } } let contents = format!( "\ -= Changelog #{} += Changelog #{changelog_n} :sectanchors: +:experimental: :page-layout: post -Commit: commit:{}[] + -Release: release:{}[] +Commit: commit:{commit}[] + +Release: release:{today}[] == New Features -{} +{features} == Fixes -{} +{fixes} == Internal Improvements -{} +{internal} == Others -{} -", - changelog_n, commit, today, features, fixes, internal, others +{others} +" ); Ok(contents) } @@ -112,11 +112,9 @@ fn unescape(s: &str) -> String { fn parse_pr_number(s: &str) -> Option { const BORS_PREFIX: &str = "Merge #"; const HOMU_PREFIX: &str = "Auto merge of #"; - if s.starts_with(BORS_PREFIX) { - let s = &s[BORS_PREFIX.len()..]; + if let Some(s) = s.strip_prefix(BORS_PREFIX) { s.parse().ok() - } else if s.starts_with(HOMU_PREFIX) { - let s = &s[HOMU_PREFIX.len()..]; + } else if let Some(s) = s.strip_prefix(HOMU_PREFIX) { if let Some(space) = s.find(' ') { s[..space].parse().ok() } else { diff --git a/xtask/test_data/expected.md b/xtask/test_data/expected.md new file mode 100644 index 0000000000000..19c940c67bdc5 --- /dev/null +++ b/xtask/test_data/expected.md @@ -0,0 +1,81 @@ +# Changelog #256 + +Hello! 
+ +Commit: [`0123456`](https://github.com/rust-analyzer/rust-analyzer/commit/0123456789abcdef0123456789abcdef01234567) \ +Release: [`2022-01-01`](https://github.com/rust-analyzer/rust-analyzer/releases/2022-01-01) + +## New Features + +- **BREAKING** [`#1111`](https://github.com/rust-analyzer/rust-analyzer/pull/1111) shortcut ctrl+r + - hyphen-prefixed list item +- nested list item + - `foo` -> `foofoo` + - `bar` -> `barbar` +- listing in the secondary level + 1. install + 1. add to config + + ```json + {"foo":"bar"} + ``` +- list item with continuation + + ![](https://example.com/animation.gif) + + ![alt text](https://example.com/animation.gif) + + + + + + _Image_\ + ![](https://example.com/animation.gif) + + _Video_\ + + + ```bash + rustup update nightly + ``` + + ``` + This is a plain listing. + ``` +- single line item followed by empty lines +- multiline list + item followed by empty lines +- multiline list + item with indent +- multiline list + item not followed by empty lines +- multiline list + item followed by different marker + - foo + - bar +- multiline list + item followed by list continuation + + paragraph + paragraph + +## Another Section + +- foo bar baz +- list item with an inline image + ![](https://example.com/animation.gif) + +The highlight of the month is probably [`#1111`](https://github.com/rust-analyzer/rust-analyzer/pull/1111). +See [online manual](https://example.com/manual) for more information. + +```bash +rustup update nightly +``` + +``` +rustup update nightly +``` + +``` +This is a plain listing. +``` diff --git a/xtask/test_data/input.adoc b/xtask/test_data/input.adoc new file mode 100644 index 0000000000000..105bd8df0db76 --- /dev/null +++ b/xtask/test_data/input.adoc @@ -0,0 +1,90 @@ += Changelog #256 +:sectanchors: +:page-layout: post + +Hello! + +Commit: commit:0123456789abcdef0123456789abcdef01234567[] + +Release: release:2022-01-01[] + +== New Features + +* **BREAKING** pr:1111[] shortcut kbd:[ctrl+r] +- hyphen-prefixed list item +* nested list item +** `foo` -> `foofoo` +** `bar` -> `barbar` +* listing in the secondary level +. install +. add to config ++ +[source,json] +---- +{"foo":"bar"} +---- +* list item with continuation ++ +image::https://example.com/animation.gif[] ++ +image::https://example.com/animation.gif["alt text"] ++ +video::https://example.com/movie.mp4[options=loop] ++ +video::https://example.com/movie.mp4[options="autoplay,loop"] ++ +.Image +image::https://example.com/animation.gif[] ++ +.Video +video::https://example.com/movie.mp4[options=loop] ++ +[source,bash] +---- +rustup update nightly +---- ++ +---- +This is a plain listing. +---- +* single line item followed by empty lines + +* multiline list +item followed by empty lines + +* multiline list + item with indent + +* multiline list +item not followed by empty lines +* multiline list +item followed by different marker +** foo +** bar +* multiline list +item followed by list continuation ++ +paragraph +paragraph + +== Another Section + +* foo bar baz +* list item with an inline image + image:https://example.com/animation.gif[] + +The highlight of the month is probably pr:1111[]. +See https://example.com/manual[online manual] for more information. + +[source,bash] +---- +rustup update nightly +---- + +[source] +---- +rustup update nightly +---- + +---- +This is a plain listing. 
+---- From 621e96bd6ad0cf35c4dc4d1cfd8f6bc83096946f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 9 Jan 2023 20:47:51 +0100 Subject: [PATCH 007/501] Encode one level of cfg_attr in attr_id --- crates/hir-def/src/attr.rs | 2 +- crates/hir-def/src/child_by_source.rs | 2 +- crates/hir-def/src/lib.rs | 6 +- crates/hir-def/src/nameres/collector.rs | 4 +- crates/hir-def/src/nameres/diagnostics.rs | 8 +-- crates/hir-expand/src/attrs.rs | 79 ++++++++++++++++------- crates/hir-expand/src/db.rs | 9 ++- crates/hir-expand/src/hygiene.rs | 2 +- crates/hir-expand/src/lib.rs | 24 ++++--- crates/hir/src/lib.rs | 8 ++- crates/intern/Cargo.toml | 7 +- 11 files changed, 99 insertions(+), 52 deletions(-) diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index eb88a74e448de..a0113fd048ce5 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -546,7 +546,7 @@ impl AttrSourceMap { } fn source_of_id(&self, id: AttrId) -> InFile<&Either> { - let ast_idx = id.ast_index as usize; + let ast_idx = id.ast_index(); let file_id = match self.mod_def_site_file_id { Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id, _ => self.file_id, diff --git a/crates/hir-def/src/child_by_source.rs b/crates/hir-def/src/child_by_source.rs index bb13165257ba4..19d2fe956f098 100644 --- a/crates/hir-def/src/child_by_source.rs +++ b/crates/hir-def/src/child_by_source.rs @@ -117,7 +117,7 @@ impl ChildBySource for ItemScope { let adt = ast_id.to_node(db.upcast()); calls.for_each(|(attr_id, call_id, calls)| { if let Some(Either::Left(attr)) = - adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize) + adt.doc_comments_and_attrs().nth(attr_id.ast_index()) { res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into())); } diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index cc0ea14d0156a..8eae2e92f4262 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -941,7 +941,7 @@ pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId { fn derive_macro_as_call_id( db: &dyn db::DefDatabase, item_attr: &AstIdWithPath, - derive_attr: AttrId, + derive_attr_index: AttrId, derive_pos: u32, krate: CrateId, resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, @@ -954,7 +954,7 @@ fn derive_macro_as_call_id( MacroCallKind::Derive { ast_id: item_attr.ast_id, derive_index: derive_pos, - derive_attr_index: derive_attr.ast_index, + derive_attr_index, }, ); Ok((macro_id, def_id, call_id)) @@ -982,7 +982,7 @@ fn attr_macro_as_call_id( MacroCallKind::Attr { ast_id: item_attr.ast_id, attr_args: Arc::new(arg), - invoc_attr_index: macro_attr.id.ast_index, + invoc_attr_index: macro_attr.id, is_derive, }, ); diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index ad31e9aac2b5b..33a787fd9f95d 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -452,7 +452,7 @@ impl DefCollector<'_> { MacroCallKind::Attr { ast_id: ast_id.ast_id, attr_args: Default::default(), - invoc_attr_index: attr.id.ast_index, + invoc_attr_index: attr.id, is_derive: false, }, attr.path().clone(), @@ -1407,7 +1407,7 @@ impl DefCollector<'_> { directive.module_id, MacroCallKind::Derive { ast_id: ast_id.ast_id, - derive_attr_index: derive_attr.ast_index, + derive_attr_index: *derive_attr, derive_index: *derive_pos as u32, }, ast_id.path.clone(), diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index 
74b25f4cc9828..b024d7c6777c0 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -31,9 +31,9 @@ pub enum DefDiagnosticKind { UnimplementedBuiltinMacro { ast: AstId }, - InvalidDeriveTarget { ast: AstId, id: u32 }, + InvalidDeriveTarget { ast: AstId, id: usize }, - MalformedDerive { ast: AstId, id: u32 }, + MalformedDerive { ast: AstId, id: usize }, } #[derive(Debug, PartialEq, Eq)] @@ -119,7 +119,7 @@ impl DefDiagnostic { ) -> Self { Self { in_module: container, - kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index }, + kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index() }, } } @@ -130,7 +130,7 @@ impl DefDiagnostic { ) -> Self { Self { in_module: container, - kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index }, + kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index() }, } } } diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index 6967d153271f1..c7135732b8a2a 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -1,3 +1,4 @@ +//! A higher level attributes based on TokenTree, with also some shortcuts. use std::{fmt, ops, sync::Arc}; use base_db::CrateId; @@ -65,14 +66,16 @@ impl RawAttrs { (None, entries @ Some(_)) => Self { entries }, (Some(entries), None) => Self { entries: Some(entries.clone()) }, (Some(a), Some(b)) => { - let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1); + let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32; Self { entries: Some( a.iter() .cloned() .chain(b.iter().map(|it| { let mut it = it.clone(); - it.id.ast_index += last_ast_index; + it.id.id = it.id.ast_index() as u32 + last_ast_index + | (it.id.cfg_attr_index().unwrap_or(0) as u32) + << AttrId::AST_INDEX_BITS; it })) .collect(), @@ -83,6 +86,7 @@ impl RawAttrs { } /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`. + // FIXME: This should return a different type pub fn filter(self, db: &dyn AstDatabase, krate: CrateId) -> RawAttrs { let has_cfg_attrs = self .iter() @@ -106,27 +110,22 @@ impl RawAttrs { _ => return smallvec![attr.clone()], }; - // Input subtree is: `(cfg, $(attr),+)` - // Split it up into a `cfg` subtree and the `attr` subtrees. - // FIXME: There should be a common API for this. - let mut parts = subtree.token_trees.split(|tt| { - matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. 
}))) - }); - let cfg = match parts.next() { + let (cfg, parts) = match parse_cfg_attr_input(subtree) { Some(it) => it, - None => return smallvec![], + None => return smallvec![attr.clone()], }; - let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; - let cfg = CfgExpr::parse(&cfg); let index = attr.id; - let attrs = parts.filter(|a| !a.is_empty()).filter_map(|attr| { - let tree = Subtree { delimiter: None, token_trees: attr.to_vec() }; - // FIXME hygiene - let hygiene = Hygiene::new_unhygienic(); - Attr::from_tt(db, &tree, &hygiene, index) - }); + let attrs = + parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| { + let tree = Subtree { delimiter: None, token_trees: attr.to_vec() }; + // FIXME hygiene + let hygiene = Hygiene::new_unhygienic(); + Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx)) + }); let cfg_options = &crate_graph[krate].cfg_options; + let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() }; + let cfg = CfgExpr::parse(&cfg); if cfg_options.check(&cfg) == Some(false) { smallvec![] } else { @@ -143,7 +142,32 @@ impl RawAttrs { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct AttrId { - pub ast_index: u32, + id: u32, +} + +// FIXME: This only handles a single level of cfg_attr nesting +// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again +impl AttrId { + const CFG_ATTR_BITS: usize = 7; + const AST_INDEX_MASK: usize = 0x00FF_FFFF; + const AST_INDEX_BITS: usize = Self::AST_INDEX_MASK.count_ones() as usize; + const CFG_ATTR_SET_BITS: u32 = 1 << 31; + + pub fn ast_index(&self) -> usize { + self.id as usize & Self::AST_INDEX_MASK + } + + pub fn cfg_attr_index(&self) -> Option { + if self.id & Self::CFG_ATTR_SET_BITS == 0 { + None + } else { + Some(self.id as usize >> Self::AST_INDEX_BITS) + } + } + + pub fn with_cfg_attr(self, idx: usize) -> AttrId { + AttrId { id: self.id | (idx as u32) << Self::AST_INDEX_BITS | Self::CFG_ATTR_SET_BITS } + } } #[derive(Debug, Clone, PartialEq, Eq)] @@ -272,10 +296,7 @@ pub fn collect_attrs( Either::Left(attr) => attr.kind().is_outer(), Either::Right(comment) => comment.is_outer(), }); - outer_attrs - .chain(inner_attrs) - .enumerate() - .map(|(id, attr)| (AttrId { ast_index: id as u32 }, attr)) + outer_attrs.chain(inner_attrs).enumerate().map(|(id, attr)| (AttrId { id: id as u32 }, attr)) } fn inner_attributes( @@ -311,3 +332,15 @@ fn inner_attributes( }); Some(attrs) } + +// Input subtree is: `(cfg, $(attr),+)` +// Split it up into a `cfg` subtree and the `attr` subtrees. +pub fn parse_cfg_attr_input( + subtree: &Subtree, +) -> Option<(&[tt::TokenTree], impl Iterator)> { + let mut parts = subtree + .token_trees + .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))); + let cfg = parts.next()?; + Some((cfg, parts.filter(|it| !it.is_empty()))) +} diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index b28e60187deff..ec5886824f861 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -168,7 +168,9 @@ pub fn expand_speculative( // Attributes may have an input token tree, build the subtree and map for this as well // then try finding a token id for our token if it is inside this input subtree. 
let item = ast::Item::cast(speculative_args.clone())?; - item.doc_comments_and_attrs().nth(invoc_attr_index as usize).and_then(Either::left) + item.doc_comments_and_attrs() + .nth(invoc_attr_index.ast_index()) + .and_then(Either::left) }?; match attr.token_tree() { Some(token_tree) => { @@ -321,6 +323,7 @@ fn macro_arg( } fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet { + // FIXME: handle `cfg_attr` (|| { let censor = match loc.kind { MacroCallKind::FnLike { .. } => return None, @@ -328,7 +331,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet FxHashSet { + // FIXME: handle `cfg_attr` let tt = ast_id .to_node(db) .doc_comments_and_attrs() - .nth(invoc_attr_index as usize) + .nth(invoc_attr_index.ast_index()) .and_then(Either::left)? .token_tree()?; Some(InFile::new(ast_id.file_id, tt)) @@ -398,8 +400,7 @@ impl MacroDefId { } } -// FIXME: attribute indices do not account for `cfg_attr`, which means that we'll strip the whole -// `cfg_attr` instead of just one of the attributes it expands to +// FIXME: attribute indices do not account for nested `cfg_attr` impl MacroCallKind { /// Returns the file containing the macro invocation. @@ -420,7 +421,7 @@ impl MacroCallKind { // FIXME: handle `cfg_attr` ast_id.with_value(ast_id.to_node(db)).map(|it| { it.doc_comments_and_attrs() - .nth(*derive_attr_index as usize) + .nth(derive_attr_index.ast_index()) .and_then(|it| match it { Either::Left(attr) => Some(attr.syntax().clone()), Either::Right(_) => None, @@ -432,7 +433,7 @@ impl MacroCallKind { // FIXME: handle `cfg_attr` ast_id.with_value(ast_id.to_node(db)).map(|it| { it.doc_comments_and_attrs() - .nth(*invoc_attr_index as usize) + .nth(invoc_attr_index.ast_index()) .and_then(|it| match it { Either::Left(attr) => Some(attr.syntax().clone()), Either::Right(_) => None, @@ -489,19 +490,21 @@ impl MacroCallKind { MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(), MacroCallKind::Derive { ast_id, derive_attr_index, .. } => { // FIXME: should be the range of the macro name, not the whole derive + // FIXME: handle `cfg_attr` ast_id .to_node(db) .doc_comments_and_attrs() - .nth(derive_attr_index as usize) + .nth(derive_attr_index.ast_index()) .expect("missing derive") .expect_left("derive is a doc comment?") .syntax() .text_range() } + // FIXME: handle `cfg_attr` MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id .to_node(db) .doc_comments_and_attrs() - .nth(invoc_attr_index as usize) + .nth(invoc_attr_index.ast_index()) .expect("missing attribute") .expect_left("attribute macro is a doc comment?") .syntax() @@ -593,9 +596,10 @@ impl ExpansionInfo { let token_range = token.value.text_range(); match &loc.kind { MacroCallKind::Attr { attr_args, invoc_attr_index, is_derive, .. 
} => { + // FIXME: handle `cfg_attr` let attr = item .doc_comments_and_attrs() - .nth(*invoc_attr_index as usize) + .nth(invoc_attr_index.ast_index()) .and_then(Either::left)?; match attr.token_tree() { Some(token_tree) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index ad44e240423e0..ec66660f34a53 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -785,7 +785,7 @@ fn precise_macro_call_location( let token = (|| { let derive_attr = node .doc_comments_and_attrs() - .nth(*derive_attr_index as usize) + .nth(derive_attr_index.ast_index()) .and_then(Either::left)?; let token_tree = derive_attr.meta()?.token_tree()?; let group_by = token_tree @@ -813,9 +813,11 @@ fn precise_macro_call_location( let node = ast_id.to_node(db.upcast()); let attr = node .doc_comments_and_attrs() - .nth((*invoc_attr_index) as usize) + .nth(invoc_attr_index.ast_index()) .and_then(Either::left) - .unwrap_or_else(|| panic!("cannot find attribute #{invoc_attr_index}")); + .unwrap_or_else(|| { + panic!("cannot find attribute #{}", invoc_attr_index.ast_index()) + }); ( ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))), diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml index dd5110255c50b..78007fc860ff4 100644 --- a/crates/intern/Cargo.toml +++ b/crates/intern/Cargo.toml @@ -1,9 +1,14 @@ [package] name = "intern" version = "0.0.0" +description = "TBD" +license = "MIT OR Apache-2.0" edition = "2021" +rust-version = "1.65" + +[lib] +doctest = false -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] # We need to freeze the version of the crate, as the raw-api feature is considered unstable From 9a15cc81b4ba96cfc8164ba4848cbf648d40f86a Mon Sep 17 00:00:00 2001 From: bvanjoi Date: Fri, 6 Jan 2023 18:52:41 +0800 Subject: [PATCH 008/501] fix(ty): should query impls in nearest block --- crates/hir-ty/src/method_resolution.rs | 9 +++- crates/ide/src/goto_definition.rs | 64 ++++++++++++++++++++++++++ 2 files changed, 71 insertions(+), 2 deletions(-) diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 2f5fa3083c7a1..680e01e26b3c5 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -1094,13 +1094,13 @@ fn iterate_inherent_methods( None => return ControlFlow::Continue(()), }; - let (module, block) = match visible_from_module { + let (module, mut block) = match visible_from_module { VisibleFromModule::Filter(module) => (Some(module), module.containing_block()), VisibleFromModule::IncludeBlock(block) => (None, Some(block)), VisibleFromModule::None => (None, None), }; - if let Some(block_id) = block { + while let Some(block_id) = block { if let Some(impls) = db.inherent_impls_in_block(block_id) { impls_for_self_ty( &impls, @@ -1113,6 +1113,11 @@ fn iterate_inherent_methods( callback, )?; } + + block = db + .block_def_map(block_id) + .and_then(|map| map.parent()) + .and_then(|module| module.containing_block()); } for krate in def_crates { diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index 73fd518a9ef08..93019527f44da 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -1916,4 +1916,68 @@ fn main() { "#, ) } + + #[test] + fn query_impls_in_nearest_block() { + check( + r#" +struct S1; +impl S1 { + fn e() -> () {} +} +fn f1() { + struct S1; + impl S1 { + fn e() -> () {} + //^ + } + fn f2() { + fn f3() { + S1::e$0(); + } + } +} +"#, + ); + + check( + r#" +struct 
S1; +impl S1 { + fn e() -> () {} +} +fn f1() { + struct S1; + impl S1 { + fn e() -> () {} + //^ + } + fn f2() { + struct S2; + S1::e$0(); + } +} +fn f12() { + struct S1; + impl S1 { + fn e() -> () {} + } +} +"#, + ); + + check( + r#" +struct S1; +impl S1 { + fn e() -> () {} + //^ +} +fn f2() { + struct S2; + S1::e$0(); +} +"#, + ); + } } From ef4debc8b1d9b71de01e966cf17133ae2a1f5c43 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Fri, 30 Dec 2022 11:20:48 +0000 Subject: [PATCH 009/501] comment out disabled code --- crates/syntax/src/syntax_node.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs index a08c01597d3f3..524be8139d14b 100644 --- a/crates/syntax/src/syntax_node.rs +++ b/crates/syntax/src/syntax_node.rs @@ -48,10 +48,10 @@ impl SyntaxTreeBuilder { pub fn finish(self) -> Parse { let (green, errors) = self.finish_raw(); // Disable block validation, see https://github.com/rust-lang/rust-analyzer/pull/10357 - if cfg!(debug_assertions) && false { - let node = SyntaxNode::new_root(green.clone()); - crate::validation::validate_block_structure(&node); - } + // if cfg!(debug_assertions) { + // let node = SyntaxNode::new_root(green.clone()); + // crate::validation::validate_block_structure(&node); + // } Parse::new(green, errors) } From b971b5b64f784a08a0d787eb8c9a6a0bb928779b Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 07:06:26 +0000 Subject: [PATCH 010/501] suppress lint --- crates/syntax/src/syntax_node.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs index 524be8139d14b..2e9e0bc226b9d 100644 --- a/crates/syntax/src/syntax_node.rs +++ b/crates/syntax/src/syntax_node.rs @@ -48,10 +48,11 @@ impl SyntaxTreeBuilder { pub fn finish(self) -> Parse { let (green, errors) = self.finish_raw(); // Disable block validation, see https://github.com/rust-lang/rust-analyzer/pull/10357 - // if cfg!(debug_assertions) { - // let node = SyntaxNode::new_root(green.clone()); - // crate::validation::validate_block_structure(&node); - // } + #[allow(clippy::overly_complex_bool_expr)] + if cfg!(debug_assertions) && false { + let node = SyntaxNode::new_root(green.clone()); + crate::validation::validate_block_structure(&node); + } Parse::new(green, errors) } From db41e6b40892b89ebb7184ceda8e94896bf8d37f Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 07:16:27 +0000 Subject: [PATCH 011/501] remove 'deny_clippy' test --- crates/rust-analyzer/tests/slow-tests/tidy.rs | 27 ------------------- 1 file changed, 27 deletions(-) diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 35b5af731925e..8e3097fce4230 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -82,7 +82,6 @@ fn files_are_tidy() { check_dbg(&path, &text); check_test_attrs(&path, &text); check_trailing_ws(&path, &text); - deny_clippy(&path, &text); tidy_docs.visit(&path, &text); tidy_marks.visit(&path, &text); } @@ -144,32 +143,6 @@ fn check_cargo_toml(path: &Path, text: String) { } } -fn deny_clippy(path: &Path, text: &str) { - let ignore = &[ - // The documentation in string literals may contain anything for its own purposes - "ide-db/src/generated/lints.rs", - // The tests test clippy lint hovers - "ide/src/hover/tests.rs", - // The tests test clippy lint completions - 
"ide-completion/src/tests/attribute.rs", - ]; - if ignore.iter().any(|p| path.ends_with(p)) { - return; - } - - if text.contains("\u{61}llow(clippy") { - panic!( - "\n\nallowing lints is forbidden: {}. -rust-analyzer intentionally doesn't check clippy on CI. -You can allow lint globally via `xtask clippy`. -See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion. - -", - path.display() - ) - } -} - #[cfg(not(feature = "in-rust-tree"))] #[test] fn check_licenses() { From 56ffe63c3c108f1c6002ac59b76511a0b81953c6 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 07:18:17 +0000 Subject: [PATCH 012/501] derive 'Hash' --- crates/base-db/src/input.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 5fa4a80249509..b44a157e2538c 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -84,15 +84,10 @@ pub struct CrateGraph { arena: NoHashHashMap, } -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct CrateId(pub u32); impl stdx::hash::NoHashHashable for CrateId {} -impl std::hash::Hash for CrateId { - fn hash(&self, state: &mut H) { - self.0.hash(state); - } -} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateName(SmolStr); From 6153ea8dce1dcd60ecff446332c2639be8a6b452 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 08:28:28 +0000 Subject: [PATCH 013/501] loop-that-never-loops --- crates/hir-def/src/attr.rs | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index ab5d180e1bb9e..fb1e72b0d0d81 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -251,17 +251,17 @@ impl Attrs { let enum_ = &item_tree[loc.id.value]; let cfg_options = &crate_graph[krate].cfg_options; - let variant = 'tri: loop { - let mut idx = 0; - for variant in enum_.variants.clone() { - let attrs = item_tree.attrs(db, krate, variant.into()); - if attrs.is_cfg_enabled(cfg_options) { - if it.local_id == Idx::from_raw(RawIdx::from(idx)) { - break 'tri variant; - } - idx += 1; + let mut idx = 0; + let Some(variant) = enum_.variants.clone().find(|variant| { + let attrs = item_tree.attrs(db, krate, (*variant).into()); + if attrs.is_cfg_enabled(cfg_options) { + if it.local_id == Idx::from_raw(RawIdx::from(idx)) { + return true } + idx += 1; } + false + }) else { return Arc::new(res); }; (item_tree[variant].fields.clone(), item_tree, krate) From 4267b11c4097b52ec59323c29709a8d06ed7b4cd Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 10 Jan 2023 09:43:08 +0100 Subject: [PATCH 014/501] Revert "Use ZWNJ to prevent VSCode from forming ligatures between hints and code" --- editors/code/src/client.ts | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts index 82cdf0390acaa..1fbe9d4ea033d 100644 --- a/editors/code/src/client.ts +++ b/editors/code/src/client.ts @@ -102,24 +102,6 @@ export async function createClient( } }, }, - async provideInlayHints(document, viewPort, token, next) { - const inlays = await next(document, viewPort, token); - if (!inlays) { - return inlays; - } - // U+200C is a zero-width non-joiner to prevent the editor from forming a ligature - // between code and hints - for (const inlay of inlays) { - if (typeof inlay.label === "string") { - inlay.label = 
`\u{200c}${inlay.label}\u{200c}`; - } else if (Array.isArray(inlay.label)) { - for (const it of inlay.label) { - it.value = `\u{200c}${it.value}\u{200c}`; - } - } - } - return inlays; - }, async handleDiagnostics( uri: vscode.Uri, diagnosticList: vscode.Diagnostic[], From 220996c5c1446add4fb508e60542f383857dc257 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 10 Jan 2023 17:25:02 +0200 Subject: [PATCH 015/501] Tweak wording and remove blank issue template --- .github/ISSUE_TEMPLATE/blank_issue.md | 10 ---------- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- 2 files changed, 1 insertion(+), 11 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/blank_issue.md diff --git a/.github/ISSUE_TEMPLATE/blank_issue.md b/.github/ISSUE_TEMPLATE/blank_issue.md deleted file mode 100644 index a08ad07cbf8d3..0000000000000 --- a/.github/ISSUE_TEMPLATE/blank_issue.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -name: Blank Issue -about: Create a blank issue. -title: '' -labels: '' -assignees: '' - ---- - - diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index c2e21933c9a68..b2a2153f0941f 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -22,4 +22,4 @@ Otherwise please try to provide information which will help us to fix the issue **rustc version**: (eg. output of `rustc -V`) -**relevant settings**: (eg. client settings, or environment variables like `CARGO`, `RUSTUP_HOME` or `CARGO_HOME`) +**relevant settings**: (eg. client settings, or environment variables like `CARGO`, `RUSTC`, `RUSTUP_HOME` or `CARGO_HOME`) From 95d14c393c7efd7ccb56076eb2bc799e5a367c68 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 18:20:12 +0000 Subject: [PATCH 016/501] avoid 'cloning' types that implement 'Copy' --- crates/hir-ty/src/lower.rs | 2 +- crates/hir/src/lib.rs | 2 +- crates/mbe/src/benchmark.rs | 2 +- crates/mbe/src/expander/transcriber.rs | 2 +- crates/mbe/src/parser.rs | 2 +- crates/mbe/src/tt_iter.rs | 4 ++-- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 4b1f40f91d6e0..6a58546982d88 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -1931,7 +1931,7 @@ pub(crate) fn const_or_path_to_chalk( debruijn: DebruijnIndex, ) -> Const { match value { - ConstScalarOrPath::Scalar(s) => intern_const_scalar(s.clone(), expected_ty), + ConstScalarOrPath::Scalar(s) => intern_const_scalar(*s, expected_ty), ConstScalarOrPath::Path(n) => { let path = ModPath::from_segments(PathKind::Plain, Some(n.clone())); path_to_const(db, resolver, &path, mode, args, debruijn) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index ec66660f34a53..6baf53e35c6b5 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -923,7 +923,7 @@ impl Struct { } pub fn repr(self, db: &dyn HirDatabase) -> Option { - db.struct_data(self.id).repr.clone() + db.struct_data(self.id).repr } pub fn kind(self, db: &dyn HirDatabase) -> StructKind { diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 4b75002501878..0fee6dfe43336 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -145,7 +145,7 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri Op::Ident(it) => parent.token_trees.push(tt::Leaf::from(it.clone()).into()), Op::Punct(puncts) => { for punct in puncts { - parent.token_trees.push(tt::Leaf::from(punct.clone()).into()); + 
parent.token_trees.push(tt::Leaf::from(*punct).into()); } } Op::Repeat { tokens, kind, separator } => { diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index db0d327bf409b..fb316320ae788 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -138,7 +138,7 @@ fn expand_subtree( Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()), Op::Punct(puncts) => { for punct in puncts { - arena.push(tt::Leaf::from(punct.clone()).into()); + arena.push(tt::Leaf::from(*punct).into()); } } Op::Subtree { tokens, delimiter } => { diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index fad905e97f456..875dc1ad87bd0 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -126,7 +126,7 @@ fn next_op( src.next().expect("first token already peeked"); // Note that the '$' itself is a valid token inside macro_rules. let second = match src.next() { - None => return Ok(Op::Punct(smallvec![p.clone()])), + None => return Ok(Op::Punct(smallvec![*p])), Some(it) => it, }; match second { diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index bee7b5de6ac31..7787c74da8837 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -114,7 +114,7 @@ impl<'a> TtIter<'a> { ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => { let _ = self.next().unwrap(); let _ = self.next().unwrap(); - Ok(smallvec![first, second.clone(), third.unwrap().clone()]) + Ok(smallvec![first, *second, *third.unwrap()]) } ('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _) | ('-' | '=' | '>', '>', _) @@ -125,7 +125,7 @@ impl<'a> TtIter<'a> { | ('<', '<', _) | ('|', '|', _) => { let _ = self.next().unwrap(); - Ok(smallvec![first, second.clone()]) + Ok(smallvec![first, *second]) } _ => Ok(smallvec![first]), } From ac3844a0bb9a13260bf211354dbc014e892d7491 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 18:48:51 +0000 Subject: [PATCH 017/501] a number of code simplifications --- crates/hir-def/src/body/pretty.rs | 2 +- crates/hir-def/src/nameres/collector.rs | 20 ++- crates/hir/src/lib.rs | 4 +- .../src/handlers/generate_default_from_new.rs | 6 +- .../ide-assists/src/handlers/merge_imports.rs | 2 +- .../src/handlers/unmerge_match_arm.rs | 3 +- crates/ide-db/src/rename.rs | 8 +- crates/ide-db/src/search.rs | 40 +++--- .../src/handlers/json_is_not_rust.rs | 122 ++++++++---------- .../src/handlers/private_assoc_item.rs | 5 +- .../src/handlers/unresolved_proc_macro.rs | 5 +- crates/mbe/src/expander/transcriber.rs | 2 +- crates/project-model/src/workspace.rs | 4 +- 13 files changed, 99 insertions(+), 124 deletions(-) diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 10b9b26bbeaa1..41783081ecd7a 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -80,7 +80,7 @@ impl<'a> Write for Printer<'a> { fn write_str(&mut self, s: &str) -> fmt::Result { for line in s.split_inclusive('\n') { if self.needs_indent { - match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() { + match self.buf.chars().rev().find(|ch| *ch != ' ') { Some('\n') | None => {} _ => self.buf.push('\n'), } diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 33a787fd9f95d..c69599079b5e3 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -1600,17 +1600,15 @@ impl ModCollector<'_, '_> { 
FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db); let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); - if self.def_collector.is_proc_macro { - if self.module_id == def_map.root { - if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { - let crate_root = def_map.module_id(def_map.root); - self.def_collector.export_proc_macro( - proc_macro, - ItemTreeId::new(self.tree_id, id), - fn_id, - crate_root, - ); - } + if self.def_collector.is_proc_macro && self.module_id == def_map.root { + if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { + let crate_root = def_map.module_id(def_map.root); + self.def_collector.export_proc_macro( + proc_macro, + ItemTreeId::new(self.tree_id, id), + fn_id, + crate_root, + ); } } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index ec66660f34a53..c1e165b6381e7 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -472,8 +472,8 @@ impl Module { let def_map = self.id.def_map(db.upcast()); let children = def_map[self.id.local_id] .children - .iter() - .map(|(_, module_id)| Module { id: def_map.module_id(*module_id) }) + .values() + .map(|module_id| Module { id: def_map.module_id(*module_id) }) .collect::>(); children.into_iter() } diff --git a/crates/ide-assists/src/handlers/generate_default_from_new.rs b/crates/ide-assists/src/handlers/generate_default_from_new.rs index 2d074a33e7fde..860372941f794 100644 --- a/crates/ide-assists/src/handlers/generate_default_from_new.rs +++ b/crates/ide-assists/src/handlers/generate_default_from_new.rs @@ -82,18 +82,18 @@ fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code: let generic_params = impl_.generic_param_list().map(|generic_params| { let lifetime_params = generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam); - let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| { + let ty_or_const_params = generic_params.type_or_const_params().map(|param| { // remove defaults since they can't be specified in impls match param { ast::TypeOrConstParam::Type(param) => { let param = param.clone_for_update(); param.remove_default(); - Some(ast::GenericParam::TypeParam(param)) + ast::GenericParam::TypeParam(param) } ast::TypeOrConstParam::Const(param) => { let param = param.clone_for_update(); param.remove_default(); - Some(ast::GenericParam::ConstParam(param)) + ast::GenericParam::ConstParam(param) } } }); diff --git a/crates/ide-assists/src/handlers/merge_imports.rs b/crates/ide-assists/src/handlers/merge_imports.rs index 2bdbec93b1f96..d7ddc5f23f5dc 100644 --- a/crates/ide-assists/src/handlers/merge_imports.rs +++ b/crates/ide-assists/src/handlers/merge_imports.rs @@ -92,7 +92,7 @@ trait Merge: AstNode + Clone { fn try_merge_from(self, items: &mut dyn Iterator) -> Option> { let mut edits = Vec::new(); let mut merged = self.clone(); - while let Some(item) = items.next() { + for item in items { merged = merged.try_merge(&item)?; edits.push(Edit::Remove(item.into_either())); } diff --git a/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/crates/ide-assists/src/handlers/unmerge_match_arm.rs index 9565f0ee6f26f..db789cfa33428 100644 --- a/crates/ide-assists/src/handlers/unmerge_match_arm.rs +++ b/crates/ide-assists/src/handlers/unmerge_match_arm.rs @@ -86,8 +86,7 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O it.prev_sibling_or_token() }) .map(|it| it.kind()) - .skip_while(|it| it.is_trivia()) - .next() + .find(|it| !it.is_trivia()) == 
Some(T![,]); let has_arms_after = neighbor(&match_arm, Direction::Next).is_some(); if !has_comma_after && !has_arms_after { diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index cd4a7e1554cd7..8f310b0f42fb0 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -334,11 +334,9 @@ pub fn source_edit_from_references( } _ => false, }; - if !has_emitted_edit { - if !edited_ranges.contains(&range.start()) { - edit.replace(range, new_name.to_string()); - edited_ranges.push(range.start()); - } + if !has_emitted_edit && !edited_ranges.contains(&range.start()) { + edit.replace(range, new_name.to_string()); + edited_ranges.push(range.start()); } } diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index b2b0e49085c8c..f6d4ccc3ce5af 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -494,20 +494,28 @@ impl<'a> FindUsages<'a> { } // Search for `super` and `crate` resolving to our module - match self.def { - Definition::Module(module) => { - let scope = search_scope - .intersection(&SearchScope::module_and_children(self.sema.db, module)); + if let Definition::Module(module) = self.def { + let scope = + search_scope.intersection(&SearchScope::module_and_children(self.sema.db, module)); - let is_crate_root = - module.is_crate_root(self.sema.db).then(|| Finder::new("crate")); - let finder = &Finder::new("super"); + let is_crate_root = module.is_crate_root(self.sema.db).then(|| Finder::new("crate")); + let finder = &Finder::new("super"); - for (text, file_id, search_range) in scope_files(sema, &scope) { - let tree = Lazy::new(move || sema.parse(file_id).syntax().clone()); + for (text, file_id, search_range) in scope_files(sema, &scope) { + let tree = Lazy::new(move || sema.parse(file_id).syntax().clone()); + for offset in match_indices(&text, finder, search_range) { + if let Some(iter) = find_nodes("super", &tree, offset) { + for name_ref in iter.filter_map(ast::NameRef::cast) { + if self.found_name_ref(&name_ref, sink) { + return; + } + } + } + } + if let Some(finder) = &is_crate_root { for offset in match_indices(&text, finder, search_range) { - if let Some(iter) = find_nodes("super", &tree, offset) { + if let Some(iter) = find_nodes("crate", &tree, offset) { for name_ref in iter.filter_map(ast::NameRef::cast) { if self.found_name_ref(&name_ref, sink) { return; @@ -515,20 +523,8 @@ impl<'a> FindUsages<'a> { } } } - if let Some(finder) = &is_crate_root { - for offset in match_indices(&text, finder, search_range) { - if let Some(iter) = find_nodes("crate", &tree, offset) { - for name_ref in iter.filter_map(ast::NameRef::cast) { - if self.found_name_ref(&name_ref, sink) { - return; - } - } - } - } - } } } - _ => (), } // search for module `self` references in our module's definition source diff --git a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index e8df6dcf285d0..04ce1e0feeed4 100644 --- a/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -99,76 +99,66 @@ pub(crate) fn json_in_items( && node.last_token().map(|x| x.kind()) == Some(SyntaxKind::R_CURLY) { let node_string = node.to_string(); - if let Ok(it) = serde_json::from_str(&node_string) { - if let serde_json::Value::Object(it) = it { - let import_scope = ImportScope::find_insert_use_container(node, sema)?; - let range = node.text_range(); - let mut edit = TextEdit::builder(); - edit.delete(range); - let mut state = 
State::default(); - let semantics_scope = sema.scope(node)?; - let scope_resolve = - |it| semantics_scope.speculative_resolve(&make::path_from_text(it)); - let scope_has = |it| scope_resolve(it).is_some(); - let deserialize_resolved = scope_resolve("::serde::Deserialize"); - let serialize_resolved = scope_resolve("::serde::Serialize"); - state.has_deserialize = deserialize_resolved.is_some(); - state.has_serialize = serialize_resolved.is_some(); - state.build_struct(&it); - edit.insert(range.start(), state.result); - acc.push( - Diagnostic::new( - "json-is-not-rust", - "JSON syntax is not valid as a Rust item", - range, - ) - .severity(Severity::WeakWarning) - .with_fixes(Some(vec![{ - let mut scb = SourceChangeBuilder::new(file_id); - let scope = match import_scope { - ImportScope::File(it) => ImportScope::File(scb.make_mut(it)), - ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)), - ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)), - }; - let current_module = semantics_scope.module(); - if !scope_has("Serialize") { - if let Some(PathResolution::Def(it)) = serialize_resolved { - if let Some(it) = current_module.find_use_path_prefixed( - sema.db, - it, - config.insert_use.prefix_kind, - config.prefer_no_std, - ) { - insert_use( - &scope, - mod_path_to_ast(&it), - &config.insert_use, - ); - } + if let Ok(serde_json::Value::Object(it)) = serde_json::from_str(&node_string) { + let import_scope = ImportScope::find_insert_use_container(node, sema)?; + let range = node.text_range(); + let mut edit = TextEdit::builder(); + edit.delete(range); + let mut state = State::default(); + let semantics_scope = sema.scope(node)?; + let scope_resolve = + |it| semantics_scope.speculative_resolve(&make::path_from_text(it)); + let scope_has = |it| scope_resolve(it).is_some(); + let deserialize_resolved = scope_resolve("::serde::Deserialize"); + let serialize_resolved = scope_resolve("::serde::Serialize"); + state.has_deserialize = deserialize_resolved.is_some(); + state.has_serialize = serialize_resolved.is_some(); + state.build_struct(&it); + edit.insert(range.start(), state.result); + acc.push( + Diagnostic::new( + "json-is-not-rust", + "JSON syntax is not valid as a Rust item", + range, + ) + .severity(Severity::WeakWarning) + .with_fixes(Some(vec![{ + let mut scb = SourceChangeBuilder::new(file_id); + let scope = match import_scope { + ImportScope::File(it) => ImportScope::File(scb.make_mut(it)), + ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)), + ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)), + }; + let current_module = semantics_scope.module(); + if !scope_has("Serialize") { + if let Some(PathResolution::Def(it)) = serialize_resolved { + if let Some(it) = current_module.find_use_path_prefixed( + sema.db, + it, + config.insert_use.prefix_kind, + config.prefer_no_std, + ) { + insert_use(&scope, mod_path_to_ast(&it), &config.insert_use); } } - if !scope_has("Deserialize") { - if let Some(PathResolution::Def(it)) = deserialize_resolved { - if let Some(it) = current_module.find_use_path_prefixed( - sema.db, - it, - config.insert_use.prefix_kind, - config.prefer_no_std, - ) { - insert_use( - &scope, - mod_path_to_ast(&it), - &config.insert_use, - ); - } + } + if !scope_has("Deserialize") { + if let Some(PathResolution::Def(it)) = deserialize_resolved { + if let Some(it) = current_module.find_use_path_prefixed( + sema.db, + it, + config.insert_use.prefix_kind, + config.prefer_no_std, + ) { + insert_use(&scope, mod_path_to_ast(&it), 
&config.insert_use); } } - let mut sc = scb.finish(); - sc.insert_source_edit(file_id, edit.finish()); - fix("convert_json_to_struct", "Convert JSON to struct", sc, range) - }])), - ); - } + } + let mut sc = scb.finish(); + sc.insert_source_edit(file_id, edit.finish()); + fix("convert_json_to_struct", "Convert JSON to struct", sc, range) + }])), + ); } } Some(()) diff --git a/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs index b363a516dd1c0..0b3121c765d8d 100644 --- a/crates/ide-diagnostics/src/handlers/private_assoc_item.rs +++ b/crates/ide-diagnostics/src/handlers/private_assoc_item.rs @@ -11,10 +11,7 @@ pub(crate) fn private_assoc_item( d: &hir::PrivateAssocItem, ) -> Diagnostic { // FIXME: add quickfix - let name = match d.item.name(ctx.sema.db) { - Some(name) => format!("`{}` ", name), - None => String::new(), - }; + let name = d.item.name(ctx.sema.db).map(|name| format!("`{name}` ")).unwrap_or_default(); Diagnostic::new( "private-assoc-item", format!( diff --git a/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs index b2ed19104e278..9a984ba6bf07a 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs @@ -34,10 +34,7 @@ pub(crate) fn unresolved_proc_macro( let message = format!( "{message}: {}", if config_enabled { - match def_map.proc_macro_loading_error() { - Some(e) => e, - None => "proc macro not found in the built dylib", - } + def_map.proc_macro_loading_error().unwrap_or("proc macro not found in the built dylib") } else { match d.kind { hir::MacroKind::Attr if proc_macros_enabled => { diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index db0d327bf409b..605ddd4a065d8 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -212,7 +212,7 @@ fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandRe } else { ctx.bindings.get(v, &mut ctx.nesting).map_or_else( |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) }, - |it| ExpandResult::ok(it), + ExpandResult::ok, ) } } diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index e2382aa37e8e5..5ab767a0e5faf 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -366,11 +366,11 @@ impl ProjectWorkspace { _ => None, }) .collect(); - let ref mut outputs = match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) { + let outputs = &mut (match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) { Ok(it) => Ok(it.into_iter()), // io::Error is not Clone? 
Err(e) => Err(Arc::new(e)), - }; + }); workspaces .iter() From 5b1cd8245f64d6501f0866c49e948e2d8a75038a Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 19:44:19 +0000 Subject: [PATCH 018/501] fixup --- crates/project-model/src/workspace.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 5ab767a0e5faf..d562066533eee 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -366,11 +366,11 @@ impl ProjectWorkspace { _ => None, }) .collect(); - let outputs = &mut (match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) { + let outputs = &mut match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) { Ok(it) => Ok(it.into_iter()), // io::Error is not Clone? Err(e) => Err(Arc::new(e)), - }); + }; workspaces .iter() From 95fc3ba41cff006a3ae4c3c730122b0503fdefab Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 19:51:21 +0000 Subject: [PATCH 019/501] fixup --- crates/hir-def/src/body/pretty.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 41783081ecd7a..4b4664a1cf4a6 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -113,7 +113,7 @@ impl<'a> Printer<'a> { } fn newline(&mut self) { - match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() { + match self.buf.chars().rev().find(|ch| *ch != ' ') { Some('\n') | None => {} _ => writeln!(self).unwrap(), } From d218b237fd8e5495c5399f0c1651c7dbf3b3daaf Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Tue, 10 Jan 2023 20:40:08 +0000 Subject: [PATCH 020/501] collapse some nested blocks --- crates/hir-ty/src/infer/expr.rs | 34 +++++++++---------- crates/hir-ty/src/layout.rs | 17 ++++------ .../src/handlers/extract_type_alias.rs | 22 ++++++------ crates/ide-db/src/rename.rs | 24 ++++++------- crates/ide-db/src/symbol_index.rs | 8 ++--- crates/ide/src/hover/render.rs | 8 ++--- crates/ide/src/join_lines.rs | 6 ++-- crates/ide/src/syntax_highlighting.rs | 9 +++-- crates/ide/src/typing.rs | 6 ++-- crates/rust-analyzer/src/main_loop.rs | 8 ++--- 10 files changed, 63 insertions(+), 79 deletions(-) diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 8f9cdac3784c7..ea04a3d17b133 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -1136,18 +1136,16 @@ impl<'a> InferenceContext<'a> { if self.diverges.is_always() { // we don't even make an attempt at coercion self.table.new_maybe_never_var() - } else { - if let Some(t) = expected.only_has_type(&mut self.table) { - if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() { - self.result.type_mismatches.insert( - expr.into(), - TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() }, - ); - } - t - } else { - TyBuilder::unit() + } else if let Some(t) = expected.only_has_type(&mut self.table) { + if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() { + self.result.type_mismatches.insert( + expr.into(), + TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() }, + ); } + t + } else { + TyBuilder::unit() } } } @@ -1314,13 +1312,13 @@ impl<'a> InferenceContext<'a> { } else { param_ty }; - if !coercion_target.is_unknown() { - if self.coerce(Some(arg), &ty, &coercion_target).is_err() { - self.result.type_mismatches.insert( - arg.into(), - TypeMismatch { expected: coercion_target, actual: 
ty.clone() }, - ); - } + if !coercion_target.is_unknown() + && self.coerce(Some(arg), &ty, &coercion_target).is_err() + { + self.result.type_mismatches.insert( + arg.into(), + TypeMismatch { expected: coercion_target, actual: ty.clone() }, + ); } } } diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index 7a1cca3143ec8..958d3dabe0ed1 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -251,17 +251,14 @@ fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result Ty { match pointee.kind(Interner) { - TyKind::Adt(AdtId(adt), subst) => match adt { - &hir_def::AdtId::StructId(i) => { - let data = db.struct_data(i); - let mut it = data.variant_data.fields().iter().rev(); - match it.next() { - Some((f, _)) => field_ty(db, i.into(), f, subst), - None => pointee, - } + TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), subst) => { + let data = db.struct_data(*i); + let mut it = data.variant_data.fields().iter().rev(); + match it.next() { + Some((f, _)) => field_ty(db, (*i).into(), f, subst), + None => pointee, } - _ => pointee, - }, + } _ => pointee, } } diff --git a/crates/ide-assists/src/handlers/extract_type_alias.rs b/crates/ide-assists/src/handlers/extract_type_alias.rs index 0505f5784f814..6c0238f35d0f1 100644 --- a/crates/ide-assists/src/handlers/extract_type_alias.rs +++ b/crates/ide-assists/src/handlers/extract_type_alias.rs @@ -161,19 +161,17 @@ fn collect_used_generics<'gp>( .and_then(|lt| known_generics.iter().find(find_lifetime(<.text()))), ), ast::Type::ArrayType(ar) => { - if let Some(expr) = ar.expr() { - if let ast::Expr::PathExpr(p) = expr { - if let Some(path) = p.path() { - if let Some(name_ref) = path.as_single_name_ref() { - if let Some(param) = known_generics.iter().find(|gp| { - if let ast::GenericParam::ConstParam(cp) = gp { - cp.name().map_or(false, |n| n.text() == name_ref.text()) - } else { - false - } - }) { - generics.push(param); + if let Some(ast::Expr::PathExpr(p)) = ar.expr() { + if let Some(path) = p.path() { + if let Some(name_ref) = path.as_single_name_ref() { + if let Some(param) = known_generics.iter().find(|gp| { + if let ast::GenericParam::ConstParam(cp) = gp { + cp.name().map_or(false, |n| n.text() == name_ref.text()) + } else { + false } + }) { + generics.push(param); } } } diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 8f310b0f42fb0..0e5906097c3d4 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -389,19 +389,17 @@ fn source_edit_from_name_ref( edit.delete(TextRange::new(s, e)); return true; } - } else if init == name_ref { - if field_name.text() == new_name { - cov_mark::hit!(test_rename_local_put_init_shorthand); - // Foo { field: local } -> Foo { field } - // ^^^^^^^ delete this - - // same names, we can use a shorthand here instead. - // we do not want to erase attributes hence this range start - let s = field_name.syntax().text_range().end(); - let e = init.syntax().text_range().end(); - edit.delete(TextRange::new(s, e)); - return true; - } + } else if init == name_ref && field_name.text() == new_name { + cov_mark::hit!(test_rename_local_put_init_shorthand); + // Foo { field: local } -> Foo { field } + // ^^^^^^^ delete this + + // same names, we can use a shorthand here instead. 
+ // we do not want to erase attributes hence this range start + let s = field_name.syntax().text_range().end(); + let e = init.syntax().text_range().end(); + edit.delete(TextRange::new(s, e)); + return true; } } // init shorthand diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs index c054cc1597968..a91ffd1ec4fd1 100644 --- a/crates/ide-db/src/symbol_index.rs +++ b/crates/ide-db/src/symbol_index.rs @@ -323,10 +323,10 @@ impl Query { if symbol.name != self.query { continue; } - } else if self.case_sensitive { - if self.query.chars().any(|c| !symbol.name.contains(c)) { - continue; - } + } else if self.case_sensitive + && self.query.chars().any(|c| !symbol.name.contains(c)) + { + continue; } res.push(symbol.clone()); diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index 47257f0bfad05..cb537d7ef79d7 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -64,12 +64,10 @@ pub(super) fn type_info( bt_end = if config.markdown() { "```\n" } else { "" } ) .into() + } else if config.markdown() { + Markup::fenced_block(&original.display(sema.db)) } else { - if config.markdown() { - Markup::fenced_block(&original.display(sema.db)) - } else { - original.display(sema.db).to_string().into() - } + original.display(sema.db).to_string().into() }; res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets)); Some(res) diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs index edc48e84d7252..1cfde23624558 100644 --- a/crates/ide/src/join_lines.rs +++ b/crates/ide/src/join_lines.rs @@ -161,10 +161,8 @@ fn remove_newline( } } - if config.join_assignments { - if join_assignments(edit, &prev, &next).is_some() { - return; - } + if config.join_assignments && join_assignments(edit, &prev, &next).is_some() { + return; } if config.unwrap_trivial_blocks { diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs index 50371d620eb2a..454a250f3ded4 100644 --- a/crates/ide/src/syntax_highlighting.rs +++ b/crates/ide/src/syntax_highlighting.rs @@ -413,11 +413,10 @@ fn traverse( let string = ast::String::cast(token); let string_to_highlight = ast::String::cast(descended_token.clone()); if let Some((string, expanded_string)) = string.zip(string_to_highlight) { - if string.is_raw() { - if inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some() - { - continue; - } + if string.is_raw() + && inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some() + { + continue; } highlight_format_string(hl, &string, &expanded_string, range); highlight_escape_string(hl, &string, range.start()); diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs index eba5a485636e2..c265487562508 100644 --- a/crates/ide/src/typing.rs +++ b/crates/ide/src/typing.rs @@ -205,10 +205,8 @@ fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option { if expr_stmt.semicolon_token().is_some() { return None; } - } else { - if !ast::StmtList::can_cast(binop.syntax().parent()?.kind()) { - return None; - } + } else if !ast::StmtList::can_cast(binop.syntax().parent()?.kind()) { + return None; } let expr = binop.rhs()?; diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 0bc940dfe8dab..a270049019f32 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -307,10 +307,10 @@ impl GlobalState { } } - if !was_quiescent || state_changed || memdocs_added_or_removed { - if 
self.config.publish_diagnostics() { - self.update_diagnostics() - } + if (!was_quiescent || state_changed || memdocs_added_or_removed) + && self.config.publish_diagnostics() + { + self.update_diagnostics() } } From a3114c3697c21310dbf8a11feea74712bf1eddd7 Mon Sep 17 00:00:00 2001 From: "daniel.eades" Date: Wed, 11 Jan 2023 16:07:35 +0000 Subject: [PATCH 021/501] move loop index inside iterator --- crates/hir-def/src/attr.rs | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index fb1e72b0d0d81..c209bd949073c 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -251,19 +251,18 @@ impl Attrs { let enum_ = &item_tree[loc.id.value]; let cfg_options = &crate_graph[krate].cfg_options; - let mut idx = 0; - let Some(variant) = enum_.variants.clone().find(|variant| { + + let Some(variant) = enum_.variants.clone().filter(|variant| { let attrs = item_tree.attrs(db, krate, (*variant).into()); - if attrs.is_cfg_enabled(cfg_options) { - if it.local_id == Idx::from_raw(RawIdx::from(idx)) { - return true - } - idx += 1; - } - false - }) else { + attrs.is_cfg_enabled(cfg_options) + }) + .zip(0u32..) + .find(|(_variant, idx)| it.local_id == Idx::from_raw(RawIdx::from(*idx))) + .map(|(variant, _idx)| variant) + else { return Arc::new(res); }; + (item_tree[variant].fields.clone(), item_tree, krate) } VariantId::StructId(it) => { From bb4e272d8a05c5b6a000808c5c84d722d582603d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 11 Jan 2023 17:10:04 +0100 Subject: [PATCH 022/501] Refine search for const and function assoc items --- crates/hir-ty/src/method_resolution.rs | 12 +- crates/hir/src/lib.rs | 2 +- crates/hir/src/source_analyzer.rs | 10 +- crates/ide-db/src/search.rs | 62 +++--- crates/ide/src/highlight_related.rs | 2 - crates/ide/src/references.rs | 261 +++++++++++++++++++++++++ crates/rust-analyzer/src/config.rs | 6 +- 7 files changed, 319 insertions(+), 36 deletions(-) diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index ae25704f2047a..64376e10bcc5b 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -712,17 +712,17 @@ fn lookup_impl_assoc_item_for_trait_ref( let table = InferenceTable::new(db, env); let impl_data = find_matching_impl(impls, table, trait_ref)?; - impl_data.items.iter().find_map(|it| match it { + impl_data.items.iter().find_map(|&it| match it { AssocItemId::FunctionId(f) => { - (db.function_data(*f).name == *name).then_some(AssocItemId::FunctionId(*f)) + (db.function_data(f).name == *name).then_some(AssocItemId::FunctionId(f)) } AssocItemId::ConstId(c) => db - .const_data(*c) + .const_data(c) .name .as_ref() - .map(|n| *n == *name) - .and_then(|result| if result { Some(AssocItemId::ConstId(*c)) } else { None }), - _ => None, + .map(|n| n == name) + .and_then(|result| if result { Some(AssocItemId::ConstId(c)) } else { None }), + AssocItemId::TypeAliasId(_) => None, }) } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index a1d2ec02f23bf..2fc2673bd2229 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2129,7 +2129,7 @@ pub enum AssocItem { Const(Const), TypeAlias(TypeAlias), } -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum AssocItemContainer { Trait(Trait), Impl(Impl), diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 059b80bcf1392..2354eb2c9ccaa 100644 --- a/crates/hir/src/source_analyzer.rs +++ 
b/crates/hir/src/source_analyzer.rs @@ -504,7 +504,7 @@ impl SourceAnalyzer { AssocItemId::ConstId(const_id) => { self.resolve_impl_const_or_trait_def(db, const_id, subs).into() } - _ => assoc, + assoc => assoc, }; return Some(PathResolution::Def(AssocItem::from(assoc).into())); @@ -517,7 +517,13 @@ impl SourceAnalyzer { prefer_value_ns = true; } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) { let pat_id = self.pat_id(&path_pat.into())?; - if let Some((assoc, _)) = infer.assoc_resolutions_for_pat(pat_id) { + if let Some((assoc, subs)) = infer.assoc_resolutions_for_pat(pat_id) { + let assoc = match assoc { + AssocItemId::ConstId(const_id) => { + self.resolve_impl_const_or_trait_def(db, const_id, subs).into() + } + assoc => assoc, + }; return Some(PathResolution::Def(AssocItem::from(assoc).into())); } if let Some(VariantId::EnumVariantId(variant)) = diff --git a/crates/ide-db/src/search.rs b/crates/ide-db/src/search.rs index f6d4ccc3ce5af..fd09fdeb0bda5 100644 --- a/crates/ide-db/src/search.rs +++ b/crates/ide-db/src/search.rs @@ -7,7 +7,9 @@ use std::{mem, sync::Arc}; use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt}; -use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility}; +use hir::{ + AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility, +}; use memchr::memmem::Finder; use once_cell::unsync::Lazy; use parser::SyntaxKind; @@ -311,15 +313,15 @@ impl Definition { pub fn usages<'a>(self, sema: &'a Semantics<'_, RootDatabase>) -> FindUsages<'a> { FindUsages { - local_repr: match self { - Definition::Local(local) => Some(local.representative(sema.db)), - _ => None, - }, def: self, - trait_assoc_def: as_trait_assoc_def(sema.db, self), + assoc_item_container: self.as_assoc_item(sema.db).map(|a| a.container(sema.db)), sema, scope: None, include_self_kw_refs: None, + local_repr: match self { + Definition::Local(local) => Some(local.representative(sema.db)), + _ => None, + }, search_self_mod: false, } } @@ -328,8 +330,7 @@ impl Definition { #[derive(Clone)] pub struct FindUsages<'a> { def: Definition, - /// If def is an assoc item from a trait or trait impl, this is the corresponding item of the trait definition - trait_assoc_def: Option, + assoc_item_container: Option, sema: &'a Semantics<'a, RootDatabase>, scope: Option, include_self_kw_refs: Option, @@ -380,7 +381,9 @@ impl<'a> FindUsages<'a> { let sema = self.sema; let search_scope = { - let base = self.trait_assoc_def.unwrap_or(self.def).search_scope(sema.db); + // FIXME: Is the trait scope needed for trait impl assoc items? + let base = + as_trait_assoc_def(sema.db, self.def).unwrap_or(self.def).search_scope(sema.db); match &self.scope { None => base, Some(scope) => base.intersection(scope), @@ -651,13 +654,26 @@ impl<'a> FindUsages<'a> { sink(file_id, reference) } Some(NameRefClass::Definition(def)) - if match self.trait_assoc_def { - Some(trait_assoc_def) => { - // we have a trait assoc item, so force resolve all assoc items to their trait version - convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def - } - None => self.def == def, - } => + if self.def == def + // is our def a trait assoc item? 
then we want to find everything + || matches!(self.assoc_item_container, Some(hir::AssocItemContainer::Trait(_))) + && convert_to_def_in_trait(self.sema.db, def) == self.def => + { + let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); + let reference = FileReference { + range, + name: ast::NameLike::NameRef(name_ref.clone()), + category: ReferenceCategory::new(&def, name_ref), + }; + sink(file_id, reference) + } + // FIXME: special case type aliases, we can't filter between impl and trait defs here as we lack the substitutions + // so we always resolve all assoc type aliases to both their trait def and impl defs + Some(NameRefClass::Definition(def)) + if self.assoc_item_container.is_some() + && matches!(self.def, Definition::TypeAlias(_)) + && convert_to_def_in_trait(self.sema.db, def) + == convert_to_def_in_trait(self.sema.db, self.def) => { let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax()); let reference = FileReference { @@ -748,12 +764,14 @@ impl<'a> FindUsages<'a> { false } Some(NameClass::Definition(def)) if def != self.def => { - // if the def we are looking for is a trait (impl) assoc item, we'll have to resolve the items to trait definition assoc item - if !matches!( - self.trait_assoc_def, - Some(trait_assoc_def) - if convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def - ) { + // only when looking for trait assoc items, we want to find other assoc items + if !matches!(self.assoc_item_container, Some(hir::AssocItemContainer::Trait(_))) + // FIXME: special case type aliases, we can't filter between impl and trait defs here as we lack the substitutions + // so we always resolve all assoc type aliases to both their trait def and impl defs + && !(matches!(self.def, Definition::TypeAlias(_)) + && convert_to_def_in_trait(self.sema.db, def) + == convert_to_def_in_trait(self.sema.db, self.def)) + { return false; } let FileRange { file_id, range } = self.sema.original_range(name.syntax()); diff --git a/crates/ide/src/highlight_related.rs b/crates/ide/src/highlight_related.rs index 55f8779eed7d0..c889eb930f30b 100644 --- a/crates/ide/src/highlight_related.rs +++ b/crates/ide/src/highlight_related.rs @@ -1356,7 +1356,6 @@ fn main() { r#" trait Trait { fn func(self) {} - //^^^^ } impl Trait for () { @@ -1376,7 +1375,6 @@ fn main() { r#" trait Trait { fn func(self) {} - //^^^^ } impl Trait for () { diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs index 0f758cfa2d344..e49f68c57ba8d 100644 --- a/crates/ide/src/references.rs +++ b/crates/ide/src/references.rs @@ -1636,4 +1636,265 @@ pub fn deri$0ve(_stream: TokenStream) -> TokenStream {} "#]], ); } + + #[test] + fn assoc_items_trait_def() { + check( + r#" +trait Trait { + const CONST$0: usize; +} + +impl Trait for () { + const CONST: usize = 0; +} + +impl Trait for ((),) { + const CONST: usize = 0; +} + +fn f() { + let _ = <()>::CONST; + + let _ = T::CONST; +} +"#, + expect![[r#" + CONST Const FileId(0) 18..37 24..29 + + FileId(0) 71..76 + FileId(0) 125..130 + FileId(0) 183..188 + FileId(0) 206..211 + "#]], + ); + check( + r#" +trait Trait { + type TypeAlias$0; +} + +impl Trait for () { + type TypeAlias = (); +} + +impl Trait for ((),) { + type TypeAlias = (); +} + +fn f() { + let _: <() as Trait>::TypeAlias; + + let _: T::TypeAlias; +} +"#, + expect![[r#" + TypeAlias TypeAlias FileId(0) 18..33 23..32 + + FileId(0) 66..75 + FileId(0) 117..126 + FileId(0) 181..190 + FileId(0) 207..216 + "#]], + ); + check( + r#" +trait Trait { + fn function$0() {} +} + +impl 
Trait for () { + fn function() {} +} + +impl Trait for ((),) { + fn function() {} +} + +fn f() { + let _ = <()>::function; + + let _ = T::function; +} +"#, + expect![[r#" + function Function FileId(0) 18..34 21..29 + + FileId(0) 65..73 + FileId(0) 112..120 + FileId(0) 166..174 + FileId(0) 192..200 + "#]], + ); + } + + #[test] + fn assoc_items_trait_impl_def() { + check( + r#" +trait Trait { + const CONST: usize; +} + +impl Trait for () { + const CONST$0: usize = 0; +} + +impl Trait for ((),) { + const CONST: usize = 0; +} + +fn f() { + let _ = <()>::CONST; + + let _ = T::CONST; +} +"#, + expect![[r#" + CONST Const FileId(0) 65..88 71..76 + + FileId(0) 183..188 + "#]], + ); + check( + r#" +trait Trait { + type TypeAlias; +} + +impl Trait for () { + type TypeAlias$0 = (); +} + +impl Trait for ((),) { + type TypeAlias = (); +} + +fn f() { + let _: <() as Trait>::TypeAlias; + + let _: T::TypeAlias; +} +"#, + expect![[r#" + TypeAlias TypeAlias FileId(0) 61..81 66..75 + + FileId(0) 23..32 + FileId(0) 117..126 + FileId(0) 181..190 + FileId(0) 207..216 + "#]], + ); + check( + r#" +trait Trait { + fn function() {} +} + +impl Trait for () { + fn function$0() {} +} + +impl Trait for ((),) { + fn function() {} +} + +fn f() { + let _ = <()>::function; + + let _ = T::function; +} +"#, + expect![[r#" + function Function FileId(0) 62..78 65..73 + + FileId(0) 166..174 + "#]], + ); + } + + #[test] + fn assoc_items_ref() { + check( + r#" +trait Trait { + const CONST: usize; +} + +impl Trait for () { + const CONST: usize = 0; +} + +impl Trait for ((),) { + const CONST: usize = 0; +} + +fn f() { + let _ = <()>::CONST$0; + + let _ = T::CONST; +} +"#, + expect![[r#" + CONST Const FileId(0) 65..88 71..76 + + FileId(0) 183..188 + "#]], + ); + check( + r#" +trait Trait { + type TypeAlias; +} + +impl Trait for () { + type TypeAlias = (); +} + +impl Trait for ((),) { + type TypeAlias = (); +} + +fn f() { + let _: <() as Trait>::TypeAlias$0; + + let _: T::TypeAlias; +} +"#, + expect![[r#" + TypeAlias TypeAlias FileId(0) 18..33 23..32 + + FileId(0) 66..75 + FileId(0) 117..126 + FileId(0) 181..190 + FileId(0) 207..216 + "#]], + ); + check( + r#" +trait Trait { + fn function() {} +} + +impl Trait for () { + fn function() {} +} + +impl Trait for ((),) { + fn function() {} +} + +fn f() { + let _ = <()>::function$0; + + let _ = T::function; +} +"#, + expect![[r#" + function Function FileId(0) 62..78 65..73 + + FileId(0) 166..174 + "#]], + ); + } } diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index b0afbdc9a4265..6d6e367e3740b 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -1044,7 +1044,7 @@ impl Config { &self.data.cargo_extraEnv } - pub fn check_on_save_extra_env(&self) -> FxHashMap { + pub fn check_extra_env(&self) -> FxHashMap { let mut extra_env = self.data.cargo_extraEnv.clone(); extra_env.extend(self.data.check_extraEnv.clone()); extra_env @@ -1165,7 +1165,7 @@ impl Config { FlycheckConfig::CustomCommand { command, args, - extra_env: self.check_on_save_extra_env(), + extra_env: self.check_extra_env(), invocation_strategy: match self.data.check_invocationStrategy { InvocationStrategy::Once => flycheck::InvocationStrategy::Once, InvocationStrategy::PerWorkspace => { @@ -1210,7 +1210,7 @@ impl Config { CargoFeaturesDef::Selected(it) => it, }, extra_args: self.data.check_extraArgs.clone(), - extra_env: self.check_on_save_extra_env(), + extra_env: self.check_extra_env(), ansi_color_output: self.color_diagnostic_output(), }, } From 
1ce3e820dcb3ca90dec6d3b008e14531c54afc06 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 11 Jan 2023 22:39:05 +0100 Subject: [PATCH 023/501] feat: Make unlinked_file diagnostic quickfixes work for inline modules --- crates/hir-expand/src/lib.rs | 8 + .../src/handlers/unlinked_file.rs | 219 ++++++++++++++---- crates/vfs/src/vfs_path.rs | 17 +- 3 files changed, 202 insertions(+), 42 deletions(-) diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 5554c7517f5ac..b879eec4cc8d7 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -356,6 +356,14 @@ impl HirFileId { } } + #[inline] + pub fn file_id(self) -> Option { + match self.0 & Self::MACRO_FILE_TAG_MASK { + 0 => Some(FileId(self.0)), + _ => None, + } + } + fn repr(self) -> HirFileIdRepr { match self.0 & Self::MACRO_FILE_TAG_MASK { 0 => HirFileIdRepr::FileId(FileId(self.0)), diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index be70f0ac4f797..4bb3789d69d07 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -1,6 +1,8 @@ //! Diagnostic emitted for files that aren't part of any crate. -use hir::db::DefDatabase; +use std::iter; + +use hir::{db::DefDatabase, InFile, ModuleSource}; use ide_db::{ base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt}, source_change::SourceChange, @@ -42,45 +44,106 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option> { let source_root = ctx.sema.db.source_root(ctx.sema.db.file_source_root(file_id)); let our_path = source_root.path_for_file(&file_id)?; - let (mut module_name, _) = our_path.name_and_extension()?; - - // Candidates to look for: - // - `mod.rs`, `main.rs` and `lib.rs` in the same folder - // - `$dir.rs` in the parent folder, where `$dir` is the directory containing `self.file_id` let parent = our_path.parent()?; - let paths = { - let parent = if module_name == "mod" { - // for mod.rs we need to actually look up one higher - // and take the parent as our to be module name - let (name, _) = parent.name_and_extension()?; - module_name = name; - parent.parent()? - } else { - parent - }; - let mut paths = - vec![parent.join("mod.rs")?, parent.join("lib.rs")?, parent.join("main.rs")?]; - - // `submod/bla.rs` -> `submod.rs` - let parent_mod = (|| { - let (name, _) = parent.name_and_extension()?; - parent.parent()?.join(&format!("{name}.rs")) - })(); - paths.extend(parent_mod); - paths + let (module_name, _) = our_path.name_and_extension()?; + let (parent, module_name) = if module_name == "mod" { + // for mod.rs we need to actually look up one higher + // and take the parent as our to be module name + let (name, _) = parent.name_and_extension()?; + (parent.parent()?, name.to_owned()) + } else { + (parent, module_name.to_owned()) }; - for &parent_id in paths.iter().filter_map(|path| source_root.file_for_path(path)) { + // check crate roots, i.e. main.rs, lib.rs, ... 
+ 'outer: for &krate in &*ctx.sema.db.relevant_crates(file_id) { + let crate_def_map = ctx.sema.db.crate_def_map(krate); + if let Some(root_file_id) = crate_def_map[crate_def_map.root()].origin.file_id() { + if let Some(path) = source_root.path_for_file(&root_file_id) { + let parent2 = path.parent()?; + if let Some(rel) = parent.strip_prefix(&parent2) { + let mut current = &crate_def_map[crate_def_map.root()]; + for ele in rel.as_ref().components() { + let seg = match ele { + std::path::Component::Normal(seg) => seg.to_str()?, + std::path::Component::RootDir => continue, + // shouldn't occur + _ => continue 'outer, + }; + match current.children.iter().find(|(name, _)| name.to_smol_str() == seg) { + Some((_, child)) => { + current = &crate_def_map[*child]; + } + None => continue 'outer, + } + } + let InFile { file_id: parent_file_id, value: source } = + current.definition_source(ctx.sema.db); + if let Some(parent_file_id) = parent_file_id.file_id() { + return make_fixes( + ctx.sema.db, + parent_file_id, + source, + &module_name, + file_id, + ); + } + } + } + } + } + // build all parent paths of the form `../module_name/mod.rs` and `../module_name.rs` + let paths = iter::successors(Some(parent.clone()), |prev| prev.parent()).filter_map(|path| { + let parent = path.parent()?; + let (name, _) = path.name_and_extension()?; + Some(([parent.join(&format!("{name}.rs"))?, path.join("mod.rs")?], name.to_owned())) + }); + let mut stack = vec![]; + if let Some(&parent_id) = paths + .inspect(|(_, name)| stack.push(name.clone())) + .find_map(|(paths, _)| paths.into_iter().find_map(|path| source_root.file_for_path(&path))) + { + stack.pop(); for &krate in ctx.sema.db.relevant_crates(parent_id).iter() { let crate_def_map = ctx.sema.db.crate_def_map(krate); - for (_, module) in crate_def_map.modules() { - if module.origin.is_inline() { - // We don't handle inline `mod parent {}`s, they use different paths. 
- continue; - } - + 'outer: for (_, module) in crate_def_map.modules() { if module.origin.file_id() == Some(parent_id) { - return make_fixes(ctx.sema.db, parent_id, module_name, file_id); + if module.origin.is_inline() { + continue; + } + if stack.is_empty() { + return make_fixes( + ctx.sema.db, + parent_id, + module.definition_source(ctx.sema.db).value, + &module_name, + file_id, + ); + } else { + let mut current = module; + for s in stack.iter().rev() { + match module.children.iter().find(|(name, _)| name.to_smol_str() == s) { + Some((_, child)) => { + current = &crate_def_map[*child]; + } + None => break 'outer, + } + } + let InFile { file_id: parent_file_id, value: source } = + current.definition_source(ctx.sema.db); + if let Some(parent_file_id) = parent_file_id.file_id() { + if current.origin.is_inline() { + return make_fixes( + ctx.sema.db, + parent_file_id, + source, + &module_name, + file_id, + ); + } + } + break; + } } } } @@ -92,6 +155,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option> { fn make_fixes( db: &RootDatabase, parent_file_id: FileId, + source: ModuleSource, new_mod_name: &str, added_file_id: FileId, ) -> Option> { @@ -102,14 +166,18 @@ fn make_fixes( let mod_decl = format!("mod {new_mod_name};"); let pub_mod_decl = format!("pub mod {new_mod_name};"); - let ast: ast::SourceFile = db.parse(parent_file_id).tree(); - let mut mod_decl_builder = TextEdit::builder(); let mut pub_mod_decl_builder = TextEdit::builder(); + let mut items = match &source { + ModuleSource::SourceFile(it) => it.items(), + ModuleSource::Module(it) => it.item_list()?.items(), + ModuleSource::BlockExpr(_) => return None, + }; + // If there's an existing `mod m;` statement matching the new one, don't emit a fix (it's // probably `#[cfg]`d out). - for item in ast.items() { + for item in items.clone() { if let ast::Item::Module(m) = item { if let Some(name) = m.name() { if m.item_list().is_none() && name.to_string() == new_mod_name { @@ -121,7 +189,7 @@ fn make_fixes( } // If there are existing `mod m;` items, append after them (after the first group of them, rather). - match ast.items().skip_while(|item| !is_outline_mod(item)).take_while(is_outline_mod).last() { + match items.clone().skip_while(|item| !is_outline_mod(item)).take_while(is_outline_mod).last() { Some(last) => { cov_mark::hit!(unlinked_file_append_to_existing_mods); let offset = last.syntax().text_range().end(); @@ -130,7 +198,7 @@ fn make_fixes( } None => { // Prepend before the first item in the file. - match ast.items().next() { + match items.next() { Some(item) => { cov_mark::hit!(unlinked_file_prepend_before_first_item); let offset = item.syntax().text_range().start(); @@ -140,7 +208,13 @@ fn make_fixes( None => { // No items in the file, so just append at the end. 
cov_mark::hit!(unlinked_file_empty_file); - let offset = ast.syntax().text_range().end(); + let offset = match &source { + ModuleSource::SourceFile(it) => it.syntax().text_range().end(), + ModuleSource::Module(it) => { + it.item_list()?.r_curly_token()?.text_range().start() + } + ModuleSource::BlockExpr(_) => return None, + }; mod_decl_builder.insert(offset, format!("{mod_decl}\n")); pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n")); } @@ -167,7 +241,6 @@ fn make_fixes( #[cfg(test)] mod tests { - use crate::tests::{check_diagnostics, check_fix, check_fixes, check_no_fix}; #[test] @@ -330,6 +403,70 @@ $0 mod foo; //- /foo.rs +"#, + ); + } + + #[test] + fn unlinked_file_insert_into_inline_simple() { + check_fix( + r#" +//- /main.rs +mod bar; +//- /bar.rs +mod foo { + +} +//- /bar/foo/baz.rs +$0 +"#, + r#" +mod foo { + +mod baz; +} +"#, + ); + } + + #[test] + fn unlinked_file_insert_into_inline_simple_modrs() { + check_fix( + r#" +//- /main.rs +mod bar; +//- /bar.rs +mod baz { + +} +//- /bar/baz/foo/mod.rs +$0 +"#, + r#" +mod baz { + +mod foo; +} +"#, + ); + } + + #[test] + fn unlinked_file_insert_into_inline_simple_modrs_main() { + check_fix( + r#" +//- /main.rs +mod bar { + +} +//- /bar/foo/mod.rs +$0 +"#, + r#" +mod bar { + +mod foo; +} "#, ); } diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs index b23c9f1966d5c..38501a8ba5a34 100644 --- a/crates/vfs/src/vfs_path.rs +++ b/crates/vfs/src/vfs_path.rs @@ -1,7 +1,7 @@ //! Abstract-ish representation of paths for VFS. use std::fmt; -use paths::{AbsPath, AbsPathBuf}; +use paths::{AbsPath, AbsPathBuf, RelPath}; /// Path in [`Vfs`]. /// @@ -84,6 +84,14 @@ impl VfsPath { } } + pub fn strip_prefix(&self, other: &VfsPath) -> Option<&RelPath> { + match (&self.0, &other.0) { + (VfsPathRepr::PathBuf(lhs), VfsPathRepr::PathBuf(rhs)) => lhs.strip_prefix(rhs), + (VfsPathRepr::VirtualPath(lhs), VfsPathRepr::VirtualPath(rhs)) => lhs.strip_prefix(rhs), + (VfsPathRepr::PathBuf(_) | VfsPathRepr::VirtualPath(_), _) => None, + } + } + /// Returns the `VfsPath` without its final component, if there is one. /// /// Returns [`None`] if the path is a root or prefix. @@ -320,6 +328,13 @@ impl VirtualPath { self.0.starts_with(&other.0) } + fn strip_prefix(&self, base: &VirtualPath) -> Option<&RelPath> { + <_ as AsRef>::as_ref(&self.0) + .strip_prefix(&base.0) + .ok() + .map(RelPath::new_unchecked) + } + /// Remove the last component of `self`. 
/// /// This will find the last `'/'` in `self`, and remove everything after it, From 5214a98d9c8030998a233e2308ef371535f8ae86 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 11 Jan 2023 23:04:35 +0100 Subject: [PATCH 024/501] Simplify --- .../src/handlers/unlinked_file.rs | 153 +++++++++--------- 1 file changed, 72 insertions(+), 81 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index 4bb3789d69d07..f527aa290c4cd 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -46,52 +46,51 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option> { let our_path = source_root.path_for_file(&file_id)?; let parent = our_path.parent()?; let (module_name, _) = our_path.name_and_extension()?; - let (parent, module_name) = if module_name == "mod" { + let (parent, module_name) = match module_name { // for mod.rs we need to actually look up one higher // and take the parent as our to be module name - let (name, _) = parent.name_and_extension()?; - (parent.parent()?, name.to_owned()) - } else { - (parent, module_name.to_owned()) + "mod" => { + let (name, _) = parent.name_and_extension()?; + (parent.parent()?, name.to_owned()) + } + _ => (parent, module_name.to_owned()), }; // check crate roots, i.e. main.rs, lib.rs, ... - 'outer: for &krate in &*ctx.sema.db.relevant_crates(file_id) { + 'crates: for &krate in &*ctx.sema.db.relevant_crates(file_id) { let crate_def_map = ctx.sema.db.crate_def_map(krate); - if let Some(root_file_id) = crate_def_map[crate_def_map.root()].origin.file_id() { - if let Some(path) = source_root.path_for_file(&root_file_id) { - let parent2 = path.parent()?; - if let Some(rel) = parent.strip_prefix(&parent2) { - let mut current = &crate_def_map[crate_def_map.root()]; - for ele in rel.as_ref().components() { - let seg = match ele { - std::path::Component::Normal(seg) => seg.to_str()?, - std::path::Component::RootDir => continue, - // shouldn't occur - _ => continue 'outer, - }; - match current.children.iter().find(|(name, _)| name.to_smol_str() == seg) { - Some((_, child)) => { - current = &crate_def_map[*child]; - } - None => continue 'outer, - } - } - let InFile { file_id: parent_file_id, value: source } = - current.definition_source(ctx.sema.db); - if let Some(parent_file_id) = parent_file_id.file_id() { - return make_fixes( - ctx.sema.db, - parent_file_id, - source, - &module_name, - file_id, - ); - } - } + + let root_module = &crate_def_map[crate_def_map.root()]; + let Some(root_file_id) = root_module.origin.file_id() else { continue }; + let Some(crate_root_path) = source_root.path_for_file(&root_file_id) else { continue }; + let Some(rel) = parent.strip_prefix(&crate_root_path.parent()?) 
else { continue }; + + // try resolving the relative difference of the paths as inline modules + let mut current = root_module; + for ele in rel.as_ref().components() { + let seg = match ele { + std::path::Component::Normal(seg) => seg.to_str()?, + std::path::Component::RootDir => continue, + // shouldn't occur + _ => continue 'crates, + }; + match current.children.iter().find(|(name, _)| name.to_smol_str() == seg) { + Some((_, &child)) => current = &crate_def_map[child], + None => continue 'crates, + } + if !current.origin.is_inline() { + continue 'crates; } } + + let InFile { file_id: parent_file_id, value: source } = + current.definition_source(ctx.sema.db); + let parent_file_id = parent_file_id.file_id()?; + return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id); } + + // if we aren't adding to a crate root, walk backwards such that we support `#[path = ...]` overrides if possible + // build all parent paths of the form `../module_name/mod.rs` and `../module_name.rs` let paths = iter::successors(Some(parent.clone()), |prev| prev.parent()).filter_map(|path| { let parent = path.parent()?; @@ -99,53 +98,45 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option> { Some(([parent.join(&format!("{name}.rs"))?, path.join("mod.rs")?], name.to_owned())) }); let mut stack = vec![]; - if let Some(&parent_id) = paths - .inspect(|(_, name)| stack.push(name.clone())) - .find_map(|(paths, _)| paths.into_iter().find_map(|path| source_root.file_for_path(&path))) - { - stack.pop(); - for &krate in ctx.sema.db.relevant_crates(parent_id).iter() { - let crate_def_map = ctx.sema.db.crate_def_map(krate); - 'outer: for (_, module) in crate_def_map.modules() { - if module.origin.file_id() == Some(parent_id) { - if module.origin.is_inline() { - continue; - } - if stack.is_empty() { - return make_fixes( - ctx.sema.db, - parent_id, - module.definition_source(ctx.sema.db).value, - &module_name, - file_id, - ); - } else { - let mut current = module; - for s in stack.iter().rev() { - match module.children.iter().find(|(name, _)| name.to_smol_str() == s) { - Some((_, child)) => { - current = &crate_def_map[*child]; - } - None => break 'outer, - } - } - let InFile { file_id: parent_file_id, value: source } = - current.definition_source(ctx.sema.db); - if let Some(parent_file_id) = parent_file_id.file_id() { - if current.origin.is_inline() { - return make_fixes( - ctx.sema.db, - parent_file_id, - source, - &module_name, - file_id, - ); - } - } - break; + let &parent_id = + paths.inspect(|(_, name)| stack.push(name.clone())).find_map(|(paths, _)| { + paths.into_iter().find_map(|path| source_root.file_for_path(&path)) + })?; + stack.pop(); + 'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() { + let crate_def_map = ctx.sema.db.crate_def_map(krate); + let Some((_, module)) = + crate_def_map.modules() + .find(|(_, module)| module.origin.file_id() == Some(parent_id) && !module.origin.is_inline()) + else { continue }; + + if stack.is_empty() { + return make_fixes( + ctx.sema.db, + parent_id, + module.definition_source(ctx.sema.db).value, + &module_name, + file_id, + ); + } else { + // direct parent file is missing, + // try finding a parent that has an inline tree from here on + let mut current = module; + for s in stack.iter().rev() { + match module.children.iter().find(|(name, _)| name.to_smol_str() == s) { + Some((_, child)) => { + current = &crate_def_map[*child]; } + None => continue 'crates, + } + if !current.origin.is_inline() { + continue 'crates; } } + let InFile 
{ file_id: parent_file_id, value: source } = + current.definition_source(ctx.sema.db); + let parent_file_id = parent_file_id.file_id()?; + return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id); } } From 6f201cfc56783beacb98c9e9033ddd58567c4517 Mon Sep 17 00:00:00 2001 From: Roland Fredenhagen Date: Thu, 12 Jan 2023 02:28:13 +0100 Subject: [PATCH 025/501] Assist: desugar doc-comment --- .../src/handlers/convert_comment_block.rs | 4 +- .../src/handlers/desugar_doc_comment.rs | 313 ++++++++++++++++++ crates/ide-assists/src/handlers/raw_string.rs | 2 +- crates/ide-assists/src/lib.rs | 2 + crates/ide-assists/src/tests/generated.rs | 15 + 5 files changed, 333 insertions(+), 3 deletions(-) create mode 100644 crates/ide-assists/src/handlers/desugar_doc_comment.rs diff --git a/crates/ide-assists/src/handlers/convert_comment_block.rs b/crates/ide-assists/src/handlers/convert_comment_block.rs index 312cb65abd2a1..282820b16725e 100644 --- a/crates/ide-assists/src/handlers/convert_comment_block.rs +++ b/crates/ide-assists/src/handlers/convert_comment_block.rs @@ -107,7 +107,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> { /// The line -> block assist can be invoked from anywhere within a sequence of line comments. /// relevant_line_comments crawls backwards and forwards finding the complete sequence of comments that will /// be joined. -fn relevant_line_comments(comment: &ast::Comment) -> Vec { +pub fn relevant_line_comments(comment: &ast::Comment) -> Vec { // The prefix identifies the kind of comment we're dealing with let prefix = comment.prefix(); let same_prefix = |c: &ast::Comment| c.prefix() == prefix; @@ -159,7 +159,7 @@ fn relevant_line_comments(comment: &ast::Comment) -> Vec { // */ // // But since such comments aren't idiomatic we're okay with this. -fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String { +pub fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String { let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap(); let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix); diff --git a/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/crates/ide-assists/src/handlers/desugar_doc_comment.rs new file mode 100644 index 0000000000000..ea7044a54451c --- /dev/null +++ b/crates/ide-assists/src/handlers/desugar_doc_comment.rs @@ -0,0 +1,313 @@ +use either::Either; +use itertools::Itertools; +use syntax::{ + ast::{self, edit::IndentLevel, CommentPlacement, Whitespace}, + AstToken, TextRange, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +use super::{ + convert_comment_block::{line_comment_text, relevant_line_comments}, + raw_string::required_hashes, +}; + +// Assist: desugar_doc_comment +// +// Desugars doc-comments to the attribute form. 
+// +// ``` +// /// Multi-line$0 +// /// comment +// ``` +// -> +// ``` +// #[doc = r"Multi-line +// comment"] +// ``` +pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let comment = ctx.find_token_at_offset::()?; + // Only allow doc comments + let Some(placement) = comment.kind().doc else { return None; }; + + // Only allow comments which are alone on their line + if let Some(prev) = comment.syntax().prev_token() { + if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() { + return None; + } + } + + let indentation = IndentLevel::from_token(comment.syntax()).to_string(); + + let (target, comments) = match comment.kind().shape { + ast::CommentShape::Block => (comment.syntax().text_range(), Either::Left(comment)), + ast::CommentShape::Line => { + // Find all the comments we'll be desugaring + let comments = relevant_line_comments(&comment); + + // Establish the target of our edit based on the comments we found + ( + TextRange::new( + comments[0].syntax().text_range().start(), + comments.last().unwrap().syntax().text_range().end(), + ), + Either::Right(comments), + ) + } + }; + + acc.add( + AssistId("desugar_doc_comment", AssistKind::RefactorRewrite), + "Desugar doc-comment to attribute macro", + target, + |edit| { + let text = match comments { + Either::Left(comment) => { + let text = comment.text(); + text[comment.prefix().len()..(text.len() - "*/".len())] + .trim() + .lines() + .map(|l| l.strip_prefix(&indentation).unwrap_or(l)) + .join("\n") + } + Either::Right(comments) => { + comments.into_iter().map(|c| line_comment_text(IndentLevel(0), c)).join("\n") + } + }; + + let hashes = "#".repeat(required_hashes(&text)); + + let prefix = match placement { + CommentPlacement::Inner => "#!", + CommentPlacement::Outer => "#", + }; + + let output = format!(r#"{prefix}[doc = r{hashes}"{text}"{hashes}]"#); + + edit.replace(target, output) + }, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn single_line() { + check_assist( + desugar_doc_comment, + r#" +/// line$0 comment +fn main() { + foo(); +} +"#, + r#" +#[doc = r"line comment"] +fn main() { + foo(); +} +"#, + ); + check_assist( + desugar_doc_comment, + r#" +//! line$0 comment +fn main() { + foo(); +} +"#, + r#" +#![doc = r"line comment"] +fn main() { + foo(); +} +"#, + ); + } + + #[test] + fn single_line_indented() { + check_assist( + desugar_doc_comment, + r#" +fn main() { + /// line$0 comment + struct Foo; +} +"#, + r#" +fn main() { + #[doc = r"line comment"] + struct Foo; +} +"#, + ); + } + + #[test] + fn multiline() { + check_assist( + desugar_doc_comment, + r#" +fn main() { + /// above + /// line$0 comment + /// + /// below + struct Foo; +} +"#, + r#" +fn main() { + #[doc = r"above +line comment + +below"] + struct Foo; +} +"#, + ); + } + + #[test] + fn end_of_line() { + check_assist_not_applicable( + desugar_doc_comment, + r#" +fn main() { /// end-of-line$0 comment + struct Foo; +} +"#, + ); + } + + #[test] + fn single_line_different_kinds() { + check_assist( + desugar_doc_comment, + r#" +fn main() { + //! different prefix + /// line$0 comment + /// below + struct Foo; +} +"#, + r#" +fn main() { + //! 
different prefix + #[doc = r"line comment +below"] + struct Foo; +} +"#, + ); + } + + #[test] + fn single_line_separate_chunks() { + check_assist( + desugar_doc_comment, + r#" +/// different chunk + +/// line$0 comment +/// below +"#, + r#" +/// different chunk + +#[doc = r"line comment +below"] +"#, + ); + } + + #[test] + fn block_comment() { + check_assist( + desugar_doc_comment, + r#" +/** + hi$0 there +*/ +"#, + r#" +#[doc = r"hi there"] +"#, + ); + } + + #[test] + fn inner_doc_block() { + check_assist( + desugar_doc_comment, + r#" +/*! + hi$0 there +*/ +"#, + r#" +#![doc = r"hi there"] +"#, + ); + } + + #[test] + fn block_indent() { + check_assist( + desugar_doc_comment, + r#" +fn main() { + /*! + hi$0 there + + ``` + code_sample + ``` + */ +} +"#, + r#" +fn main() { + #![doc = r"hi there + +``` + code_sample +```"] +} +"#, + ); + } + + #[test] + fn end_of_line_block() { + check_assist_not_applicable( + desugar_doc_comment, + r#" +fn main() { + foo(); /** end-of-line$0 comment */ +} +"#, + ); + } + + #[test] + fn regular_comment() { + check_assist_not_applicable(desugar_doc_comment, r#"// some$0 comment"#); + check_assist_not_applicable(desugar_doc_comment, r#"/* some$0 comment*/"#); + } + + #[test] + fn quotes_and_escapes() { + check_assist( + desugar_doc_comment, + r###"/// some$0 "\ "## comment"###, + r####"#[doc = r###"some "\ "## comment"###]"####, + ); + } +} diff --git a/crates/ide-assists/src/handlers/raw_string.rs b/crates/ide-assists/src/handlers/raw_string.rs index c9bc25b27a5ed..eff5a1f21f278 100644 --- a/crates/ide-assists/src/handlers/raw_string.rs +++ b/crates/ide-assists/src/handlers/raw_string.rs @@ -155,7 +155,7 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< }) } -fn required_hashes(s: &str) -> usize { +pub fn required_hashes(s: &str) -> usize { let mut res = 0usize; for idx in s.match_indices('"').map(|(i, _)| i) { let (_, sub) = s.split_at(idx + 1); diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 7813c9f9cbe80..546ef96260f2c 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -126,6 +126,7 @@ mod handlers { mod convert_to_guarded_return; mod convert_two_arm_bool_match_to_matches_macro; mod convert_while_to_loop; + mod desugar_doc_comment; mod destructure_tuple_binding; mod expand_glob_import; mod extract_expressions_from_format_string; @@ -231,6 +232,7 @@ mod handlers { convert_tuple_struct_to_named_struct::convert_tuple_struct_to_named_struct, convert_two_arm_bool_match_to_matches_macro::convert_two_arm_bool_match_to_matches_macro, convert_while_to_loop::convert_while_to_loop, + desugar_doc_comment::desugar_doc_comment, destructure_tuple_binding::destructure_tuple_binding, expand_glob_import::expand_glob_import, extract_expressions_from_format_string::extract_expressions_from_format_string, diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 006ae4b303418..16a06b60de901 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -597,6 +597,21 @@ fn main() { ) } +#[test] +fn doctest_desugar_doc_comment() { + check_doc_test( + "desugar_doc_comment", + r#####" +/// Multi-line$0 +/// comment +"#####, + r#####" +#[doc = r"Multi-line +comment"] +"#####, + ) +} + #[test] fn doctest_expand_glob_import() { check_doc_test( From 03bc46f96ba5c0bdf352cf32660d8760172dd010 Mon Sep 17 00:00:00 2001 From: Roland Fredenhagen Date: Thu, 12 Jan 2023 09:54:48 +0100 Subject: [PATCH 
026/501] Convert pub to pub(crate) --- crates/ide-assists/src/handlers/convert_comment_block.rs | 4 ++-- crates/ide-assists/src/handlers/raw_string.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/ide-assists/src/handlers/convert_comment_block.rs b/crates/ide-assists/src/handlers/convert_comment_block.rs index 282820b16725e..1acd5ee97283f 100644 --- a/crates/ide-assists/src/handlers/convert_comment_block.rs +++ b/crates/ide-assists/src/handlers/convert_comment_block.rs @@ -107,7 +107,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> { /// The line -> block assist can be invoked from anywhere within a sequence of line comments. /// relevant_line_comments crawls backwards and forwards finding the complete sequence of comments that will /// be joined. -pub fn relevant_line_comments(comment: &ast::Comment) -> Vec { +pub(crate) fn relevant_line_comments(comment: &ast::Comment) -> Vec { // The prefix identifies the kind of comment we're dealing with let prefix = comment.prefix(); let same_prefix = |c: &ast::Comment| c.prefix() == prefix; @@ -159,7 +159,7 @@ pub fn relevant_line_comments(comment: &ast::Comment) -> Vec { // */ // // But since such comments aren't idiomatic we're okay with this. -pub fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String { +pub(crate) fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String { let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap(); let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix); diff --git a/crates/ide-assists/src/handlers/raw_string.rs b/crates/ide-assists/src/handlers/raw_string.rs index eff5a1f21f278..b1b3bab7e5142 100644 --- a/crates/ide-assists/src/handlers/raw_string.rs +++ b/crates/ide-assists/src/handlers/raw_string.rs @@ -155,7 +155,7 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option< }) } -pub fn required_hashes(s: &str) -> usize { +pub(crate) fn required_hashes(s: &str) -> usize { let mut res = 0usize; for idx in s.match_indices('"').map(|(i, _)| i) { let (_, sub) = s.split_at(idx + 1); From 14777ce75190d21f52ae068e3d43a7d16de84ca7 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 12 Jan 2023 11:24:44 +0100 Subject: [PATCH 027/501] fix indentation of unlinked_file quickfix --- .../src/handlers/unlinked_file.rs | 38 +++++++++---------- crates/ide/src/goto_declaration.rs | 1 + 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/crates/ide-diagnostics/src/handlers/unlinked_file.rs index f527aa290c4cd..3d45a75913ad8 100644 --- a/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -9,7 +9,7 @@ use ide_db::{ RootDatabase, }; use syntax::{ - ast::{self, HasModuleItem, HasName}, + ast::{self, edit::IndentLevel, HasModuleItem, HasName}, AstNode, TextRange, TextSize, }; use text_edit::TextEdit; @@ -184,30 +184,36 @@ fn make_fixes( Some(last) => { cov_mark::hit!(unlinked_file_append_to_existing_mods); let offset = last.syntax().text_range().end(); - mod_decl_builder.insert(offset, format!("\n{mod_decl}")); - pub_mod_decl_builder.insert(offset, format!("\n{pub_mod_decl}")); + let indent = IndentLevel::from_node(last.syntax()); + mod_decl_builder.insert(offset, format!("\n{indent}{mod_decl}")); + pub_mod_decl_builder.insert(offset, format!("\n{indent}{pub_mod_decl}")); } None => { // Prepend before the first item in the 
file. match items.next() { - Some(item) => { + Some(first) => { cov_mark::hit!(unlinked_file_prepend_before_first_item); - let offset = item.syntax().text_range().start(); - mod_decl_builder.insert(offset, format!("{mod_decl}\n\n")); - pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n")); + let offset = first.syntax().text_range().start(); + let indent = IndentLevel::from_node(first.syntax()); + mod_decl_builder.insert(offset, format!("{mod_decl}\n\n{indent}")); + pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n{indent}")); } None => { // No items in the file, so just append at the end. cov_mark::hit!(unlinked_file_empty_file); + let mut indent = IndentLevel::from(0); let offset = match &source { ModuleSource::SourceFile(it) => it.syntax().text_range().end(), ModuleSource::Module(it) => { + indent = IndentLevel::from_node(it.syntax()) + 1; it.item_list()?.r_curly_token()?.text_range().start() } - ModuleSource::BlockExpr(_) => return None, + ModuleSource::BlockExpr(it) => { + it.stmt_list()?.r_curly_token()?.text_range().start() + } }; - mod_decl_builder.insert(offset, format!("{mod_decl}\n")); - pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n")); + mod_decl_builder.insert(offset, format!("{indent}{mod_decl}\n")); + pub_mod_decl_builder.insert(offset, format!("{indent}{pub_mod_decl}\n")); } } } @@ -406,15 +412,13 @@ mod foo; mod bar; //- /bar.rs mod foo { - } //- /bar/foo/baz.rs $0 "#, r#" mod foo { - -mod baz; + mod baz; } "#, ); @@ -428,15 +432,13 @@ mod baz; mod bar; //- /bar.rs mod baz { - } //- /bar/baz/foo/mod.rs $0 "#, r#" mod baz { - -mod foo; + mod foo; } "#, ); @@ -448,15 +450,13 @@ mod foo; r#" //- /main.rs mod bar { - } //- /bar/foo/mod.rs $0 "#, r#" mod bar { - -mod foo; + mod foo; } "#, ); diff --git a/crates/ide/src/goto_declaration.rs b/crates/ide/src/goto_declaration.rs index c7130a2a4bb0a..e70bc2ec54172 100644 --- a/crates/ide/src/goto_declaration.rs +++ b/crates/ide/src/goto_declaration.rs @@ -17,6 +17,7 @@ use crate::{ // This is the same as `Go to Definition` with the following exceptions: // - outline modules will navigate to the `mod name;` item declaration // - trait assoc items will navigate to the assoc item of the trait declaration opposed to the trait impl +// - fields in patterns will navigate to the field declaration of the struct, union or variant pub(crate) fn goto_declaration( db: &RootDatabase, position: FilePosition, From 7413a9954f2d2967f790fc634509a0850ff81769 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Thu, 12 Jan 2023 10:48:10 +0000 Subject: [PATCH 028/501] interior-mutable types should be 'static' rather than 'const --- crates/hir-def/src/nameres/mod_resolution.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-def/src/nameres/mod_resolution.rs b/crates/hir-def/src/nameres/mod_resolution.rs index 4c263846d27d2..51c565fe12339 100644 --- a/crates/hir-def/src/nameres/mod_resolution.rs +++ b/crates/hir-def/src/nameres/mod_resolution.rs @@ -7,7 +7,7 @@ use syntax::SmolStr; use crate::{db::DefDatabase, HirFileId}; -const MOD_DEPTH_LIMIT: Limit = Limit::new(32); +static MOD_DEPTH_LIMIT: Limit = Limit::new(32); #[derive(Clone, Debug)] pub(super) struct ModDir { From c4b36b64aae0d5bace5d19e0fbd64d1db9114bf9 Mon Sep 17 00:00:00 2001 From: Daniel Eades Date: Thu, 12 Jan 2023 11:53:45 +0000 Subject: [PATCH 029/501] remove recursive 'Display' implementations --- .../src/abis/abi_1_58/proc_macro/mod.rs | 24 +++++++++---------- .../src/abis/abi_1_63/proc_macro/mod.rs | 24 
+++++++++---------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs index c5145d00e329e..a94b909941d96 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs @@ -123,8 +123,8 @@ impl FromStr for TokenStream { /// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters and negative numeric literals. impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -522,8 +522,8 @@ impl From for TokenTree { /// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters and negative numeric literals. impl fmt::Display for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -619,8 +619,8 @@ impl Group { /// into the same group (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters. impl fmt::Display for Group { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -693,8 +693,8 @@ impl Punct { /// Prints the punctuation character as a string that should be losslessly convertible /// back into the same character. impl fmt::Display for Punct { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -771,8 +771,8 @@ impl Ident { /// Prints the identifier as a string that should be losslessly convertible /// back into the same identifier. impl fmt::Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -1014,8 +1014,8 @@ impl FromStr for Literal { /// Prints the literal as a string that should be losslessly convertible /// back into the same literal (except for possible rounding for floating point literals). impl fmt::Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs index 7ab1f421daf89..89bd10da5e486 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs @@ -124,8 +124,8 @@ impl FromStr for TokenStream { /// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters and negative numeric literals. 
impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -572,8 +572,8 @@ impl From for TokenTree { /// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters and negative numeric literals. impl fmt::Display for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -669,8 +669,8 @@ impl Group { /// into the same group (modulo spans), except for possibly `TokenTree::Group`s /// with `Delimiter::None` delimiters. impl fmt::Display for Group { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -743,8 +743,8 @@ impl Punct { /// Prints the punctuation character as a string that should be losslessly convertible /// back into the same character. impl fmt::Display for Punct { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -821,8 +821,8 @@ impl Ident { /// Prints the identifier as a string that should be losslessly convertible /// back into the same identifier. impl fmt::Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } @@ -1064,8 +1064,8 @@ impl FromStr for Literal { /// Prints the literal as a string that should be losslessly convertible /// back into the same literal (except for possible rounding for floating point literals). 
impl fmt::Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.to_string()) + fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { + unimplemented!() } } From 9721505bf1cc441334c9973525a5c9de826e51bb Mon Sep 17 00:00:00 2001 From: Neel Yadav Date: Wed, 30 Nov 2022 20:14:06 -0600 Subject: [PATCH 030/501] Fix panicking Option unwrapping in match arm analysis --- crates/ide-assists/src/handlers/convert_match_to_let_else.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs index 5bf04a3ad3719..fbd81c80151d3 100644 --- a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs +++ b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs @@ -87,7 +87,7 @@ fn find_arms( let mut extracting = None; let mut diverging = None; for arm in arms { - if ctx.sema.type_of_expr(&arm.expr().unwrap()).unwrap().original().is_never() { + if ctx.sema.type_of_expr(&arm.expr()?)?.original().is_never() { diverging = Some(arm); } else { extracting = Some(arm); From aa73366b0b91e1b99a0e6059e7f8dfbcb41cf929 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Fri, 13 Jan 2023 11:44:02 +0000 Subject: [PATCH 031/501] internal: explain the idea behind rust-project.json --- crates/project-model/src/project_json.rs | 44 ++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index 9af0eafe9fddb..a990fb267a7da 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -4,6 +4,50 @@ //! idea here is that people who do not use Cargo, can instead teach their build //! system to generate `rust-project.json` which can be ingested by //! rust-analyzer. +//! +//! This short file is a somewhat big conceptual piece of the architecture of +//! rust-analyzer, so it's worth elaborating on the underlying ideas and +//! motivation. +//! +//! For rust-analyzer to function, it needs some information about the project. +//! Specifically, it maintains an in-memory data structure which lists all the +//! crates (compilation units) and dependencies between them. This is necessary +//! a global singleton, as we do want, eg, find usages to always search across +//! the whole project, rather than just in the "current" crate. +//! +//! Normally, we get this "crate graph" by calling `cargo metadata +//! --message-format=json` for each cargo workspace and merging results. This +//! works for your typical cargo project, but breaks down for large folks who +//! have a monorepo with an infitine amount of Rust which is build with bazel or +//! some such. +//! +//! To support this use-case, we need to make _something_ configurable. To avoid +//! [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring +//! the lowest possible layer. `ProjectJson` is essentially a hook to just set +//! that global singleton in-memory data structure. It is optimized for power, +//! not for convenience (you'd be using cargo anyway if you wanted nice things, +//! right? :) +//! +//! `rust-project.json` also isn't necessary a file. Architecturally, we support +//! any convenient way to specify this data, which today is: +//! +//! * file on disk +//! * a field in the config (ie, you can send a JSON request with the contents +//! of rust-project.json to rust-analyzer, no need to write anything to disk) +//! +//!
Another possible thing we don't do today, but which would be totally valid, +//! is to add an extension point to VS Code extension to register custom +//! project. +//! +//! In general, it is assumed that if you are going to use `rust-project.json`, +//! you'd write a fair bit of custom code gluing your build system to ra through +//! this JSON format. This logic can take form of a VS Code extension, or a +//! proxy process which injects data into "configure" LSP request, or maybe just +//! a simple build system rule to generate the file. +//! +//! In particular, the logic for lazily loading parts of the monorepo as the +//! user explores them belongs to that extension (it's totally valid to change +//! rust-project.json over time via configuration request!) use std::path::PathBuf; From 87315ef5c39647d1ddab66d58153840562dab353 Mon Sep 17 00:00:00 2001 From: Alex Kladov Date: Fri, 13 Jan 2023 11:57:02 +0000 Subject: [PATCH 032/501] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Laurențiu Nicola --- crates/project-model/src/project_json.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index a990fb267a7da..319897890fc50 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -18,10 +18,11 @@ //! Normally, we get this "crate graph" by calling `cargo metadata //! --message-format=json` for each cargo workspace and merging results. This //! works for your typical cargo project, but breaks down for large folks who -//! have a monorepo with an infitine amount of Rust which is build with bazel or +//! have a monorepo with an infinite amount of Rust code which is built with bazel or //! some such. //! -//! To support this use-case, we need to make _something_ configurable. To avoid +//! To support this use case, we need to make _something_ configurable. To avoid +//! a [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring //! [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring //! the lowest possible layer. `ProjectJson` is essentially a hook to just set //! that global singleton in-memory data structure. It is optimized for power, From bd350085f66c5e392a14b11d40ac24b4fd139aca Mon Sep 17 00:00:00 2001 From: Alex Kladov Date: Fri, 13 Jan 2023 12:01:04 +0000 Subject: [PATCH 033/501] Update crates/project-model/src/project_json.rs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Laurențiu Nicola --- crates/project-model/src/project_json.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index 319897890fc50..4b2448e47f1ff 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -23,7 +23,6 @@ //! //! To support this use case, we need to make _something_ configurable. To avoid //! a [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring -//! [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring //! the lowest possible layer. `ProjectJson` is essentially a hook to just set //! that global singleton in-memory data structure. It is optimized for power, //! 
not for convenience (you'd be using cargo anyway if you wanted nice things, From aafb0f1f8dd4c57d258374ac554b3162c497847d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 13 Jan 2023 15:03:37 +0100 Subject: [PATCH 034/501] Use smallvec for inlay-hint parts --- Cargo.lock | 1 + crates/ide/Cargo.toml | 1 + crates/ide/src/inlay_hints.rs | 15 +++++++++++---- crates/ide/src/inlay_hints/closing_brace.rs | 5 ++++- 4 files changed, 17 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5f426d8856263..13d8d40ddd375 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -645,6 +645,7 @@ dependencies = [ "profile", "pulldown-cmark", "pulldown-cmark-to-cmark", + "smallvec", "stdx", "syntax", "test-utils", diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index 73f202630f15b..397383bc3a0b2 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -20,6 +20,7 @@ pulldown-cmark-to-cmark = "10.0.4" pulldown-cmark = { version = "0.9.1", default-features = false } url = "2.3.1" dot = "0.1.4" +smallvec = "1.10.0" stdx = { path = "../stdx", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 48a7bbfecffa0..f4a9d7b653a29 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -7,6 +7,7 @@ use either::Either; use hir::{known, HasVisibility, HirDisplay, HirWrite, ModuleDef, ModuleDefId, Semantics}; use ide_db::{base_db::FileRange, famous_defs::FamousDefs, RootDatabase}; use itertools::Itertools; +use smallvec::{smallvec, SmallVec}; use stdx::never; use syntax::{ ast::{self, AstNode}, @@ -83,7 +84,7 @@ pub enum AdjustmentHintsMode { PreferPostfix, } -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum InlayKind { BindingModeHint, ChainingHint, @@ -102,9 +103,15 @@ pub enum InlayKind { #[derive(Debug)] pub struct InlayHint { + /// The text range this inlay hint applies to. pub range: TextRange, + /// The kind of this inlay hint. This is used to determine side and padding of the hint for + /// rendering purposes. pub kind: InlayKind, + /// The actual label to show in the inlay hint. pub label: InlayHintLabel, + /// The tooltip to show when hovering over the inlay hint, this may invoke other actions like + /// hover requests to show. pub tooltip: Option, } @@ -117,7 +124,7 @@ pub enum InlayTooltip { #[derive(Default)] pub struct InlayHintLabel { - pub parts: Vec, + pub parts: SmallVec<[InlayHintLabelPart; 1]>, } impl InlayHintLabel { @@ -145,13 +152,13 @@ impl InlayHintLabel { impl From for InlayHintLabel { fn from(s: String) -> Self { - Self { parts: vec![InlayHintLabelPart { text: s, linked_location: None }] } + Self { parts: smallvec![InlayHintLabelPart { text: s, linked_location: None }] } } } impl From<&str> for InlayHintLabel { fn from(s: &str) -> Self { - Self { parts: vec![InlayHintLabelPart { text: s.into(), linked_location: None }] } + Self { parts: smallvec![InlayHintLabelPart { text: s.into(), linked_location: None }] } } } diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs index e340c64c54b55..12abefc140d52 100644 --- a/crates/ide/src/inlay_hints/closing_brace.rs +++ b/crates/ide/src/inlay_hints/closing_brace.rs @@ -5,6 +5,7 @@ //! 
``` use hir::{HirDisplay, Semantics}; use ide_db::{base_db::FileRange, RootDatabase}; +use smallvec::smallvec; use syntax::{ ast::{self, AstNode, HasName}, match_ast, SyntaxKind, SyntaxNode, T, @@ -116,7 +117,9 @@ pub(super) fn hints( acc.push(InlayHint { range: closing_token.text_range(), kind: InlayKind::ClosingBraceHint, - label: InlayHintLabel { parts: vec![InlayHintLabelPart { text: label, linked_location }] }, + label: InlayHintLabel { + parts: smallvec![InlayHintLabelPart { text: label, linked_location }], + }, tooltip: None, // provided by label part location }); From e9724e55dfcbd6142a4a746e36bdd28f685072d6 Mon Sep 17 00:00:00 2001 From: bvanjoi Date: Sat, 14 Jan 2023 18:51:49 +0800 Subject: [PATCH 035/501] fix: check orpat in missing match --- .../src/handlers/add_missing_match_arms.rs | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 8e4ac69ae6f63..0461cc790eb30 100644 --- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -269,6 +269,7 @@ fn does_pat_match_variant(pat: &Pat, var: &Pat) -> bool { (Pat::TuplePat(tpat), Pat::TuplePat(tvar)) => { tpat.fields().zip(tvar.fields()).all(|(p, v)| does_pat_match_variant(&p, &v)) } + (Pat::OrPat(opat), _) => opat.pats().any(|p| does_pat_match_variant(&p, var)), _ => utils::does_pat_match_variant(pat, var), } } @@ -525,6 +526,19 @@ fn foo(a: bool) { add_missing_match_arms, r#" fn foo(a: bool) { + match (a, a)$0 { + (true | false, true) => {} + (true, false) => {} + (false, false) => {} + } +} +"#, + ); + + check_assist_not_applicable( + add_missing_match_arms, + r#" +fn foo(a: bool) { match (a, a)$0 { (true, true) => {} (true, false) => {} @@ -565,6 +579,26 @@ fn foo(a: bool) { add_missing_match_arms, r#" fn foo(a: bool) { + match (a, a)$0 { + (true | false, true) => {} + } +} +"#, + r#" +fn foo(a: bool) { + match (a, a) { + (true | false, true) => {} + $0(true, false) => todo!(), + (false, false) => todo!(), + } +} +"#, + ); + + check_assist( + add_missing_match_arms, + r#" +fn foo(a: bool) { match (a, a)$0 { (false, true) => {} } @@ -882,6 +916,33 @@ fn main() { } "#, ); + + check_assist( + add_missing_match_arms, + r#" +enum E { A, B, C } +fn main() { + use E::*; + match (A, B, C)$0 { + (A | B , A, A | B | C) => (), + (A | B | C , B | C, A | B | C) => (), + } +} +"#, + r#" +enum E { A, B, C } +fn main() { + use E::*; + match (A, B, C) { + (A | B , A, A | B | C) => (), + (A | B | C , B | C, A | B | C) => (), + $0(C, A, A) => todo!(), + (C, A, B) => todo!(), + (C, A, C) => todo!(), + } +} +"#, + ) } #[test] From 76e216f29b44322b52c49d8c56f20763beb271b5 Mon Sep 17 00:00:00 2001 From: Lukas Markeffsky <@> Date: Sat, 14 Jan 2023 11:48:43 +0100 Subject: [PATCH 036/501] Use associated items of `char` instead of freestanding items in `core::char` --- compiler/rustc_middle/src/ty/print/pretty.rs | 1 - compiler/rustc_span/src/lev_distance/tests.rs | 3 +-- library/alloc/src/string.rs | 8 +++---- library/core/src/char/decode.rs | 6 ++--- library/core/src/char/methods.rs | 24 ++++--------------- library/core/src/char/mod.rs | 2 +- library/core/src/iter/range.rs | 1 - library/core/src/str/iter.rs | 10 ++++---- library/core/tests/iter/range.rs | 1 - library/proc_macro/src/bridge/rpc.rs | 1 - library/std/src/sys/windows/stdio.rs | 3 +-- library/std/src/sys_common/wtf8.rs | 6 ++--- 12 files changed, 21 insertions(+), 45 deletions(-) 
diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index a91e8de5f21ea..f423dcb030afa 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -22,7 +22,6 @@ use rustc_target::spec::abi::Abi; use smallvec::SmallVec; use std::cell::Cell; -use std::char; use std::collections::BTreeMap; use std::fmt::{self, Write as _}; use std::iter; diff --git a/compiler/rustc_span/src/lev_distance/tests.rs b/compiler/rustc_span/src/lev_distance/tests.rs index b17d6588c9f06..ed03b22c61fd6 100644 --- a/compiler/rustc_span/src/lev_distance/tests.rs +++ b/compiler/rustc_span/src/lev_distance/tests.rs @@ -2,9 +2,8 @@ use super::*; #[test] fn test_lev_distance() { - use std::char::{from_u32, MAX}; // Test bytelength agnosticity - for c in (0..MAX as u32).filter_map(from_u32).map(|i| i.to_string()) { + for c in (0..char::MAX as u32).filter_map(char::from_u32).map(|i| i.to_string()) { assert_eq!(lev_distance(&c[..], &c[..], usize::MAX), Some(0)); } diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs index 3118c7189a5ed..270e2e34396e4 100644 --- a/library/alloc/src/string.rs +++ b/library/alloc/src/string.rs @@ -42,8 +42,6 @@ #![stable(feature = "rust1", since = "1.0.0")] -#[cfg(not(no_global_oom_handling))] -use core::char::{decode_utf16, REPLACEMENT_CHARACTER}; use core::error::Error; use core::fmt; use core::hash; @@ -683,7 +681,7 @@ impl String { // This isn't done via collect::>() for performance reasons. // FIXME: the function can be simplified again when #48994 is closed. let mut ret = String::with_capacity(v.len()); - for c in decode_utf16(v.iter().cloned()) { + for c in char::decode_utf16(v.iter().cloned()) { if let Ok(c) = c { ret.push(c); } else { @@ -722,7 +720,9 @@ impl String { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn from_utf16_lossy(v: &[u16]) -> String { - decode_utf16(v.iter().cloned()).map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)).collect() + char::decode_utf16(v.iter().cloned()) + .map(|r| r.unwrap_or(char::REPLACEMENT_CHARACTER)) + .collect() } /// Decomposes a `String` into its raw components. diff --git a/library/core/src/char/decode.rs b/library/core/src/char/decode.rs index eeb0880304087..dbfe251f2bb71 100644 --- a/library/core/src/char/decode.rs +++ b/library/core/src/char/decode.rs @@ -3,8 +3,6 @@ use crate::error::Error; use crate::fmt; -use super::from_u32_unchecked; - /// An iterator that decodes UTF-16 encoded code points from an iterator of `u16`s. /// /// This `struct` is created by the [`decode_utf16`] method on [`char`]. See its @@ -49,7 +47,7 @@ impl> Iterator for DecodeUtf16 { if !u.is_utf16_surrogate() { // SAFETY: not a surrogate - Some(Ok(unsafe { from_u32_unchecked(u as u32) })) + Some(Ok(unsafe { char::from_u32_unchecked(u as u32) })) } else if u >= 0xDC00 { // a trailing surrogate Some(Err(DecodeUtf16Error { code: u })) @@ -69,7 +67,7 @@ impl> Iterator for DecodeUtf16 { // all ok, so lets decode it. 
let c = (((u & 0x3ff) as u32) << 10 | (u2 & 0x3ff) as u32) + 0x1_0000; // SAFETY: we checked that it's a legal unicode value - Some(Ok(unsafe { from_u32_unchecked(c) })) + Some(Ok(unsafe { char::from_u32_unchecked(c) })) } } diff --git a/library/core/src/char/methods.rs b/library/core/src/char/methods.rs index 3e7383b4cd199..9bc97ea0bff18 100644 --- a/library/core/src/char/methods.rs +++ b/library/core/src/char/methods.rs @@ -53,15 +53,13 @@ impl char { /// Basic usage: /// /// ``` - /// use std::char::decode_utf16; - /// /// // 𝄞music /// let v = [ /// 0xD834, 0xDD1E, 0x006d, 0x0075, 0x0073, 0xDD1E, 0x0069, 0x0063, 0xD834, /// ]; /// /// assert_eq!( - /// decode_utf16(v) + /// char::decode_utf16(v) /// .map(|r| r.map_err(|e| e.unpaired_surrogate())) /// .collect::>(), /// vec![ @@ -77,16 +75,14 @@ impl char { /// A lossy decoder can be obtained by replacing `Err` results with the replacement character: /// /// ``` - /// use std::char::{decode_utf16, REPLACEMENT_CHARACTER}; - /// /// // 𝄞music /// let v = [ /// 0xD834, 0xDD1E, 0x006d, 0x0075, 0x0073, 0xDD1E, 0x0069, 0x0063, 0xD834, /// ]; /// /// assert_eq!( - /// decode_utf16(v) - /// .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)) + /// char::decode_utf16(v) + /// .map(|r| r.unwrap_or(char::REPLACEMENT_CHARACTER)) /// .collect::(), /// "𝄞mus�ic�" /// ); @@ -123,8 +119,6 @@ impl char { /// Basic usage: /// /// ``` - /// use std::char; - /// /// let c = char::from_u32(0x2764); /// /// assert_eq!(Some('❤'), c); @@ -133,8 +127,6 @@ impl char { /// Returning `None` when the input is not a valid `char`: /// /// ``` - /// use std::char; - /// /// let c = char::from_u32(0x110000); /// /// assert_eq!(None, c); @@ -176,8 +168,6 @@ impl char { /// Basic usage: /// /// ``` - /// use std::char; - /// /// let c = unsafe { char::from_u32_unchecked(0x2764) }; /// /// assert_eq!('❤', c); @@ -210,8 +200,6 @@ impl char { /// Basic usage: /// /// ``` - /// use std::char; - /// /// let c = char::from_digit(4, 10); /// /// assert_eq!(Some('4'), c); @@ -225,8 +213,6 @@ impl char { /// Returning `None` when the input is not a digit: /// /// ``` - /// use std::char; - /// /// let c = char::from_digit(20, 10); /// /// assert_eq!(None, c); @@ -235,8 +221,6 @@ impl char { /// Passing a large radix, causing a panic: /// /// ```should_panic - /// use std::char; - /// /// // this panics /// let _c = char::from_digit(1, 37); /// ``` @@ -1786,7 +1770,7 @@ pub fn encode_utf16_raw(mut code: u32, dst: &mut [u16]) -> &mut [u16] { } else { panic!( "encode_utf16: need {} units to encode U+{:X}, but the buffer has {}", - from_u32_unchecked(code).len_utf16(), + char::from_u32_unchecked(code).len_utf16(), code, dst.len(), ) diff --git a/library/core/src/char/mod.rs b/library/core/src/char/mod.rs index af98059cf42c3..8ec78e88733cf 100644 --- a/library/core/src/char/mod.rs +++ b/library/core/src/char/mod.rs @@ -189,7 +189,7 @@ impl Iterator for EscapeUnicode { } EscapeUnicodeState::Value => { let hex_digit = ((self.c as u32) >> (self.hex_digit_idx * 4)) & 0xf; - let c = from_digit(hex_digit, 16).unwrap(); + let c = char::from_digit(hex_digit, 16).unwrap(); if self.hex_digit_idx == 0 { self.state = EscapeUnicodeState::RightBrace; } else { diff --git a/library/core/src/iter/range.rs b/library/core/src/iter/range.rs index ac7b389b15b4d..1ad33cc973329 100644 --- a/library/core/src/iter/range.rs +++ b/library/core/src/iter/range.rs @@ -1,4 +1,3 @@ -use crate::char; use crate::convert::TryFrom; use crate::mem; use crate::ops::{self, Try}; diff --git a/library/core/src/str/iter.rs 
b/library/core/src/str/iter.rs index d969475aa484f..95c682f42d0c9 100644 --- a/library/core/src/str/iter.rs +++ b/library/core/src/str/iter.rs @@ -1,6 +1,6 @@ //! Iterators for `str` methods. -use crate::char; +use crate::char as char_mod; use crate::fmt::{self, Write}; use crate::iter::{Chain, FlatMap, Flatten}; use crate::iter::{Copied, Filter, FusedIterator, Map, TrustedLen}; @@ -1455,8 +1455,8 @@ impl FusedIterator for EncodeUtf16<'_> {} #[derive(Clone, Debug)] pub struct EscapeDebug<'a> { pub(super) inner: Chain< - Flatten>, - FlatMap, char::EscapeDebug, CharEscapeDebugContinue>, + Flatten>, + FlatMap, char_mod::EscapeDebug, CharEscapeDebugContinue>, >, } @@ -1464,14 +1464,14 @@ pub struct EscapeDebug<'a> { #[stable(feature = "str_escape", since = "1.34.0")] #[derive(Clone, Debug)] pub struct EscapeDefault<'a> { - pub(super) inner: FlatMap, char::EscapeDefault, CharEscapeDefault>, + pub(super) inner: FlatMap, char_mod::EscapeDefault, CharEscapeDefault>, } /// The return type of [`str::escape_unicode`]. #[stable(feature = "str_escape", since = "1.34.0")] #[derive(Clone, Debug)] pub struct EscapeUnicode<'a> { - pub(super) inner: FlatMap, char::EscapeUnicode, CharEscapeUnicode>, + pub(super) inner: FlatMap, char_mod::EscapeUnicode, CharEscapeUnicode>, } macro_rules! escape_types_impls { diff --git a/library/core/tests/iter/range.rs b/library/core/tests/iter/range.rs index 84498a8eae52e..0f91ffe2dfc94 100644 --- a/library/core/tests/iter/range.rs +++ b/library/core/tests/iter/range.rs @@ -26,7 +26,6 @@ fn test_range() { #[test] fn test_char_range() { - use std::char; // Miri is too slow let from = if cfg!(miri) { char::from_u32(0xD800 - 10).unwrap() } else { '\0' }; let to = if cfg!(miri) { char::from_u32(0xDFFF + 10).unwrap() } else { char::MAX }; diff --git a/library/proc_macro/src/bridge/rpc.rs b/library/proc_macro/src/bridge/rpc.rs index e9d7a46c06f6d..5b1bfb30983b2 100644 --- a/library/proc_macro/src/bridge/rpc.rs +++ b/library/proc_macro/src/bridge/rpc.rs @@ -1,7 +1,6 @@ //! Serialization for client-server communication. use std::any::Any; -use std::char; use std::io::Write; use std::num::NonZeroU32; use std::str; diff --git a/library/std/src/sys/windows/stdio.rs b/library/std/src/sys/windows/stdio.rs index 70c9b14a08fa7..c2cd48470bd9a 100644 --- a/library/std/src/sys/windows/stdio.rs +++ b/library/std/src/sys/windows/stdio.rs @@ -1,6 +1,5 @@ #![unstable(issue = "none", feature = "windows_stdio")] -use crate::char::decode_utf16; use crate::cmp; use crate::io; use crate::mem::MaybeUninit; @@ -369,7 +368,7 @@ fn read_u16s(handle: c::HANDLE, buf: &mut [MaybeUninit]) -> io::Result io::Result { let mut written = 0; - for chr in decode_utf16(utf16.iter().cloned()) { + for chr in char::decode_utf16(utf16.iter().cloned()) { match chr { Ok(chr) => { chr.encode_utf8(&mut utf8[written..]); diff --git a/library/std/src/sys_common/wtf8.rs b/library/std/src/sys_common/wtf8.rs index dd53767d4521f..e202d17e1c23b 100644 --- a/library/std/src/sys_common/wtf8.rs +++ b/library/std/src/sys_common/wtf8.rs @@ -18,10 +18,10 @@ #[cfg(test)] mod tests; +use core::char::{encode_utf16_raw, encode_utf8_raw}; use core::str::next_code_point; use crate::borrow::Cow; -use crate::char; use crate::collections::TryReserveError; use crate::fmt; use crate::hash::{Hash, Hasher}; @@ -235,7 +235,7 @@ impl Wtf8Buf { /// This does **not** include the WTF-8 concatenation check or `is_known_utf8` check. 
fn push_code_point_unchecked(&mut self, code_point: CodePoint) { let mut bytes = [0; 4]; - let bytes = char::encode_utf8_raw(code_point.value, &mut bytes); + let bytes = encode_utf8_raw(code_point.value, &mut bytes); self.bytes.extend_from_slice(bytes) } @@ -939,7 +939,7 @@ impl<'a> Iterator for EncodeWide<'a> { let mut buf = [0; 2]; self.code_points.next().map(|code_point| { - let n = char::encode_utf16_raw(code_point.value, &mut buf).len(); + let n = encode_utf16_raw(code_point.value, &mut buf).len(); if n == 2 { self.extra = buf[1]; } From 60075a6625bb5d4064ef02123b84bd81e574c8e6 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 14 Jan 2023 12:19:29 +0100 Subject: [PATCH 037/501] Remove hover inlay tooltips, replace them with location links --- crates/ide/src/inlay_hints.rs | 88 ++++++---- crates/ide/src/inlay_hints/adjustment.rs | 43 +---- crates/ide/src/inlay_hints/bind_pat.rs | 28 +--- crates/ide/src/inlay_hints/binding_mode.rs | 18 +- crates/ide/src/inlay_hints/chaining.rs | 158 +++--------------- crates/ide/src/inlay_hints/closing_brace.rs | 10 +- crates/ide/src/inlay_hints/closure_ret.rs | 5 +- crates/ide/src/inlay_hints/discriminant.rs | 23 ++- crates/ide/src/inlay_hints/fn_lifetime_fn.rs | 5 +- crates/ide/src/inlay_hints/implicit_static.rs | 3 +- crates/ide/src/inlay_hints/param_name.rs | 9 +- crates/ide/src/lib.rs | 3 +- crates/rust-analyzer/src/handlers.rs | 50 +----- crates/rust-analyzer/src/lsp_ext.rs | 7 +- crates/rust-analyzer/src/to_proto.rs | 117 ++++++------- 15 files changed, 187 insertions(+), 380 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index f4a9d7b653a29..108696673ef09 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -11,7 +11,7 @@ use smallvec::{smallvec, SmallVec}; use stdx::never; use syntax::{ ast::{self, AstNode}, - match_ast, NodeOrToken, SyntaxNode, TextRange, TextSize, + match_ast, NodeOrToken, SyntaxNode, TextRange, }; use crate::{navigation_target::TryToNav, FileId}; @@ -110,16 +110,21 @@ pub struct InlayHint { pub kind: InlayKind, /// The actual label to show in the inlay hint. pub label: InlayHintLabel, - /// The tooltip to show when hovering over the inlay hint, this may invoke other actions like - /// hover requests to show. - pub tooltip: Option, +} + +impl InlayHint { + fn closing_paren(range: TextRange) -> InlayHint { + InlayHint { range, kind: InlayKind::ClosingParenthesis, label: InlayHintLabel::from(")") } + } + fn opening_paren(range: TextRange) -> InlayHint { + InlayHint { range, kind: InlayKind::OpeningParenthesis, label: InlayHintLabel::from("(") } + } } #[derive(Debug)] pub enum InlayTooltip { String(String), - HoverRanged(FileId, TextRange), - HoverOffset(FileId, TextSize), + Markdown(String), } #[derive(Default)] @@ -128,37 +133,59 @@ pub struct InlayHintLabel { } impl InlayHintLabel { - pub fn as_simple_str(&self) -> Option<&str> { - match &*self.parts { - [part] => part.as_simple_str(), - _ => None, + pub fn simple( + s: impl Into, + tooltip: Option, + linked_location: Option, + ) -> InlayHintLabel { + InlayHintLabel { + parts: smallvec![InlayHintLabelPart { text: s.into(), linked_location, tooltip }], } } pub fn prepend_str(&mut self, s: &str) { match &mut *self.parts { - [part, ..] if part.as_simple_str().is_some() => part.text = format!("{s}{}", part.text), - _ => self.parts.insert(0, InlayHintLabelPart { text: s.into(), linked_location: None }), + [InlayHintLabelPart { text, linked_location: None, tooltip: None }, ..] 
=> { + text.insert_str(0, s) + } + _ => self.parts.insert( + 0, + InlayHintLabelPart { text: s.into(), linked_location: None, tooltip: None }, + ), } } pub fn append_str(&mut self, s: &str) { match &mut *self.parts { - [.., part] if part.as_simple_str().is_some() => part.text.push_str(s), - _ => self.parts.push(InlayHintLabelPart { text: s.into(), linked_location: None }), + [.., InlayHintLabelPart { text, linked_location: None, tooltip: None }] => { + text.push_str(s) + } + _ => self.parts.push(InlayHintLabelPart { + text: s.into(), + linked_location: None, + tooltip: None, + }), } } } impl From for InlayHintLabel { fn from(s: String) -> Self { - Self { parts: smallvec![InlayHintLabelPart { text: s, linked_location: None }] } + Self { + parts: smallvec![InlayHintLabelPart { text: s, linked_location: None, tooltip: None }], + } } } impl From<&str> for InlayHintLabel { fn from(s: &str) -> Self { - Self { parts: smallvec![InlayHintLabelPart { text: s.into(), linked_location: None }] } + Self { + parts: smallvec![InlayHintLabelPart { + text: s.into(), + linked_location: None, + tooltip: None + }], + } } } @@ -182,25 +209,25 @@ pub struct InlayHintLabelPart { /// When setting this, no tooltip must be set on the containing hint, or VS Code will display /// them both. pub linked_location: Option, -} - -impl InlayHintLabelPart { - pub fn as_simple_str(&self) -> Option<&str> { - match self { - Self { text, linked_location: None } => Some(text), - _ => None, - } - } + /// The tooltip to show when hovering over the inlay hint, this may invoke other actions like + /// hover requests to show. + pub tooltip: Option, } impl fmt::Debug for InlayHintLabelPart { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.as_simple_str() { - Some(string) => string.fmt(f), - None => f + match self { + Self { text, linked_location: None, tooltip: None } => text.fmt(f), + Self { text, linked_location, tooltip } => f .debug_struct("InlayHintLabelPart") - .field("text", &self.text) - .field("linked_location", &self.linked_location) + .field("text", text) + .field("linked_location", linked_location) + .field( + "tooltip", + &tooltip.as_ref().map_or("", |it| match it { + InlayTooltip::String(it) | InlayTooltip::Markdown(it) => it, + }), + ) .finish(), } } @@ -249,6 +276,7 @@ impl InlayHintLabelBuilder<'_> { self.result.parts.push(InlayHintLabelPart { text: take(&mut self.last_part), linked_location: self.location.take(), + tooltip: None, }); } diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index bdd7c05e008c1..9c4caec605fe7 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -44,27 +44,12 @@ pub(super) fn hints( mode_and_needs_parens_for_adjustment_hints(expr, config.adjustment_hints_mode); if needs_outer_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::OpeningParenthesis, - label: "(".into(), - tooltip: None, - }); + acc.push(InlayHint::opening_paren(expr.syntax().text_range())); } if postfix && needs_inner_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::OpeningParenthesis, - label: "(".into(), - tooltip: None, - }); - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::ClosingParenthesis, - label: ")".into(), - tooltip: None, - }); + acc.push(InlayHint::opening_paren(expr.syntax().text_range())); + acc.push(InlayHint::closing_paren(expr.syntax().text_range())); } let (mut tmp0, mut tmp1); @@ 
-118,30 +103,14 @@ pub(super) fn hints( InlayKind::AdjustmentHint }, label: if postfix { format!(".{}", text.trim_end()).into() } else { text.into() }, - tooltip: None, }); } if !postfix && needs_inner_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::OpeningParenthesis, - label: "(".into(), - tooltip: None, - }); - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::ClosingParenthesis, - label: ")".into(), - tooltip: None, - }); + acc.push(InlayHint::opening_paren(expr.syntax().text_range())); + acc.push(InlayHint::closing_paren(expr.syntax().text_range())); } if needs_outer_parens { - acc.push(InlayHint { - range: expr.syntax().text_range(), - kind: InlayKind::ClosingParenthesis, - label: ")".into(), - tooltip: None, - }); + acc.push(InlayHint::closing_paren(expr.syntax().text_range())); } Some(()) } diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index adec19c765a14..1a4bd353e7736 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -12,9 +12,7 @@ use syntax::{ match_ast, }; -use crate::{ - inlay_hints::closure_has_block_body, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, -}; +use crate::{inlay_hints::closure_has_block_body, InlayHint, InlayHintsConfig, InlayKind}; use super::label_of_ty; @@ -22,7 +20,7 @@ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - file_id: FileId, + _file_id: FileId, pat: &ast::IdentPat, ) -> Option<()> { if !config.type_hints { @@ -52,10 +50,6 @@ pub(super) fn hints( }, kind: InlayKind::TypeHint, label, - tooltip: pat - .name() - .map(|it| it.syntax().text_range()) - .map(|it| InlayTooltip::HoverRanged(file_id, it)), }); Some(()) @@ -326,14 +320,6 @@ fn main(a: SliceIter<'_, Container>) { label: [ "impl Iterator>", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 484..554, - ), - ), }, InlayHint { range: 484..485, @@ -350,6 +336,7 @@ fn main(a: SliceIter<'_, Container>) { range: 289..298, }, ), + tooltip: "", }, "<", InlayHintLabelPart { @@ -362,17 +349,10 @@ fn main(a: SliceIter<'_, Container>) { range: 238..247, }, ), + tooltip: "", }, ">", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 484..485, - ), - ), }, ] "#]], diff --git a/crates/ide/src/inlay_hints/binding_mode.rs b/crates/ide/src/inlay_hints/binding_mode.rs index a0166d0048ab0..d0e42bc9141b0 100644 --- a/crates/ide/src/inlay_hints/binding_mode.rs +++ b/crates/ide/src/inlay_hints/binding_mode.rs @@ -7,7 +7,7 @@ use ide_db::RootDatabase; use syntax::ast::{self, AstNode}; -use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; +use crate::{InlayHint, InlayHintsConfig, InlayKind}; pub(super) fn hints( acc: &mut Vec, @@ -44,7 +44,6 @@ pub(super) fn hints( range, kind: InlayKind::BindingModeHint, label: r.to_string().into(), - tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), }); }); match pat { @@ -59,22 +58,11 @@ pub(super) fn hints( range: pat.syntax().text_range(), kind: InlayKind::BindingModeHint, label: bm.to_string().into(), - tooltip: Some(InlayTooltip::String("Inferred binding mode".into())), }); } ast::Pat::OrPat(pat) if !pattern_adjustments.is_empty() && outer_paren_pat.is_none() => { - acc.push(InlayHint { - range: pat.syntax().text_range(), - kind: InlayKind::OpeningParenthesis, - label: "(".into(), - tooltip: None, - }); - acc.push(InlayHint { - range: pat.syntax().text_range(), - kind: 
InlayKind::ClosingParenthesis, - label: ")".into(), - tooltip: None, - }); + acc.push(InlayHint::opening_paren(pat.syntax().text_range())); + acc.push(InlayHint::closing_paren(pat.syntax().text_range())); } _ => (), } diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index 8810d5d34dbd9..c00fb83a88e19 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -5,7 +5,7 @@ use syntax::{ Direction, NodeOrToken, SyntaxKind, T, }; -use crate::{FileId, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; +use crate::{FileId, InlayHint, InlayHintsConfig, InlayKind}; use super::label_of_ty; @@ -13,7 +13,7 @@ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - file_id: FileId, + _file_id: FileId, expr: &ast::Expr, ) -> Option<()> { if !config.chaining_hints { @@ -61,7 +61,6 @@ pub(super) fn hints( range: expr.syntax().text_range(), kind: InlayKind::ChainingHint, label: label_of_ty(famous_defs, config, ty)?, - tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())), }); } } @@ -124,17 +123,10 @@ fn main() { range: 63..64, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 147..172, - ), - ), }, InlayHint { range: 147..154, @@ -151,17 +143,10 @@ fn main() { range: 7..8, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 147..154, - ), - ), }, ] "#]], @@ -214,14 +199,6 @@ fn main() { label: [ "C", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..190, - ), - ), }, InlayHint { range: 143..179, @@ -229,14 +206,6 @@ fn main() { label: [ "B", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..179, - ), - ), }, ] "#]], @@ -282,17 +251,10 @@ fn main() { range: 51..52, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..190, - ), - ), }, InlayHint { range: 143..179, @@ -309,17 +271,10 @@ fn main() { range: 29..30, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 143..179, - ), - ), }, ] "#]], @@ -366,6 +321,7 @@ fn main() { range: 23..24, }, ), + tooltip: "", }, "<", InlayHintLabelPart { @@ -378,17 +334,10 @@ fn main() { range: 55..56, }, ), + tooltip: "", }, ">", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 246..283, - ), - ), }, InlayHint { range: 246..265, @@ -405,6 +354,7 @@ fn main() { range: 7..8, }, ), + tooltip: "", }, "<", InlayHintLabelPart { @@ -417,17 +367,10 @@ fn main() { range: 55..56, }, ), + tooltip: "", }, ">", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 246..265, - ), - ), }, ] "#]], @@ -467,14 +410,6 @@ fn main() { label: [ "impl Iterator", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..241, - ), - ), }, InlayHint { range: 174..224, @@ -482,14 +417,6 @@ fn main() { label: [ "impl Iterator", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..224, - ), - ), }, InlayHint { range: 174..206, @@ -497,14 +424,6 @@ fn main() { label: [ "impl Iterator", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..206, - ), - ), }, InlayHint { range: 174..189, @@ -521,17 +440,10 @@ fn main() { range: 24..30, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 174..189, - ), - ), }, ] "#]], @@ -577,17 +489,10 @@ fn main() { range: 7..13, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 124..130, - ), - ), }, InlayHint { range: 145..185, @@ 
-604,17 +509,10 @@ fn main() { range: 7..13, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 145..185, - ), - ), }, InlayHint { range: 145..168, @@ -631,32 +529,28 @@ fn main() { range: 7..13, }, ), + tooltip: "", }, "", ], - tooltip: Some( - HoverRanged( - FileId( - 0, - ), - 145..168, - ), - ), }, InlayHint { range: 222..228, kind: ParameterHint, label: [ - "self", - ], - tooltip: Some( - HoverOffset( - FileId( - 0, + InlayHintLabelPart { + text: "self", + linked_location: Some( + FileRange { + file_id: FileId( + 0, + ), + range: 42..46, + }, ), - 42, - ), - ), + tooltip: "", + }, + ], }, ] "#]], diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs index 12abefc140d52..684d74282bd95 100644 --- a/crates/ide/src/inlay_hints/closing_brace.rs +++ b/crates/ide/src/inlay_hints/closing_brace.rs @@ -5,15 +5,12 @@ //! ``` use hir::{HirDisplay, Semantics}; use ide_db::{base_db::FileRange, RootDatabase}; -use smallvec::smallvec; use syntax::{ ast::{self, AstNode, HasName}, match_ast, SyntaxKind, SyntaxNode, T, }; -use crate::{ - inlay_hints::InlayHintLabelPart, FileId, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, -}; +use crate::{FileId, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind}; pub(super) fn hints( acc: &mut Vec, @@ -117,10 +114,7 @@ pub(super) fn hints( acc.push(InlayHint { range: closing_token.text_range(), kind: InlayKind::ClosingBraceHint, - label: InlayHintLabel { - parts: smallvec![InlayHintLabelPart { text: label, linked_location }], - }, - tooltip: None, // provided by label part location + label: InlayHintLabel::simple(label, None, linked_location), }); None diff --git a/crates/ide/src/inlay_hints/closure_ret.rs b/crates/ide/src/inlay_hints/closure_ret.rs index d9929beaac0c2..b1aa726ba5e2a 100644 --- a/crates/ide/src/inlay_hints/closure_ret.rs +++ b/crates/ide/src/inlay_hints/closure_ret.rs @@ -4,7 +4,7 @@ use syntax::ast::{self, AstNode}; use crate::{ inlay_hints::closure_has_block_body, ClosureReturnTypeHints, InlayHint, InlayHintsConfig, - InlayKind, InlayTooltip, + InlayKind, }; use super::label_of_ty; @@ -13,7 +13,7 @@ pub(super) fn hints( acc: &mut Vec, famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, - file_id: FileId, + _file_id: FileId, closure: ast::ClosureExpr, ) -> Option<()> { if config.closure_return_type_hints == ClosureReturnTypeHints::Never { @@ -43,7 +43,6 @@ pub(super) fn hints( range: param_list.syntax().text_range(), kind: InlayKind::ClosureReturnTypeHint, label: label_of_ty(famous_defs, config, ty)?, - tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())), }); Some(()) } diff --git a/crates/ide/src/inlay_hints/discriminant.rs b/crates/ide/src/inlay_hints/discriminant.rs index f32c4bdf2883c..6afc4b6330700 100644 --- a/crates/ide/src/inlay_hints/discriminant.rs +++ b/crates/ide/src/inlay_hints/discriminant.rs @@ -7,7 +7,9 @@ use ide_db::{base_db::FileId, famous_defs::FamousDefs}; use syntax::ast::{self, AstNode, HasName}; -use crate::{DiscriminantHints, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; +use crate::{ + DiscriminantHints, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, InlayTooltip, +}; pub(super) fn hints( acc: &mut Vec, @@ -42,14 +44,17 @@ pub(super) fn hints( None => name.syntax().text_range(), }, kind: InlayKind::DiscriminantHint, - label: match &d { - Ok(v) => format!("{}", v).into(), - Err(_) => "?".into(), - }, - tooltip: Some(InlayTooltip::String(match &d { - 
Ok(_) => "enum variant discriminant".into(), - Err(e) => format!("{e:?}").into(), - })), + label: InlayHintLabel::simple( + match &d { + Ok(v) => format!("{}", v), + Err(_) => "?".into(), + }, + Some(InlayTooltip::String(match &d { + Ok(_) => "enum variant discriminant".into(), + Err(e) => format!("{e:?}").into(), + })), + None, + ), }); Some(()) diff --git a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs index 2aa5e3dc734fc..ce6f2e486c0be 100644 --- a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs +++ b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs @@ -10,7 +10,7 @@ use syntax::{ SyntaxToken, }; -use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints}; +use crate::{InlayHint, InlayHintsConfig, InlayKind, LifetimeElisionHints}; pub(super) fn hints( acc: &mut Vec, @@ -25,7 +25,6 @@ pub(super) fn hints( range: t.text_range(), kind: InlayKind::LifetimeHint, label: label.into(), - tooltip: Some(InlayTooltip::String("Elided lifetime".into())), }; let param_list = func.param_list()?; @@ -190,14 +189,12 @@ pub(super) fn hints( if is_empty { "" } else { ", " } ) .into(), - tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), }); } (None, allocated_lifetimes) => acc.push(InlayHint { range: func.name()?.syntax().text_range(), kind: InlayKind::GenericParamListHint, label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(), - tooltip: Some(InlayTooltip::String("Elided lifetimes".into())), }), } Some(()) diff --git a/crates/ide/src/inlay_hints/implicit_static.rs b/crates/ide/src/inlay_hints/implicit_static.rs index 588a0e3b6a4b6..f65b1d09a4067 100644 --- a/crates/ide/src/inlay_hints/implicit_static.rs +++ b/crates/ide/src/inlay_hints/implicit_static.rs @@ -8,7 +8,7 @@ use syntax::{ SyntaxKind, }; -use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints}; +use crate::{InlayHint, InlayHintsConfig, InlayKind, LifetimeElisionHints}; pub(super) fn hints( acc: &mut Vec, @@ -34,7 +34,6 @@ pub(super) fn hints( range: t.text_range(), kind: InlayKind::LifetimeHint, label: "'static".to_owned().into(), - tooltip: Some(InlayTooltip::String("Elided static lifetime".into())), }); } } diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs index ecee67632e35e..dbbc35d5893c5 100644 --- a/crates/ide/src/inlay_hints/param_name.rs +++ b/crates/ide/src/inlay_hints/param_name.rs @@ -10,7 +10,7 @@ use ide_db::{base_db::FileRange, RootDatabase}; use stdx::to_lower_snake_case; use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp}; -use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip}; +use crate::{InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind}; pub(super) fn hints( acc: &mut Vec, @@ -43,12 +43,12 @@ pub(super) fn hints( !should_hide_param_name_hint(sema, &callable, param_name, arg) }) .map(|(param, param_name, _, FileRange { range, .. 
})| { - let mut tooltip = None; + let mut linked_location = None; if let Some(name) = param { if let hir::CallableKind::Function(f) = callable.kind() { // assert the file is cached so we can map out of macros if let Some(_) = sema.source(f) { - tooltip = sema.original_range_opt(name.syntax()); + linked_location = sema.original_range_opt(name.syntax()); } } } @@ -56,8 +56,7 @@ pub(super) fn hints( InlayHint { range, kind: InlayKind::ParameterHint, - label: param_name.into(), - tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())), + label: InlayHintLabel::simple(param_name, None, linked_location), } }); diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 239456cb28167..8424d82aa1830 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -82,7 +82,8 @@ pub use crate::{ hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult}, inlay_hints::{ AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints, InlayHint, - InlayHintLabel, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints, + InlayHintLabel, InlayHintLabelPart, InlayHintsConfig, InlayKind, InlayTooltip, + LifetimeElisionHints, }, join_lines::JoinLinesConfig, markup::Markup, diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 59bdd3061272c..033ef75cca0de 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -29,7 +29,6 @@ use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; use stdx::{format_to, never}; use syntax::{algo, ast, AstNode, TextRange, TextSize}; -use tracing::error; use vfs::AbsPathBuf; use crate::{ @@ -1360,55 +1359,10 @@ pub(crate) fn handle_inlay_hints( } pub(crate) fn handle_inlay_hints_resolve( - snap: GlobalStateSnapshot, - mut hint: InlayHint, + _snap: GlobalStateSnapshot, + hint: InlayHint, ) -> Result { let _p = profile::span("handle_inlay_hints_resolve"); - let data = match hint.data.take() { - Some(it) => it, - None => return Ok(hint), - }; - - let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?; - - match snap.url_file_version(&resolve_data.text_document.uri) { - Some(version) if version == resolve_data.text_document.version => {} - Some(version) => { - error!( - "attempted inlayHints/resolve of '{}' at version {} while server version is {}", - resolve_data.text_document.uri, resolve_data.text_document.version, version, - ); - return Ok(hint); - } - None => { - error!( - "attempted inlayHints/resolve of unknown file '{}' at version {}", - resolve_data.text_document.uri, resolve_data.text_document.version, - ); - return Ok(hint); - } - } - let file_range = from_proto::file_range_uri( - &snap, - &resolve_data.text_document.uri, - match resolve_data.position { - PositionOrRange::Position(pos) => Range::new(pos, pos), - PositionOrRange::Range(range) => range, - }, - )?; - let info = match snap.analysis.hover(&snap.config.hover(), file_range)? { - None => return Ok(hint), - Some(info) => info, - }; - - let markup_kind = - snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind); - - // FIXME: hover actions? 
- hint.tooltip = Some(lsp_types::InlayHintTooltip::MarkupContent(to_proto::markup_content( - info.info.markup, - markup_kind, - ))); Ok(hint) } diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index 65620b4209b40..b117acd1b0f7c 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -3,11 +3,11 @@ use std::{collections::HashMap, path::PathBuf}; use lsp_types::request::Request; +use lsp_types::PositionEncodingKind; use lsp_types::{ notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams, PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams, }; -use lsp_types::{PositionEncodingKind, VersionedTextDocumentIdentifier}; use serde::{Deserialize, Serialize}; pub enum AnalyzerStatus {} @@ -568,10 +568,7 @@ pub struct CompletionResolveData { } #[derive(Debug, Serialize, Deserialize)] -pub struct InlayHintResolveData { - pub text_document: VersionedTextDocumentIdentifier, - pub position: PositionOrRange, -} +pub struct InlayHintResolveData {} #[derive(Debug, Serialize, Deserialize)] pub struct CompletionImport { diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index e736b2ff9a3be..d9cbb2bc206a2 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -9,9 +9,9 @@ use ide::{ Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem, CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit, Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint, - InlayHintLabel, InlayKind, Markup, NavigationTarget, ReferenceCategory, RenameError, Runnable, - Severity, SignatureHelp, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, - TextSize, + InlayHintLabel, InlayHintLabelPart, InlayKind, Markup, NavigationTarget, ReferenceCategory, + RenameError, Runnable, Severity, SignatureHelp, SourceChange, StructureNodeKind, SymbolKind, + TextEdit, TextRange, TextSize, }; use itertools::Itertools; use serde_json::to_value; @@ -438,6 +438,8 @@ pub(crate) fn inlay_hint( _ => {} } + let (label, tooltip) = inlay_hint_label(snap, inlay_hint.label)?; + Ok(lsp_types::InlayHint { position: match inlay_hint.kind { // before annotated thing @@ -481,7 +483,9 @@ pub(crate) fn inlay_hint( | InlayKind::TypeHint | InlayKind::DiscriminantHint | InlayKind::ClosingBraceHint => false, - InlayKind::BindingModeHint => inlay_hint.label.as_simple_str() != Some("&"), + InlayKind::BindingModeHint => { + matches!(&label, lsp_types::InlayHintLabel::String(s) if s != "&") + } InlayKind::ParameterHint | InlayKind::LifetimeHint => true, }), kind: match inlay_hint.kind { @@ -500,68 +504,67 @@ pub(crate) fn inlay_hint( | InlayKind::ClosingBraceHint => None, }, text_edits: None, - data: (|| match inlay_hint.tooltip { - Some(ide::InlayTooltip::HoverOffset(file_id, offset)) => { - let uri = url(snap, file_id); - let line_index = snap.file_line_index(file_id).ok()?; - - let text_document = lsp_types::VersionedTextDocumentIdentifier { - version: snap.url_file_version(&uri)?, - uri, - }; - to_value(lsp_ext::InlayHintResolveData { - text_document, - position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)), - }) - .ok() - } - Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => { - let uri = url(snap, file_id); - let text_document = lsp_types::VersionedTextDocumentIdentifier { - version: snap.url_file_version(&uri)?, - uri, - }; - let 
line_index = snap.file_line_index(file_id).ok()?; - to_value(lsp_ext::InlayHintResolveData { - text_document, - position: lsp_ext::PositionOrRange::Range(range(&line_index, text_range)), - }) - .ok() - } - _ => None, - })(), - tooltip: Some(match inlay_hint.tooltip { - Some(ide::InlayTooltip::String(s)) => lsp_types::InlayHintTooltip::String(s), - _ => lsp_types::InlayHintTooltip::String(inlay_hint.label.to_string()), - }), - label: inlay_hint_label(snap, inlay_hint.label)?, + data: None, + tooltip, + label, }) } fn inlay_hint_label( snap: &GlobalStateSnapshot, - label: InlayHintLabel, -) -> Cancellable { - Ok(match label.as_simple_str() { - Some(s) => lsp_types::InlayHintLabel::String(s.into()), - None => lsp_types::InlayHintLabel::LabelParts( - label + mut label: InlayHintLabel, +) -> Cancellable<(lsp_types::InlayHintLabel, Option)> { + let res = match &*label.parts { + [InlayHintLabelPart { linked_location: None, .. }] => { + let InlayHintLabelPart { text, tooltip, .. } = label.parts.pop().unwrap(); + ( + lsp_types::InlayHintLabel::String(text), + match tooltip { + Some(ide::InlayTooltip::String(s)) => { + Some(lsp_types::InlayHintTooltip::String(s)) + } + Some(ide::InlayTooltip::Markdown(s)) => { + Some(lsp_types::InlayHintTooltip::MarkupContent(lsp_types::MarkupContent { + kind: lsp_types::MarkupKind::Markdown, + value: s, + })) + } + None => None, + }, + ) + } + _ => { + let parts = label .parts .into_iter() .map(|part| { - Ok(lsp_types::InlayHintLabelPart { - value: part.text, - tooltip: None, - location: part - .linked_location - .map(|range| location(snap, range)) - .transpose()?, - command: None, - }) + part.linked_location.map(|range| location(snap, range)).transpose().map( + |location| lsp_types::InlayHintLabelPart { + value: part.text, + tooltip: match part.tooltip { + Some(ide::InlayTooltip::String(s)) => { + Some(lsp_types::InlayHintLabelPartTooltip::String(s)) + } + Some(ide::InlayTooltip::Markdown(s)) => { + Some(lsp_types::InlayHintLabelPartTooltip::MarkupContent( + lsp_types::MarkupContent { + kind: lsp_types::MarkupKind::Markdown, + value: s, + }, + )) + } + None => None, + }, + location, + command: None, + }, + ) }) - .collect::>>()?, - ), - }) + .collect::>()?; + (lsp_types::InlayHintLabel::LabelParts(parts), None) + } + }; + Ok(res) } static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1); From f2444b2a402f8d551152cb482686c7839574ac1d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 14 Jan 2023 12:20:12 +0100 Subject: [PATCH 038/501] Remove unnecessary Hint suffix on InlayKind variants --- crates/ide/src/inlay_hints.rs | 22 ++--- crates/ide/src/inlay_hints/adjustment.rs | 6 +- crates/ide/src/inlay_hints/bind_pat.rs | 6 +- crates/ide/src/inlay_hints/binding_mode.rs | 8 +- crates/ide/src/inlay_hints/chaining.rs | 34 +++---- crates/ide/src/inlay_hints/closing_brace.rs | 2 +- crates/ide/src/inlay_hints/closure_ret.rs | 2 +- crates/ide/src/inlay_hints/discriminant.rs | 2 +- crates/ide/src/inlay_hints/fn_lifetime_fn.rs | 6 +- crates/ide/src/inlay_hints/implicit_static.rs | 2 +- crates/ide/src/inlay_hints/param_name.rs | 2 +- crates/rust-analyzer/src/to_proto.rs | 88 +++++++++---------- docs/dev/lsp-extensions.md | 2 +- 13 files changed, 87 insertions(+), 95 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 108696673ef09..861bf1c66cbe9 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -86,17 +86,17 @@ pub enum AdjustmentHintsMode { #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum 
InlayKind { - BindingModeHint, - ChainingHint, - ClosingBraceHint, - ClosureReturnTypeHint, - GenericParamListHint, - AdjustmentHint, - AdjustmentHintPostfix, - LifetimeHint, - ParameterHint, - TypeHint, - DiscriminantHint, + BindingMode, + Chaining, + ClosingBrace, + ClosureReturnType, + GenericParamList, + Adjustment, + AdjustmentPostfix, + Lifetime, + Parameter, + Type, + Discriminant, OpeningParenthesis, ClosingParenthesis, } diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index 9c4caec605fe7..581813f3b7700 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -97,11 +97,7 @@ pub(super) fn hints( }; acc.push(InlayHint { range: expr.syntax().text_range(), - kind: if postfix { - InlayKind::AdjustmentHintPostfix - } else { - InlayKind::AdjustmentHint - }, + kind: if postfix { InlayKind::AdjustmentPostfix } else { InlayKind::Adjustment }, label: if postfix { format!(".{}", text.trim_end()).into() } else { text.into() }, }); } diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index 1a4bd353e7736..5227c651ff454 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -48,7 +48,7 @@ pub(super) fn hints( Some(name) => name.syntax().text_range(), None => pat.syntax().text_range(), }, - kind: InlayKind::TypeHint, + kind: InlayKind::Type, label, }); @@ -316,14 +316,14 @@ fn main(a: SliceIter<'_, Container>) { [ InlayHint { range: 484..554, - kind: ChainingHint, + kind: Chaining, label: [ "impl Iterator>", ], }, InlayHint { range: 484..485, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { diff --git a/crates/ide/src/inlay_hints/binding_mode.rs b/crates/ide/src/inlay_hints/binding_mode.rs index d0e42bc9141b0..11b9cd269bfa8 100644 --- a/crates/ide/src/inlay_hints/binding_mode.rs +++ b/crates/ide/src/inlay_hints/binding_mode.rs @@ -40,11 +40,7 @@ pub(super) fn hints( (true, false) => "&", _ => return, }; - acc.push(InlayHint { - range, - kind: InlayKind::BindingModeHint, - label: r.to_string().into(), - }); + acc.push(InlayHint { range, kind: InlayKind::BindingMode, label: r.to_string().into() }); }); match pat { ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => { @@ -56,7 +52,7 @@ pub(super) fn hints( }; acc.push(InlayHint { range: pat.syntax().text_range(), - kind: InlayKind::BindingModeHint, + kind: InlayKind::BindingMode, label: bm.to_string().into(), }); } diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index c00fb83a88e19..e0045a53d7469 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -59,7 +59,7 @@ pub(super) fn hints( } acc.push(InlayHint { range: expr.syntax().text_range(), - kind: InlayKind::ChainingHint, + kind: InlayKind::Chaining, label: label_of_ty(famous_defs, config, ty)?, }); } @@ -110,7 +110,7 @@ fn main() { [ InlayHint { range: 147..172, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -130,7 +130,7 @@ fn main() { }, InlayHint { range: 147..154, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -195,14 +195,14 @@ fn main() { [ InlayHint { range: 143..190, - kind: ChainingHint, + kind: Chaining, label: [ "C", ], }, InlayHint { range: 143..179, - kind: ChainingHint, + kind: Chaining, label: [ "B", ], @@ -238,7 +238,7 @@ fn main() { [ InlayHint { range: 143..190, - kind: ChainingHint, + kind: 
Chaining, label: [ "", InlayHintLabelPart { @@ -258,7 +258,7 @@ fn main() { }, InlayHint { range: 143..179, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -308,7 +308,7 @@ fn main() { [ InlayHint { range: 246..283, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -341,7 +341,7 @@ fn main() { }, InlayHint { range: 246..265, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -406,28 +406,28 @@ fn main() { [ InlayHint { range: 174..241, - kind: ChainingHint, + kind: Chaining, label: [ "impl Iterator", ], }, InlayHint { range: 174..224, - kind: ChainingHint, + kind: Chaining, label: [ "impl Iterator", ], }, InlayHint { range: 174..206, - kind: ChainingHint, + kind: Chaining, label: [ "impl Iterator", ], }, InlayHint { range: 174..189, - kind: ChainingHint, + kind: Chaining, label: [ "&mut ", InlayHintLabelPart { @@ -476,7 +476,7 @@ fn main() { [ InlayHint { range: 124..130, - kind: TypeHint, + kind: Type, label: [ "", InlayHintLabelPart { @@ -496,7 +496,7 @@ fn main() { }, InlayHint { range: 145..185, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -516,7 +516,7 @@ fn main() { }, InlayHint { range: 145..168, - kind: ChainingHint, + kind: Chaining, label: [ "", InlayHintLabelPart { @@ -536,7 +536,7 @@ fn main() { }, InlayHint { range: 222..228, - kind: ParameterHint, + kind: Parameter, label: [ InlayHintLabelPart { text: "self", diff --git a/crates/ide/src/inlay_hints/closing_brace.rs b/crates/ide/src/inlay_hints/closing_brace.rs index 684d74282bd95..aae805f78d690 100644 --- a/crates/ide/src/inlay_hints/closing_brace.rs +++ b/crates/ide/src/inlay_hints/closing_brace.rs @@ -113,7 +113,7 @@ pub(super) fn hints( .flatten(); acc.push(InlayHint { range: closing_token.text_range(), - kind: InlayKind::ClosingBraceHint, + kind: InlayKind::ClosingBrace, label: InlayHintLabel::simple(label, None, linked_location), }); diff --git a/crates/ide/src/inlay_hints/closure_ret.rs b/crates/ide/src/inlay_hints/closure_ret.rs index b1aa726ba5e2a..f03a18b8e960f 100644 --- a/crates/ide/src/inlay_hints/closure_ret.rs +++ b/crates/ide/src/inlay_hints/closure_ret.rs @@ -41,7 +41,7 @@ pub(super) fn hints( } acc.push(InlayHint { range: param_list.syntax().text_range(), - kind: InlayKind::ClosureReturnTypeHint, + kind: InlayKind::ClosureReturnType, label: label_of_ty(famous_defs, config, ty)?, }); Some(()) diff --git a/crates/ide/src/inlay_hints/discriminant.rs b/crates/ide/src/inlay_hints/discriminant.rs index 6afc4b6330700..310295cc37935 100644 --- a/crates/ide/src/inlay_hints/discriminant.rs +++ b/crates/ide/src/inlay_hints/discriminant.rs @@ -43,7 +43,7 @@ pub(super) fn hints( Some(field_list) => name.syntax().text_range().cover(field_list.syntax().text_range()), None => name.syntax().text_range(), }, - kind: InlayKind::DiscriminantHint, + kind: InlayKind::Discriminant, label: InlayHintLabel::simple( match &d { Ok(v) => format!("{}", v), diff --git a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs index ce6f2e486c0be..b7182085b312e 100644 --- a/crates/ide/src/inlay_hints/fn_lifetime_fn.rs +++ b/crates/ide/src/inlay_hints/fn_lifetime_fn.rs @@ -23,7 +23,7 @@ pub(super) fn hints( let mk_lt_hint = |t: SyntaxToken, label: String| InlayHint { range: t.text_range(), - kind: InlayKind::LifetimeHint, + kind: InlayKind::Lifetime, label: label.into(), }; @@ -182,7 +182,7 @@ pub(super) fn hints( let is_empty = gpl.generic_params().next().is_none(); acc.push(InlayHint { range: 
angle_tok.text_range(), - kind: InlayKind::LifetimeHint, + kind: InlayKind::Lifetime, label: format!( "{}{}", allocated_lifetimes.iter().format(", "), @@ -193,7 +193,7 @@ pub(super) fn hints( } (None, allocated_lifetimes) => acc.push(InlayHint { range: func.name()?.syntax().text_range(), - kind: InlayKind::GenericParamListHint, + kind: InlayKind::GenericParamList, label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(), }), } diff --git a/crates/ide/src/inlay_hints/implicit_static.rs b/crates/ide/src/inlay_hints/implicit_static.rs index f65b1d09a4067..1122ee2e3925d 100644 --- a/crates/ide/src/inlay_hints/implicit_static.rs +++ b/crates/ide/src/inlay_hints/implicit_static.rs @@ -32,7 +32,7 @@ pub(super) fn hints( let t = ty.amp_token()?; acc.push(InlayHint { range: t.text_range(), - kind: InlayKind::LifetimeHint, + kind: InlayKind::Lifetime, label: "'static".to_owned().into(), }); } diff --git a/crates/ide/src/inlay_hints/param_name.rs b/crates/ide/src/inlay_hints/param_name.rs index dbbc35d5893c5..9cdae63241044 100644 --- a/crates/ide/src/inlay_hints/param_name.rs +++ b/crates/ide/src/inlay_hints/param_name.rs @@ -55,7 +55,7 @@ pub(super) fn hints( InlayHint { range, - kind: InlayKind::ParameterHint, + kind: InlayKind::Parameter, label: InlayHintLabel::simple(param_name, None, linked_location), } }); diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index d9cbb2bc206a2..0f0642bb4b568 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -431,10 +431,10 @@ pub(crate) fn inlay_hint( mut inlay_hint: InlayHint, ) -> Cancellable { match inlay_hint.kind { - InlayKind::ParameterHint if render_colons => inlay_hint.label.append_str(":"), - InlayKind::TypeHint if render_colons => inlay_hint.label.prepend_str(": "), - InlayKind::ClosureReturnTypeHint => inlay_hint.label.prepend_str(" -> "), - InlayKind::DiscriminantHint => inlay_hint.label.prepend_str(" = "), + InlayKind::Parameter if render_colons => inlay_hint.label.append_str(":"), + InlayKind::Type if render_colons => inlay_hint.label.prepend_str(": "), + InlayKind::ClosureReturnType => inlay_hint.label.prepend_str(" -> "), + InlayKind::Discriminant => inlay_hint.label.prepend_str(" = "), _ => {} } @@ -444,64 +444,64 @@ pub(crate) fn inlay_hint( position: match inlay_hint.kind { // before annotated thing InlayKind::OpeningParenthesis - | InlayKind::ParameterHint - | InlayKind::AdjustmentHint - | InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()), + | InlayKind::Parameter + | InlayKind::Adjustment + | InlayKind::BindingMode => position(line_index, inlay_hint.range.start()), // after annotated thing - InlayKind::ClosureReturnTypeHint - | InlayKind::TypeHint - | InlayKind::DiscriminantHint - | InlayKind::ChainingHint - | InlayKind::GenericParamListHint + InlayKind::ClosureReturnType + | InlayKind::Type + | InlayKind::Discriminant + | InlayKind::Chaining + | InlayKind::GenericParamList | InlayKind::ClosingParenthesis - | InlayKind::AdjustmentHintPostfix - | InlayKind::LifetimeHint - | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()), + | InlayKind::AdjustmentPostfix + | InlayKind::Lifetime + | InlayKind::ClosingBrace => position(line_index, inlay_hint.range.end()), }, padding_left: Some(match inlay_hint.kind { - InlayKind::TypeHint => !render_colons, - InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true, + InlayKind::Type => !render_colons, + InlayKind::Chaining | InlayKind::ClosingBrace => 
true, InlayKind::ClosingParenthesis - | InlayKind::DiscriminantHint + | InlayKind::Discriminant | InlayKind::OpeningParenthesis - | InlayKind::BindingModeHint - | InlayKind::ClosureReturnTypeHint - | InlayKind::GenericParamListHint - | InlayKind::AdjustmentHint - | InlayKind::AdjustmentHintPostfix - | InlayKind::LifetimeHint - | InlayKind::ParameterHint => false, + | InlayKind::BindingMode + | InlayKind::ClosureReturnType + | InlayKind::GenericParamList + | InlayKind::Adjustment + | InlayKind::AdjustmentPostfix + | InlayKind::Lifetime + | InlayKind::Parameter => false, }), padding_right: Some(match inlay_hint.kind { InlayKind::ClosingParenthesis | InlayKind::OpeningParenthesis - | InlayKind::ChainingHint - | InlayKind::ClosureReturnTypeHint - | InlayKind::GenericParamListHint - | InlayKind::AdjustmentHint - | InlayKind::AdjustmentHintPostfix - | InlayKind::TypeHint - | InlayKind::DiscriminantHint - | InlayKind::ClosingBraceHint => false, - InlayKind::BindingModeHint => { + | InlayKind::Chaining + | InlayKind::ClosureReturnType + | InlayKind::GenericParamList + | InlayKind::Adjustment + | InlayKind::AdjustmentPostfix + | InlayKind::Type + | InlayKind::Discriminant + | InlayKind::ClosingBrace => false, + InlayKind::BindingMode => { matches!(&label, lsp_types::InlayHintLabel::String(s) if s != "&") } - InlayKind::ParameterHint | InlayKind::LifetimeHint => true, + InlayKind::Parameter | InlayKind::Lifetime => true, }), kind: match inlay_hint.kind { - InlayKind::ParameterHint => Some(lsp_types::InlayHintKind::PARAMETER), - InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => { + InlayKind::Parameter => Some(lsp_types::InlayHintKind::PARAMETER), + InlayKind::ClosureReturnType | InlayKind::Type | InlayKind::Chaining => { Some(lsp_types::InlayHintKind::TYPE) } InlayKind::ClosingParenthesis - | InlayKind::DiscriminantHint + | InlayKind::Discriminant | InlayKind::OpeningParenthesis - | InlayKind::BindingModeHint - | InlayKind::GenericParamListHint - | InlayKind::LifetimeHint - | InlayKind::AdjustmentHint - | InlayKind::AdjustmentHintPostfix - | InlayKind::ClosingBraceHint => None, + | InlayKind::BindingMode + | InlayKind::GenericParamList + | InlayKind::Lifetime + | InlayKind::Adjustment + | InlayKind::AdjustmentPostfix + | InlayKind::ClosingBrace => None, }, text_edits: None, data: None, diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index a4780af1a2615..0f24ddbbc00e2 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ $DIR/projection-as-union-type-error-2.rs:18:8 + | +LL | a: ::Identity, + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `NotImplemented` is not implemented for `u8` + | +note: required for `u8` to implement `Identity` + --> $DIR/projection-as-union-type-error-2.rs:11:25 + | +LL | impl Identity for T { + | -------------- ^^^^^^^^ ^ + | | + | unsatisfied trait bound introduced here + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0277`. diff --git a/tests/ui/union/projection-as-union-type-error.rs b/tests/ui/union/projection-as-union-type-error.rs new file mode 100644 index 0000000000000..17091c35fb230 --- /dev/null +++ b/tests/ui/union/projection-as-union-type-error.rs @@ -0,0 +1,15 @@ +// Test to ensure that there is no ICE when normalizing a projection +// which is invalid (from ). 
+ +#![crate_type = "lib"] + +pub trait Identity { + type Identity; +} + +pub type Foo = u8; + +pub union Bar { + a: ::Identity, //~ ERROR + b: u8, +} diff --git a/tests/ui/union/projection-as-union-type-error.stderr b/tests/ui/union/projection-as-union-type-error.stderr new file mode 100644 index 0000000000000..e4fbe9603ad45 --- /dev/null +++ b/tests/ui/union/projection-as-union-type-error.stderr @@ -0,0 +1,9 @@ +error[E0277]: the trait bound `u8: Identity` is not satisfied + --> $DIR/projection-as-union-type-error.rs:13:9 + | +LL | a: ::Identity, + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Identity` is not implemented for `u8` + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0277`. From 210757769d65cce8dd4837ec850c0a9b248ffb9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Tue, 17 Jan 2023 16:31:34 +0200 Subject: [PATCH 061/501] Fix replace_arith label --- crates/ide-assists/src/handlers/replace_arith_op.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/replace_arith_op.rs b/crates/ide-assists/src/handlers/replace_arith_op.rs index f1ca35cafc3a4..4b20b35c44624 100644 --- a/crates/ide-assists/src/handlers/replace_arith_op.rs +++ b/crates/ide-assists/src/handlers/replace_arith_op.rs @@ -81,7 +81,7 @@ fn replace_arith(acc: &mut Assists, ctx: &AssistContext<'_>, kind: ArithKind) -> let range = TextRange::new(start, end); acc.add_group( - &GroupLabel("replace_arith".into()), + &GroupLabel("Replace arithmetic...".into()), kind.assist_id(), kind.label(), range, From 837c1aff0546edca18b43fbfc65b8dacf0b0b699 Mon Sep 17 00:00:00 2001 From: Ali MJ Al-Nasrawy Date: Mon, 16 Jan 2023 01:31:58 +0300 Subject: [PATCH 062/501] rework min_choice algorithm of member constraints See the inline comments for the description of the new algorithm. --- .../rustc_borrowck/src/region_infer/mod.rs | 35 ++++++--- ...mber-constraints-min-choice-issue-63033.rs | 10 +++ .../min-choice-reject-ambiguous.rs | 43 +++++++++++ .../min-choice-reject-ambiguous.stderr | 40 ++++++++++ tests/ui/nll/member-constraints/min-choice.rs | 34 +++++++++ .../nested-impl-trait-fail.rs | 33 ++++++++ .../nested-impl-trait-fail.stderr | 75 +++++++++++++++++++ .../nested-impl-trait-pass.rs | 29 +++++++ 8 files changed, 288 insertions(+), 11 deletions(-) create mode 100644 tests/ui/async-await/multiple-lifetimes/member-constraints-min-choice-issue-63033.rs create mode 100644 tests/ui/nll/member-constraints/min-choice-reject-ambiguous.rs create mode 100644 tests/ui/nll/member-constraints/min-choice-reject-ambiguous.stderr create mode 100644 tests/ui/nll/member-constraints/min-choice.rs create mode 100644 tests/ui/nll/member-constraints/nested-impl-trait-fail.rs create mode 100644 tests/ui/nll/member-constraints/nested-impl-trait-fail.stderr create mode 100644 tests/ui/nll/member-constraints/nested-impl-trait-pass.rs diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs index 308f6e19a73e8..9dd5a6c760716 100644 --- a/compiler/rustc_borrowck/src/region_infer/mod.rs +++ b/compiler/rustc_borrowck/src/region_infer/mod.rs @@ -739,20 +739,33 @@ impl<'tcx> RegionInferenceContext<'tcx> { } debug!(?choice_regions, "after ub"); - // If we ruled everything out, we're done. - if choice_regions.is_empty() { - return false; - } - - // Otherwise, we need to find the minimum remaining choice, if - // any, and take that. 
- debug!("choice_regions remaining are {:#?}", choice_regions); - let Some(&min_choice) = choice_regions.iter().find(|&r1| { + // At this point we can pick any member of `choice_regions`, but to avoid potential + // non-determinism we will pick the *unique minimum* choice. + // + // Because universal regions are only partially ordered (i.e, not every two regions are + // comparable), we will ignore any region that doesn't compare to all others when picking + // the minimum choice. + // For example, consider `choice_regions = ['static, 'a, 'b, 'c, 'd, 'e]`, where + // `'static: 'a, 'static: 'b, 'a: 'c, 'b: 'c, 'c: 'd, 'c: 'e`. + // `['d, 'e]` are ignored because they do not compare - the same goes for `['a, 'b]`. + let totally_ordered_subset = choice_regions.iter().copied().filter(|&r1| { choice_regions.iter().all(|&r2| { - self.universal_region_relations.outlives(r2, *r1) + self.universal_region_relations.outlives(r1, r2) + || self.universal_region_relations.outlives(r2, r1) }) + }); + // Now we're left with `['static, 'c]`. Pick `'c` as the minimum! + let Some(min_choice) = totally_ordered_subset.reduce(|r1, r2| { + let r1_outlives_r2 = self.universal_region_relations.outlives(r1, r2); + let r2_outlives_r1 = self.universal_region_relations.outlives(r2, r1); + match (r1_outlives_r2, r2_outlives_r1) { + (true, true) => r1.min(r2), + (true, false) => r2, + (false, true) => r1, + (false, false) => bug!("incomparable regions in total order"), + } }) else { - debug!("no choice region outlived by all others"); + debug!("no unique minimum choice"); return false; }; diff --git a/tests/ui/async-await/multiple-lifetimes/member-constraints-min-choice-issue-63033.rs b/tests/ui/async-await/multiple-lifetimes/member-constraints-min-choice-issue-63033.rs new file mode 100644 index 0000000000000..614f189729126 --- /dev/null +++ b/tests/ui/async-await/multiple-lifetimes/member-constraints-min-choice-issue-63033.rs @@ -0,0 +1,10 @@ +// Regression test for #63033. + +// check-pass +// edition: 2018 + +async fn test1(_: &'static u8, _: &'_ u8, _: &'_ u8) {} + +async fn test2<'s>(_: &'s u8, _: &'_ &'s u8, _: &'_ &'s u8) {} + +fn main() {} diff --git a/tests/ui/nll/member-constraints/min-choice-reject-ambiguous.rs b/tests/ui/nll/member-constraints/min-choice-reject-ambiguous.rs new file mode 100644 index 0000000000000..52ea0f28d69f3 --- /dev/null +++ b/tests/ui/nll/member-constraints/min-choice-reject-ambiguous.rs @@ -0,0 +1,43 @@ +// ... continued from ./min-choice.rs + +// check-fail + +trait Cap<'a> {} +impl Cap<'_> for T {} + +fn type_test<'a, T: 'a>() -> &'a u8 { &0 } + +// Make sure we don't pick `'b`. +fn test_b<'a, 'b, 'c, T>() -> impl Cap<'a> + Cap<'b> + Cap<'c> +where + 'a: 'b, + 'a: 'c, + T: 'b, +{ + type_test::<'_, T>() // This should pass if we pick 'b. + //~^ ERROR the parameter type `T` may not live long enough +} + +// Make sure we don't pick `'c`. +fn test_c<'a, 'b, 'c, T>() -> impl Cap<'a> + Cap<'b> + Cap<'c> +where + 'a: 'b, + 'a: 'c, + T: 'c, +{ + type_test::<'_, T>() // This should pass if we pick 'c. + //~^ ERROR the parameter type `T` may not live long enough +} + +// We need to pick min_choice from `['b, 'c]`, but it's ambiguous which one to pick because +// they're incomparable. 
+fn test_ambiguous<'a, 'b, 'c>(s: &'a u8) -> impl Cap<'b> + Cap<'c> +where + 'a: 'b, + 'a: 'c, +{ + s + //~^ ERROR captures lifetime that does not appear in bounds +} + +fn main() {} diff --git a/tests/ui/nll/member-constraints/min-choice-reject-ambiguous.stderr b/tests/ui/nll/member-constraints/min-choice-reject-ambiguous.stderr new file mode 100644 index 0000000000000..1e6ef614dee24 --- /dev/null +++ b/tests/ui/nll/member-constraints/min-choice-reject-ambiguous.stderr @@ -0,0 +1,40 @@ +error[E0309]: the parameter type `T` may not live long enough + --> $DIR/min-choice-reject-ambiguous.rs:17:5 + | +LL | type_test::<'_, T>() // This should pass if we pick 'b. + | ^^^^^^^^^^^^^^^^^^ ...so that the type `T` will meet its required lifetime bounds + | +help: consider adding an explicit lifetime bound... + | +LL | T: 'b + 'a, + | ++++ + +error[E0309]: the parameter type `T` may not live long enough + --> $DIR/min-choice-reject-ambiguous.rs:28:5 + | +LL | type_test::<'_, T>() // This should pass if we pick 'c. + | ^^^^^^^^^^^^^^^^^^ ...so that the type `T` will meet its required lifetime bounds + | +help: consider adding an explicit lifetime bound... + | +LL | T: 'c + 'a, + | ++++ + +error[E0700]: hidden type for `impl Cap<'b> + Cap<'c>` captures lifetime that does not appear in bounds + --> $DIR/min-choice-reject-ambiguous.rs:39:5 + | +LL | fn test_ambiguous<'a, 'b, 'c>(s: &'a u8) -> impl Cap<'b> + Cap<'c> + | -- hidden type `&'a u8` captures the lifetime `'a` as defined here +... +LL | s + | ^ + | +help: to declare that `impl Cap<'b> + Cap<'c>` captures `'a`, you can add an explicit `'a` lifetime bound + | +LL | fn test_ambiguous<'a, 'b, 'c>(s: &'a u8) -> impl Cap<'b> + Cap<'c> + 'a + | ++++ + +error: aborting due to 3 previous errors + +Some errors have detailed explanations: E0309, E0700. +For more information about an error, try `rustc --explain E0309`. diff --git a/tests/ui/nll/member-constraints/min-choice.rs b/tests/ui/nll/member-constraints/min-choice.rs new file mode 100644 index 0000000000000..14b4dae7abfde --- /dev/null +++ b/tests/ui/nll/member-constraints/min-choice.rs @@ -0,0 +1,34 @@ +// Assuming that the hidden type in these tests is `&'_#15r u8`, +// we have a member constraint: `'_#15r member ['static, 'a, 'b, 'c]`. +// +// Make sure we pick up the minimum non-ambiguous region among them. +// We will have to exclude `['b, 'c]` because they're incomparable, +// and then we should pick `'a` because we know `'static: 'a`. + +// check-pass + +trait Cap<'a> {} +impl Cap<'_> for T {} + +fn type_test<'a, T: 'a>() -> &'a u8 { &0 } + +// Basic test: make sure we don't bail out because 'b and 'c are incomparable. +fn basic<'a, 'b, 'c>() -> impl Cap<'a> + Cap<'b> + Cap<'c> +where + 'a: 'b, + 'a: 'c, +{ + &0 +} + +// Make sure we don't pick `'static`. +fn test_static<'a, 'b, 'c, T>() -> impl Cap<'a> + Cap<'b> + Cap<'c> +where + 'a: 'b, + 'a: 'c, + T: 'a, +{ + type_test::<'_, T>() // This will fail if we pick 'static +} + +fn main() {} diff --git a/tests/ui/nll/member-constraints/nested-impl-trait-fail.rs b/tests/ui/nll/member-constraints/nested-impl-trait-fail.rs new file mode 100644 index 0000000000000..66ff828a84f7c --- /dev/null +++ b/tests/ui/nll/member-constraints/nested-impl-trait-fail.rs @@ -0,0 +1,33 @@ +// Nested impl-traits can impose different member constraints on the same region variable. 
+ +// check-fail + +trait Cap<'a> {} +impl Cap<'_> for T {} + +// Assuming the hidden type is `[&'_#15r u8; 1]`, we have two distinct member constraints: +// - '_#15r member ['static, 'a, 'b] // from outer impl-trait +// - '_#15r member ['static, 'a, 'b] // from inner impl-trait +// To satisfy both we can choose 'a or 'b, so it's a failure due to ambiguity. +fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator + Cap<'b>> +where + 's: 'a, + 's: 'b, +{ + [a] + //~^ E0700 + //~| E0700 +} + +// Same as the above but with late-bound regions. +fn fail_late_bound<'s, 'a, 'b>( + a: &'s u8, + _: &'a &'s u8, + _: &'b &'s u8, +) -> impl IntoIterator + Cap<'b>> { + [a] + //~^ E0700 + //~| E0700 +} + +fn main() {} diff --git a/tests/ui/nll/member-constraints/nested-impl-trait-fail.stderr b/tests/ui/nll/member-constraints/nested-impl-trait-fail.stderr new file mode 100644 index 0000000000000..6824e27ead028 --- /dev/null +++ b/tests/ui/nll/member-constraints/nested-impl-trait-fail.stderr @@ -0,0 +1,75 @@ +error[E0700]: hidden type for `impl IntoIterator + Cap<'b>>` captures lifetime that does not appear in bounds + --> $DIR/nested-impl-trait-fail.rs:17:5 + | +LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator + Cap<'b>> + | -- hidden type `[&'s u8; 1]` captures the lifetime `'s` as defined here +... +LL | [a] + | ^^^ + | +help: to declare that `impl IntoIterator + Cap<'b>>` captures `'s`, you can add an explicit `'s` lifetime bound + | +LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator + Cap<'b>> + 's + | ++++ +help: to declare that `impl Cap<'a> + Cap<'b>` captures `'s`, you can add an explicit `'s` lifetime bound + | +LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator + Cap<'b> + 's> + | ++++ + +error[E0700]: hidden type for `impl Cap<'a> + Cap<'b>` captures lifetime that does not appear in bounds + --> $DIR/nested-impl-trait-fail.rs:17:5 + | +LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator + Cap<'b>> + | -- hidden type `&'s u8` captures the lifetime `'s` as defined here +... +LL | [a] + | ^^^ + | +help: to declare that `impl IntoIterator + Cap<'b>>` captures `'s`, you can add an explicit `'s` lifetime bound + | +LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator + Cap<'b>> + 's + | ++++ +help: to declare that `impl Cap<'a> + Cap<'b>` captures `'s`, you can add an explicit `'s` lifetime bound + | +LL | fn fail_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator + Cap<'b> + 's> + | ++++ + +error[E0700]: hidden type for `impl IntoIterator + Cap<'b>>` captures lifetime that does not appear in bounds + --> $DIR/nested-impl-trait-fail.rs:28:5 + | +LL | fn fail_late_bound<'s, 'a, 'b>( + | -- hidden type `[&'s u8; 1]` captures the lifetime `'s` as defined here +... +LL | [a] + | ^^^ + | +help: to declare that `impl IntoIterator + Cap<'b>>` captures `'s`, you can add an explicit `'s` lifetime bound + | +LL | ) -> impl IntoIterator + Cap<'b>> + 's { + | ++++ +help: to declare that `impl Cap<'a> + Cap<'b>` captures `'s`, you can add an explicit `'s` lifetime bound + | +LL | ) -> impl IntoIterator + Cap<'b> + 's> { + | ++++ + +error[E0700]: hidden type for `impl Cap<'a> + Cap<'b>` captures lifetime that does not appear in bounds + --> $DIR/nested-impl-trait-fail.rs:28:5 + | +LL | fn fail_late_bound<'s, 'a, 'b>( + | -- hidden type `&'s u8` captures the lifetime `'s` as defined here +... 
+LL | [a] + | ^^^ + | +help: to declare that `impl IntoIterator + Cap<'b>>` captures `'s`, you can add an explicit `'s` lifetime bound + | +LL | ) -> impl IntoIterator + Cap<'b>> + 's { + | ++++ +help: to declare that `impl Cap<'a> + Cap<'b>` captures `'s`, you can add an explicit `'s` lifetime bound + | +LL | ) -> impl IntoIterator + Cap<'b> + 's> { + | ++++ + +error: aborting due to 4 previous errors + +For more information about this error, try `rustc --explain E0700`. diff --git a/tests/ui/nll/member-constraints/nested-impl-trait-pass.rs b/tests/ui/nll/member-constraints/nested-impl-trait-pass.rs new file mode 100644 index 0000000000000..15540cb460e7a --- /dev/null +++ b/tests/ui/nll/member-constraints/nested-impl-trait-pass.rs @@ -0,0 +1,29 @@ +// Nested impl-traits can impose different member constraints on the same region variable. + +// check-pass + +trait Cap<'a> {} +impl Cap<'_> for T {} + +// Assuming the hidden type is `[&'_#15r u8; 1]`, we have two distinct member constraints: +// - '_#15r member ['static, 'a, 'b] // from outer impl-trait +// - '_#15r member ['static, 'a] // from inner impl-trait +// To satisfy both we can only choose 'a. +fn pass_early_bound<'s, 'a, 'b>(a: &'s u8) -> impl IntoIterator> + Cap<'b> +where + 's: 'a, + 's: 'b, +{ + [a] +} + +// Same as the above but with late-bound regions. +fn pass_late_bound<'s, 'a, 'b>( + a: &'s u8, + _: &'a &'s u8, + _: &'b &'s u8, +) -> impl IntoIterator> + Cap<'b> { + [a] +} + +fn main() {} From 1c454736a4f593eebac2c43de9dfb50f9200c250 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Tue, 17 Jan 2023 15:29:52 -0500 Subject: [PATCH 063/501] Suppress extra indent after the end of dot chains --- editors/code/src/config.ts | 118 ++++++++++++++++++++++--------------- 1 file changed, 70 insertions(+), 48 deletions(-) diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index eb4f965291fe5..029dc3afd553d 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -86,58 +86,80 @@ export class Config { * [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076 */ private configureLanguage() { - if (this.typingContinueCommentsOnNewline && !this.configureLang) { + if (this.configureLang) return; + + let onEnterRules: vscode.OnEnterRule[] = [ + { + // Carry indentation from the previous line + beforeText: /^\s*$/, + action: { indentAction: vscode.IndentAction.None }, + }, + { + // After the end of a function/field chain, + // with the semicolon on the same line + beforeText: /^\s+\..*;/, + action: { indentAction: vscode.IndentAction.Outdent }, + }, + { + // After the end of a function/field chain, + // with semicolon detached from the rest + beforeText: /^\s+;/, + previousLineText: /^\s+\..*/, + action: { indentAction: vscode.IndentAction.Outdent }, + }, + ]; + + if (this.typingContinueCommentsOnNewline) { const indentAction = vscode.IndentAction.None; - this.configureLang = vscode.languages.setLanguageConfiguration("rust", { - onEnterRules: [ - { - // Doc single-line comment - // e.g. ///| - beforeText: /^\s*\/{3}.*$/, - action: { indentAction, appendText: "/// " }, + onEnterRules = [ + ...onEnterRules, + { + // Doc single-line comment + // e.g. ///| + beforeText: /^\s*\/{3}.*$/, + action: { indentAction, appendText: "/// " }, + }, + { + // Parent doc single-line comment + // e.g. //!| + beforeText: /^\s*\/{2}\!.*$/, + action: { indentAction, appendText: "//! " }, + }, + { + // Begins an auto-closed multi-line comment (standard or parent doc) + // e.g. /** | */ or /*! 
| */ + beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/, + afterText: /^\s*\*\/$/, + action: { + indentAction: vscode.IndentAction.IndentOutdent, + appendText: " * ", }, - { - // Parent doc single-line comment - // e.g. //!| - beforeText: /^\s*\/{2}\!.*$/, - action: { indentAction, appendText: "//! " }, - }, - { - // Begins an auto-closed multi-line comment (standard or parent doc) - // e.g. /** | */ or /*! | */ - beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/, - afterText: /^\s*\*\/$/, - action: { - indentAction: vscode.IndentAction.IndentOutdent, - appendText: " * ", - }, - }, - { - // Begins a multi-line comment (standard or parent doc) - // e.g. /** ...| or /*! ...| - beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/, - action: { indentAction, appendText: " * " }, - }, - { - // Continues a multi-line comment - // e.g. * ...| - beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/, - action: { indentAction, appendText: "* " }, - }, - { - // Dedents after closing a multi-line comment - // e.g. */| - beforeText: /^(\ \ )*\ \*\/\s*$/, - action: { indentAction, removeText: 1 }, - }, - ], - }); - } - if (!this.typingContinueCommentsOnNewline && this.configureLang) { - this.configureLang.dispose(); - this.configureLang = undefined; + }, + { + // Begins a multi-line comment (standard or parent doc) + // e.g. /** ...| or /*! ...| + beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/, + action: { indentAction, appendText: " * " }, + }, + { + // Continues a multi-line comment + // e.g. * ...| + beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/, + action: { indentAction, appendText: "* " }, + }, + { + // Dedents after closing a multi-line comment + // e.g. */| + beforeText: /^(\ \ )*\ \*\/\s*$/, + action: { indentAction, removeText: 1 }, + }, + ]; } + + this.configureLang = vscode.languages.setLanguageConfiguration("rust", { + onEnterRules, + }); } // We don't do runtime config validation here for simplicity. 
More on stackoverflow: From 992bafa773255cf9858bdc25c302abbd07d89c7b Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Tue, 17 Jan 2023 16:25:46 -0500 Subject: [PATCH 064/501] Fix change detection for relevant lang config opts --- editors/code/src/config.ts | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index 029dc3afd553d..30ff97848299f 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -11,7 +11,9 @@ export type RunnableEnvCfg = export class Config { readonly extensionId = "rust-lang.rust-analyzer"; - configureLang: vscode.Disposable | undefined; + configureLang: + | { handle: vscode.Disposable; typingContinueCommentsOnNewline: boolean } + | undefined; readonly rootSection = "rust-analyzer"; private readonly requiresReloadOpts = [ @@ -43,7 +45,7 @@ export class Config { } dispose() { - this.configureLang?.dispose(); + this.configureLang?.handle.dispose(); } private refreshLogging() { @@ -86,7 +88,15 @@ export class Config { * [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076 */ private configureLanguage() { - if (this.configureLang) return; + // Only need to dispose of the config if there's a change + if ( + this.configureLang && + this.typingContinueCommentsOnNewline !== + this.configureLang.typingContinueCommentsOnNewline + ) { + this.configureLang.handle.dispose(); + this.configureLang = undefined; + } let onEnterRules: vscode.OnEnterRule[] = [ { @@ -157,9 +167,12 @@ export class Config { ]; } - this.configureLang = vscode.languages.setLanguageConfiguration("rust", { - onEnterRules, - }); + this.configureLang = { + handle: vscode.languages.setLanguageConfiguration("rust", { + onEnterRules, + }), + typingContinueCommentsOnNewline: this.typingContinueCommentsOnNewline, + }; } // We don't do runtime config validation here for simplicity. 
More on stackoverflow: From 1e4a1829543c3d0dcee91d775239d16384db7821 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 18 Jan 2023 20:29:55 +0100 Subject: [PATCH 065/501] Fix checkOnSave to check config patching not always working --- .../src/config/patch_old_style.rs | 42 ++++++++++++++----- 1 file changed, 32 insertions(+), 10 deletions(-) diff --git a/crates/rust-analyzer/src/config/patch_old_style.rs b/crates/rust-analyzer/src/config/patch_old_style.rs index de6ac946a682f..73d2ed329845a 100644 --- a/crates/rust-analyzer/src/config/patch_old_style.rs +++ b/crates/rust-analyzer/src/config/patch_old_style.rs @@ -114,16 +114,18 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) { } // completion_addCallArgumentSnippets completion_addCallParenthesis -> completion_callable_snippets - let res = match ( - copy.pointer("/completion/addCallArgumentSnippets"), - copy.pointer("/completion/addCallParenthesis"), - ) { - (Some(Value::Bool(true)), Some(Value::Bool(true))) => json!("fill_arguments"), - (_, Some(Value::Bool(true))) => json!("add_parentheses"), - (Some(Value::Bool(false)), Some(Value::Bool(false))) => json!("none"), - (_, _) => return, - }; - merge(json, json!({ "completion": { "callable": {"snippets": res }} })); + 'completion: { + let res = match ( + copy.pointer("/completion/addCallArgumentSnippets"), + copy.pointer("/completion/addCallParenthesis"), + ) { + (Some(Value::Bool(true)), Some(Value::Bool(true))) => json!("fill_arguments"), + (_, Some(Value::Bool(true))) => json!("add_parentheses"), + (Some(Value::Bool(false)), Some(Value::Bool(false))) => json!("none"), + (_, _) => break 'completion, + }; + merge(json, json!({ "completion": { "callable": {"snippets": res }} })); + } // We need to do this due to the checkOnSave_enable -> checkOnSave change, as that key now can either be an object or a bool // checkOnSave_* -> check_* @@ -146,3 +148,23 @@ fn merge(dst: &mut Value, src: Value) { (dst, src) => *dst = src, } } + +#[test] +fn check_on_save_patching() { + let mut json = json!({ "checkOnSave": { "overrideCommand": "foo" }}); + patch_json_for_outdated_configs(&mut json); + assert_eq!( + json, + json!({ "checkOnSave": { "overrideCommand": "foo" }, "check": { "overrideCommand": "foo" }}) + ); +} + +#[test] +fn check_on_save_patching_enable() { + let mut json = json!({ "checkOnSave": { "enable": true, "overrideCommand": "foo" }}); + patch_json_for_outdated_configs(&mut json); + assert_eq!( + json, + json!({ "checkOnSave": true, "check": { "enable": true, "overrideCommand": "foo" }}) + ); +} From a6fda3ee7fb24c9ccf1544ec719e8db01f77f36b Mon Sep 17 00:00:00 2001 From: Nilstrieb <48135649+Nilstrieb@users.noreply.github.com> Date: Wed, 18 Jan 2023 20:04:26 +0100 Subject: [PATCH 066/501] Support `true` and `false` as boolean flag params Implements MCP 577. 
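For illustration (a sketch, not part of the change itself): the boolean spellings exercised by the tests updated in this patch now all parse, e.g.

    -C debug_assertions=true
    -Z deduplicate-diagnostics=true
    -Zinline-mir=false

Each of these goes through the same `parse_bool`/`parse_opt_bool` helpers, so `true` and `false` behave exactly like the existing `yes`/`no`/`on`/`off` spellings.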
--- compiler/rustc_session/src/options.rs | 10 +++++----- tests/codegen/issue-75659.rs | 2 +- tests/ui/lint/reasons-forbidden.rs | 2 +- .../next-power-of-two-overflow-debug.rs | 2 +- tests/ui/rfc-2091-track-caller/call-chain.rs | 2 +- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 7b5fd6cc2a81d..b102dea570f50 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -349,7 +349,7 @@ fn build_options( #[allow(non_upper_case_globals)] mod desc { pub const parse_no_flag: &str = "no value"; - pub const parse_bool: &str = "one of: `y`, `yes`, `on`, `n`, `no`, or `off`"; + pub const parse_bool: &str = "one of: `y`, `yes`, `on`, `true`, `n`, `no`, `off` or `false`"; pub const parse_opt_bool: &str = parse_bool; pub const parse_string: &str = "a string"; pub const parse_opt_string: &str = parse_string; @@ -432,11 +432,11 @@ mod parse { /// Use this for any boolean option that has a static default. pub(crate) fn parse_bool(slot: &mut bool, v: Option<&str>) -> bool { match v { - Some("y") | Some("yes") | Some("on") | None => { + Some("y") | Some("yes") | Some("on") | Some("true") | None => { *slot = true; true } - Some("n") | Some("no") | Some("off") => { + Some("n") | Some("no") | Some("off") | Some("false") => { *slot = false; true } @@ -449,11 +449,11 @@ mod parse { /// other factors, such as other options, or target options.) pub(crate) fn parse_opt_bool(slot: &mut Option, v: Option<&str>) -> bool { match v { - Some("y") | Some("yes") | Some("on") | None => { + Some("y") | Some("yes") | Some("on") | Some("true") | None => { *slot = Some(true); true } - Some("n") | Some("no") | Some("off") => { + Some("n") | Some("no") | Some("off") | Some("false") => { *slot = Some(false); true } diff --git a/tests/codegen/issue-75659.rs b/tests/codegen/issue-75659.rs index 6bcb59affe328..9394868c08db5 100644 --- a/tests/codegen/issue-75659.rs +++ b/tests/codegen/issue-75659.rs @@ -1,7 +1,7 @@ // This test checks that the call to memchr/slice_contains is optimized away // when searching in small slices. -// compile-flags: -O -Zinline-mir=no +// compile-flags: -O -Zinline-mir=false // only-x86_64 #![crate_type = "lib"] diff --git a/tests/ui/lint/reasons-forbidden.rs b/tests/ui/lint/reasons-forbidden.rs index 9c2edec4d5214..947099fdd13e7 100644 --- a/tests/ui/lint/reasons-forbidden.rs +++ b/tests/ui/lint/reasons-forbidden.rs @@ -8,7 +8,7 @@ // // The test is much cleaner if we deduplicate, though. 
-// compile-flags: -Z deduplicate-diagnostics=yes +// compile-flags: -Z deduplicate-diagnostics=true #![forbid( unsafe_code, diff --git a/tests/ui/numbers-arithmetic/next-power-of-two-overflow-debug.rs b/tests/ui/numbers-arithmetic/next-power-of-two-overflow-debug.rs index 79d78da3328a9..0e487a700b802 100644 --- a/tests/ui/numbers-arithmetic/next-power-of-two-overflow-debug.rs +++ b/tests/ui/numbers-arithmetic/next-power-of-two-overflow-debug.rs @@ -1,5 +1,5 @@ // run-pass -// compile-flags: -C debug_assertions=yes +// compile-flags: -C debug_assertions=true // needs-unwind // ignore-emscripten dies with an LLVM error diff --git a/tests/ui/rfc-2091-track-caller/call-chain.rs b/tests/ui/rfc-2091-track-caller/call-chain.rs index 28b3f76c9d536..a8814ce285286 100644 --- a/tests/ui/rfc-2091-track-caller/call-chain.rs +++ b/tests/ui/rfc-2091-track-caller/call-chain.rs @@ -1,6 +1,6 @@ // run-pass // revisions: default mir-opt -//[default] compile-flags: -Zinline-mir=no +//[default] compile-flags: -Zinline-mir=false //[mir-opt] compile-flags: -Zmir-opt-level=4 use std::panic::Location; From c9d33cddc96c34047be7a57c0a6b36998f4d7c6d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 18 Jan 2023 21:38:30 +0100 Subject: [PATCH 067/501] Don't run flycheck on startup unless checkOnSave is enabled --- crates/rust-analyzer/src/main_loop.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index a270049019f32..b00107c69a108 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -287,8 +287,10 @@ impl GlobalState { || self.fetch_build_data_queue.op_requested()); if became_quiescent { - // Project has loaded properly, kick off initial flycheck - self.flycheck.iter().for_each(FlycheckHandle::restart); + if self.config.check_on_save() { + // Project has loaded properly, kick off initial flycheck + self.flycheck.iter().for_each(FlycheckHandle::restart); + } if self.config.prefill_caches() { self.prime_caches_queue.request_op("became quiescent".to_string()); } From 872408500b9ed4f083a6ba36f1c5dbdf174f7627 Mon Sep 17 00:00:00 2001 From: onestacked Date: Thu, 19 Jan 2023 15:40:07 +0100 Subject: [PATCH 068/501] Parse const_closures syntax. 
Enables parsing of the syntax for `#![feature(const_closures)]` introduced in https://github.com/rust-lang/rust/pull/106004 --- crates/parser/src/grammar/expressions/atom.rs | 8 ++-- .../parser/inline/ok/0205_const_closure.rast | 42 +++++++++++++++++++ .../parser/inline/ok/0205_const_closure.rs | 1 + crates/syntax/rust.ungram | 2 +- crates/syntax/src/ast/generated/nodes.rs | 1 + 5 files changed, 50 insertions(+), 4 deletions(-) create mode 100644 crates/parser/test_data/parser/inline/ok/0205_const_closure.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0205_const_closure.rs diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index efa3997353bf8..a23f900b73864 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -152,7 +152,7 @@ pub(super) fn atom_expr( m.complete(p, BLOCK_EXPR) } - T![static] | T![async] | T![move] | T![|] => closure_expr(p), + T![const] | T![static] | T![async] | T![move] | T![|] => closure_expr(p), T![for] if la == T![<] => closure_expr(p), T![for] => for_expr(p, None), @@ -255,7 +255,7 @@ fn array_expr(p: &mut Parser<'_>) -> CompletedMarker { // } fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker { assert!(match p.current() { - T![static] | T![async] | T![move] | T![|] => true, + T![const] | T![static] | T![async] | T![move] | T![|] => true, T![for] => p.nth(1) == T![<], _ => false, }); @@ -265,7 +265,9 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker { if p.at(T![for]) { types::for_binder(p); } - + // test const_closure + // fn main() { let cl = const || _ = 0; } + p.eat(T![const]); p.eat(T![static]); p.eat(T![async]); p.eat(T![move]); diff --git a/crates/parser/test_data/parser/inline/ok/0205_const_closure.rast b/crates/parser/test_data/parser/inline/ok/0205_const_closure.rast new file mode 100644 index 0000000000000..06442a1d0f1f7 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0205_const_closure.rast @@ -0,0 +1,42 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE " " + LET_STMT + LET_KW "let" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "cl" + WHITESPACE " " + EQ "=" + WHITESPACE " " + CLOSURE_EXPR + CONST_KW "const" + WHITESPACE " " + PARAM_LIST + PIPE "|" + PIPE "|" + WHITESPACE " " + BIN_EXPR + UNDERSCORE_EXPR + UNDERSCORE "_" + WHITESPACE " " + EQ "=" + WHITESPACE " " + LITERAL + INT_NUMBER "0" + SEMICOLON ";" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0205_const_closure.rs b/crates/parser/test_data/parser/inline/ok/0205_const_closure.rs new file mode 100644 index 0000000000000..0c05cc70bd376 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0205_const_closure.rs @@ -0,0 +1 @@ +fn main() { let cl = const || _ = 0; } diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index 2c67586a3905d..36ad5fddfd0c5 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -452,7 +452,7 @@ FieldExpr = Attr* Expr '.' NameRef ClosureExpr = - Attr* ('for' GenericParamList)? 'static'? 'async'? 'move'? ParamList RetType?
body:Expr IfExpr = diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index a214a5e4462cf..642a3bfc35d13 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -842,6 +842,7 @@ impl ast::HasAttrs for ClosureExpr {} impl ClosureExpr { pub fn for_token(&self) -> Option { support::token(&self.syntax, T![for]) } pub fn generic_param_list(&self) -> Option { support::child(&self.syntax) } + pub fn const_token(&self) -> Option { support::token(&self.syntax, T![const]) } pub fn static_token(&self) -> Option { support::token(&self.syntax, T![static]) } pub fn async_token(&self) -> Option { support::token(&self.syntax, T![async]) } pub fn move_token(&self) -> Option { support::token(&self.syntax, T![move]) } From 7605853b32111219aa99db94a7ef5a00fe3e751d Mon Sep 17 00:00:00 2001 From: Nilstrieb <48135649+Nilstrieb@users.noreply.github.com> Date: Thu, 19 Jan 2023 19:46:53 +0100 Subject: [PATCH 069/501] Change codegen documentation --- src/doc/rustc/src/codegen-options/index.md | 64 +++++++++++----------- 1 file changed, 32 insertions(+), 32 deletions(-) diff --git a/src/doc/rustc/src/codegen-options/index.md b/src/doc/rustc/src/codegen-options/index.md index 7e355b7fccfc4..5ed5e3c7f11af 100644 --- a/src/doc/rustc/src/codegen-options/index.md +++ b/src/doc/rustc/src/codegen-options/index.md @@ -49,10 +49,10 @@ Guard](https://docs.microsoft.com/en-us/windows/win32/secbp/control-flow-guard) platform security feature. This flag is currently ignored for non-Windows targets. It takes one of the following values: -* `y`, `yes`, `on`, `checks`, or no value: enable Control Flow Guard. +* `y`, `yes`, `on`, `true`, `checks`, or no value: enable Control Flow Guard. * `nochecks`: emit Control Flow Guard metadata without runtime enforcement checks (this should only be used for testing purposes as it does not provide security enforcement). -* `n`, `no`, `off`: do not enable Control Flow Guard (the default). +* `n`, `no`, `off`, `false`: do not enable Control Flow Guard (the default). ## debug-assertions @@ -60,8 +60,8 @@ This flag lets you turn `cfg(debug_assertions)` [conditional compilation](../../reference/conditional-compilation.md#debug_assertions) on or off. It takes one of the following values: -* `y`, `yes`, `on`, or no value: enable debug-assertions. -* `n`, `no`, or `off`: disable debug-assertions. +* `y`, `yes`, `on`, `true`, or no value: enable debug-assertions. +* `n`, `no`, `off` or `false`: disable debug-assertions. If not specified, debug assertions are automatically enabled only if the [opt-level](#opt-level) is 0. @@ -82,8 +82,8 @@ Note: The [`-g` flag][option-g-debug] is an alias for `-C debuginfo=2`. This flag controls whether or not the linker includes its default libraries. It takes one of the following values: -* `y`, `yes`, `on`, or no value: include default libraries (the default). -* `n`, `no`, or `off`: exclude default libraries. +* `y`, `yes`, `on`, `true` or no value: include default libraries (the default). +* `n`, `no`, `off` or `false`: exclude default libraries. For example, for gcc flavor linkers, this issues the `-nodefaultlibs` flag to the linker. @@ -93,8 +93,8 @@ the linker. This flag controls whether or not the compiler embeds LLVM bitcode into object files. It takes one of the following values: -* `y`, `yes`, `on`, or no value: put bitcode in rlibs (the default). -* `n`, `no`, or `off`: omit bitcode from rlibs. 
+* `y`, `yes`, `on`, `true` or no value: put bitcode in rlibs (the default). +* `n`, `no`, `off` or `false`: omit bitcode from rlibs. LLVM bitcode is required when rustc is performing link-time optimization (LTO). It is also required on some targets like iOS ones where vendors look for LLVM @@ -135,8 +135,8 @@ flag][option-emit] for more information. This flag forces the use of frame pointers. It takes one of the following values: -* `y`, `yes`, `on`, or no value: force-enable frame pointers. -* `n`, `no`, or `off`: do not force-enable frame pointers. This does +* `y`, `yes`, `on`, `true` or no value: force-enable frame pointers. +* `n`, `no`, `off` or `false`: do not force-enable frame pointers. This does not necessarily mean frame pointers will be removed. The default behaviour, if frame pointers are not force-enabled, depends on the @@ -147,8 +147,8 @@ target. This flag forces the generation of unwind tables. It takes one of the following values: -* `y`, `yes`, `on`, or no value: Unwind tables are forced to be generated. -* `n`, `no`, or `off`: Unwind tables are not forced to be generated. If unwind +* `y`, `yes`, `on`, `true` or no value: Unwind tables are forced to be generated. +* `n`, `no`, `off` or `false`: Unwind tables are not forced to be generated. If unwind tables are required by the target an error will be emitted. The default if not specified depends on the target. @@ -202,8 +202,8 @@ options should be separated by spaces. This flag controls whether the linker will keep dead code. It takes one of the following values: -* `y`, `yes`, `on`, or no value: keep dead code. -* `n`, `no`, or `off`: remove dead code (the default). +* `y`, `yes`, `on`, `true` or no value: keep dead code. +* `n`, `no`, `off` or `false`: remove dead code (the default). An example of when this flag might be useful is when trying to construct code coverage metrics. @@ -215,8 +215,8 @@ linker will use libraries and objects shipped with Rust instead or those in the It takes one of the following values: * no value: rustc will use heuristic to disable self-contained mode if system has necessary tools. -* `y`, `yes`, `on`: use only libraries/objects shipped with Rust. -* `n`, `no`, or `off`: rely on the user or the linker to provide non-Rust libraries/objects. +* `y`, `yes`, `on`, `true`: use only libraries/objects shipped with Rust. +* `n`, `no`, `off` or `false`: rely on the user or the linker to provide non-Rust libraries/objects. This allows overriding cases when detection fails or user wants to use shipped libraries. @@ -261,8 +261,8 @@ This flag defers LTO optimizations to the linker. See [linker-plugin-LTO](../linker-plugin-lto.md) for more details. It takes one of the following values: -* `y`, `yes`, `on`, or no value: enable linker plugin LTO. -* `n`, `no`, or `off`: disable linker plugin LTO (the default). +* `y`, `yes`, `on`, `true` or no value: enable linker plugin LTO. +* `n`, `no`, `off` or `false`: disable linker plugin LTO (the default). * A path to the linker plugin. More specifically this flag will cause the compiler to replace its typical @@ -292,9 +292,9 @@ optimizations](https://llvm.org/docs/LinkTimeOptimization.html) to produce better optimized code, using whole-program analysis, at the cost of longer linking time. It takes one of the following values: -* `y`, `yes`, `on`, `fat`, or no value: perform "fat" LTO which attempts to +* `y`, `yes`, `on`, `true`, `fat`, or no value: perform "fat" LTO which attempts to perform optimizations across all crates within the dependency graph. 
-* `n`, `no`, `off`: disables LTO. +* `n`, `no`, `off`, `false`: disables LTO. * `thin`: perform ["thin" LTO](http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html). This is similar to "fat", but takes substantially less time to run while @@ -333,8 +333,8 @@ This flag allows you to disable [the red zone](https://en.wikipedia.org/wiki/Red_zone_\(computing\)). It takes one of the following values: -* `y`, `yes`, `on`, or no value: disable the red zone. -* `n`, `no`, or `off`: enable the red zone. +* `y`, `yes`, `on`, `true` or no value: disable the red zone. +* `n`, `no`, `off` or `false`: enable the red zone. The default behaviour, if the flag is not specified, depends on the target. @@ -376,8 +376,8 @@ overflow](../../reference/expressions/operator-expr.md#overflow). When overflow-checks are enabled, a panic will occur on overflow. This flag takes one of the following values: -* `y`, `yes`, `on`, or no value: enable overflow checks. -* `n`, `no`, or `off`: disable overflow checks. +* `y`, `yes`, `on`, `true` or no value: enable overflow checks. +* `n`, `no`, `off` or `false`: disable overflow checks. If not specified, overflow checks are enabled if [debug-assertions](#debug-assertions) are enabled, disabled otherwise. @@ -409,8 +409,8 @@ for determining whether or not it is possible to statically or dynamically link with a dependency. For example, `cdylib` crate types may only use static linkage. This flag takes one of the following values: -* `y`, `yes`, `on`, or no value: use dynamic linking. -* `n`, `no`, or `off`: use static linking (the default). +* `y`, `yes`, `on`, `true` or no value: use dynamic linking. +* `n`, `no`, `off` or `false`: use static linking (the default). ## profile-generate @@ -487,24 +487,24 @@ The list of passes should be separated by spaces. This flag controls whether [`rpath`](https://en.wikipedia.org/wiki/Rpath) is enabled. It takes one of the following values: -* `y`, `yes`, `on`, or no value: enable rpath. -* `n`, `no`, or `off`: disable rpath (the default). +* `y`, `yes`, `on`, `true` or no value: enable rpath. +* `n`, `no`, `off` or `false`: disable rpath (the default). ## save-temps This flag controls whether temporary files generated during compilation are deleted once compilation finishes. It takes one of the following values: -* `y`, `yes`, `on`, or no value: save temporary files. -* `n`, `no`, or `off`: delete temporary files (the default). +* `y`, `yes`, `on`, `true` or no value: save temporary files. +* `n`, `no`, `off` or `false`: delete temporary files (the default). ## soft-float This option controls whether `rustc` generates code that emulates floating point instructions in software. It takes one of the following values: -* `y`, `yes`, `on`, or no value: use soft floats. -* `n`, `no`, or `off`: use hardware floats (the default). +* `y`, `yes`, `on`, `true` or no value: use soft floats. +* `n`, `no`, `off` or `false`: use hardware floats (the default). 
## split-debuginfo From 384fa4b84ae881f402933e105c5c92b8b471036a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 19 Jan 2023 19:21:44 +0100 Subject: [PATCH 070/501] fix: Fix target-data-layout fetching incorrectly passing 'rustc' to rustc --- crates/base-db/src/fixture.rs | 15 ++- crates/base-db/src/input.rs | 25 +++-- crates/base-db/src/lib.rs | 1 + crates/hir-ty/src/layout/target.rs | 11 +- crates/ide/src/lib.rs | 2 +- .../project-model/src/target_data_layout.rs | 11 +- crates/project-model/src/tests.rs | 106 +++++++++++++----- crates/project-model/src/workspace.rs | 36 ++++-- 8 files changed, 144 insertions(+), 63 deletions(-) diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index b267700a43185..5b0ed1648db5b 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -163,7 +163,10 @@ impl ChangeFixture { Ok(Vec::new()), false, origin, - meta.target_data_layout.as_deref().map(Arc::from), + meta.target_data_layout + .as_deref() + .map(Arc::from) + .ok_or_else(|| "target_data_layout unset".into()), ); let prev = crates.insert(crate_name.clone(), crate_id); assert!(prev.is_none()); @@ -200,7 +203,9 @@ impl ChangeFixture { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - default_target_data_layout.map(|x| x.into()), + default_target_data_layout + .map(|x| x.into()) + .ok_or_else(|| "target_data_layout unset".into()), ); } else { for (from, to, prelude) in crate_deps { @@ -214,8 +219,10 @@ impl ChangeFixture { .unwrap(); } } - let target_layout = - crate_graph.iter().next().and_then(|it| crate_graph[it].target_layout.clone()); + let target_layout = crate_graph.iter().next().map_or_else( + || Err("target_data_layout unset".into()), + |it| crate_graph[it].target_layout.clone(), + ); if let Some(mini_core) = mini_core { let core_file = file_id; diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index b44a157e2538c..ea0561772d225 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -243,6 +243,7 @@ pub enum ProcMacroExpansionError { } pub type ProcMacroLoadResult = Result, String>; +pub type TargetLayoutLoadResult = Result, Arc>; #[derive(Debug, Clone)] pub struct ProcMacro { @@ -265,7 +266,7 @@ pub struct CrateData { pub display_name: Option, pub cfg_options: CfgOptions, pub potential_cfg_options: CfgOptions, - pub target_layout: Option>, + pub target_layout: TargetLayoutLoadResult, pub env: Env, pub dependencies: Vec, pub proc_macro: ProcMacroLoadResult, @@ -324,7 +325,7 @@ impl CrateGraph { proc_macro: ProcMacroLoadResult, is_proc_macro: bool, origin: CrateOrigin, - target_layout: Option>, + target_layout: Result, Arc>, ) -> CrateId { let data = CrateData { root_file_id, @@ -647,7 +648,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("".into()), ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -660,7 +661,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("".into()), ); let crate3 = graph.add_crate_root( FileId(3u32), @@ -673,7 +674,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("".into()), ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -700,7 +701,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("".into()), ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -713,7 +714,7 @@ mod tests { 
Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("".into()), ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -737,7 +738,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("".into()), ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -750,7 +751,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("".into()), ); let crate3 = graph.add_crate_root( FileId(3u32), @@ -763,7 +764,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("".into()), ); assert!(graph .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2)) @@ -787,7 +788,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("".into()), ); let crate2 = graph.add_crate_root( FileId(2u32), @@ -800,7 +801,7 @@ mod tests { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("".into()), ); assert!(graph .add_dep( diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index 55a51d3bbb2c7..9720db9d8ace3 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -17,6 +17,7 @@ pub use crate::{ CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, SourceRoot, SourceRootId, + TargetLayoutLoadResult, }, }; pub use salsa::{self, Cancelled}; diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs index 93dcd79e120c6..adfae0a1abb38 100644 --- a/crates/hir-ty/src/layout/target.rs +++ b/crates/hir-ty/src/layout/target.rs @@ -12,6 +12,13 @@ pub fn target_data_layout_query( krate: CrateId, ) -> Option> { let crate_graph = db.crate_graph(); - let target_layout = crate_graph[krate].target_layout.as_ref()?; - Some(Arc::new(TargetDataLayout::parse_from_llvm_datalayout_string(&target_layout).ok()?)) + let target_layout = crate_graph[krate].target_layout.as_ref().ok()?; + let res = TargetDataLayout::parse_from_llvm_datalayout_string(&target_layout); + if let Err(_e) = &res { + // FIXME: Print the error here once it implements debug/display + // also logging here is somewhat wrong, but unfortunately this is the earliest place we can + // parse that doesn't impose a dependency to the rust-abi crate for project-model + tracing::error!("Failed to parse target data layout for {krate:?}"); + } + res.ok().map(Arc::new) } diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index 8424d82aa1830..4ead9d4d0a869 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -237,7 +237,7 @@ impl Analysis { Ok(Vec::new()), false, CrateOrigin::CratesIo { repo: None, name: None }, - None, + Err("Analysis::from_single_file has no target layout".into()), ); change.change_file(file_id, Some(Arc::new(text))); change.set_crate_graph(crate_graph); diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs index 40cf47c3f5597..267a73ac5bd88 100644 --- a/crates/project-model/src/target_data_layout.rs +++ b/crates/project-model/src/target_data_layout.rs @@ -1,6 +1,7 @@ //! Runs `rustc --print target-spec-json` to get the target_data_layout. 
use std::process::Command; +use anyhow::Result; use rustc_hash::FxHashMap; use crate::{utf8_stdout, ManifestPath}; @@ -9,7 +10,7 @@ pub(super) fn get( cargo_toml: Option<&ManifestPath>, target: Option<&str>, extra_env: &FxHashMap, -) -> Option { +) -> Result { let output = (|| { if let Some(cargo_toml) = cargo_toml { let mut cmd = Command::new(toolchain::rustc()); @@ -28,13 +29,13 @@ pub(super) fn get( // using unstable cargo features failed, fall back to using plain rustc let mut cmd = Command::new(toolchain::rustc()); cmd.envs(extra_env) - .args(["-Z", "unstable-options", "rustc", "--print", "target-spec-json"]) + .args(["-Z", "unstable-options", "--print", "target-spec-json"]) .env("RUSTC_BOOTSTRAP", "1"); if let Some(target) = target { cmd.args(["--target", target]); } utf8_stdout(cmd) - })() - .ok()?; - Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()) + })()?; + (|| Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))() + .ok_or_else(|| anyhow::format_err!("could not fetch target-spec-json from command output")) } diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 2bb9ebf998bdb..19ee685691107 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -29,7 +29,7 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr rustc_cfg: Vec::new(), cfg_overrides, toolchain: None, - target_layout: None, + target_layout: Err("target_data_layout not loaded".into()), }; to_crate_graph(project_workspace) } @@ -151,7 +151,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -221,7 +223,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -300,7 +304,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -379,7 +385,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "debug_assertions", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -467,7 +475,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() { "feature=use_std", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -553,7 +563,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -625,7 +637,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -706,7 +720,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "test", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -787,7 +803,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() { 
"test", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -875,7 +893,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() { "feature=use_std", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -952,7 +972,9 @@ fn cargo_hello_world_project_model() { "test", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1024,7 +1046,9 @@ fn cargo_hello_world_project_model() { "test", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1105,7 +1129,9 @@ fn cargo_hello_world_project_model() { "test", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1186,7 +1212,9 @@ fn cargo_hello_world_project_model() { "test", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1274,7 +1302,9 @@ fn cargo_hello_world_project_model() { "feature=use_std", ], ), - target_layout: None, + target_layout: Err( + "target_data_layout not loaded", + ), env: Env { entries: { "CARGO_PKG_LICENSE": "", @@ -1343,7 +1373,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + "rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, @@ -1388,7 +1420,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + "rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, @@ -1423,7 +1457,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + "rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, @@ -1458,7 +1494,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + "rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, @@ -1493,7 +1531,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + "rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, @@ -1538,7 +1578,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + "rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, @@ -1573,7 +1615,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + "rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, @@ -1681,7 +1725,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + "rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, @@ -1716,7 +1762,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + 
"rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, @@ -1751,7 +1799,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + "rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, @@ -1786,7 +1836,9 @@ fn rust_project_hello_world_project_model() { potential_cfg_options: CfgOptions( [], ), - target_layout: None, + target_layout: Err( + "rust-project.json projects have no target layout set", + ), env: Env { entries: {}, }, diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index d562066533eee..9aa04eaa75a42 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -7,7 +7,7 @@ use std::{collections::VecDeque, fmt, fs, process::Command, sync::Arc}; use anyhow::{format_err, Context, Result}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, - FileId, LangCrateOrigin, ProcMacroLoadResult, + FileId, LangCrateOrigin, ProcMacroLoadResult, TargetLayoutLoadResult, }; use cfg::{CfgDiff, CfgOptions}; use paths::{AbsPath, AbsPathBuf}; @@ -79,7 +79,7 @@ pub enum ProjectWorkspace { rustc_cfg: Vec, cfg_overrides: CfgOverrides, toolchain: Option, - target_layout: Option, + target_layout: Result, }, /// Project workspace was manually specified using a `rust-project.json` file. Json { project: ProjectJson, sysroot: Option, rustc_cfg: Vec }, @@ -249,6 +249,9 @@ impl ProjectWorkspace { config.target.as_deref(), &config.extra_env, ); + if let Err(e) = &data_layout { + tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace"); + } ProjectWorkspace::Cargo { cargo, build_scripts: WorkspaceBuildScripts::default(), @@ -257,7 +260,7 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, - target_layout: data_layout, + target_layout: data_layout.map_err(|it| it.to_string()), } } }; @@ -540,7 +543,7 @@ impl ProjectWorkspace { project, sysroot, extra_env, - None, + Err("rust-project.json projects have no target layout set".into()), ), ProjectWorkspace::Cargo { cargo, @@ -560,10 +563,19 @@ impl ProjectWorkspace { rustc_cfg.clone(), cfg_overrides, build_scripts, - target_layout.as_deref().map(Arc::from), + match target_layout.as_ref() { + Ok(it) => Ok(Arc::from(it.as_str())), + Err(it) => Err(Arc::from(it.as_str())), + }, ), ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { - detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot, None) + detached_files_to_crate_graph( + rustc_cfg.clone(), + load, + files, + sysroot, + Err("detached file projects have no target layout set".into()), + ) } }; if crate_graph.patch_cfg_if() { @@ -582,7 +594,7 @@ fn project_json_to_crate_graph( project: &ProjectJson, sysroot: &Option, extra_env: &FxHashMap, - target_layout: Option>, + target_layout: TargetLayoutLoadResult, ) -> CrateGraph { let mut crate_graph = CrateGraph::default(); let sysroot_deps = sysroot.as_ref().map(|sysroot| { @@ -686,7 +698,7 @@ fn cargo_to_crate_graph( rustc_cfg: Vec, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, - target_layout: Option>, + target_layout: TargetLayoutLoadResult, ) -> CrateGraph { let _p = profile::span("cargo_to_crate_graph"); let mut crate_graph = CrateGraph::default(); @@ -852,7 +864,7 @@ fn detached_files_to_crate_graph( load: &mut dyn FnMut(&AbsPath) -> Option, detached_files: &[AbsPathBuf], sysroot: &Option, - target_layout: 
Option>, + target_layout: TargetLayoutLoadResult, ) -> CrateGraph { let _p = profile::span("detached_files_to_crate_graph"); let mut crate_graph = CrateGraph::default(); @@ -917,7 +929,7 @@ fn handle_rustc_crates( cfg_options: &CfgOptions, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, - target_layout: Option>, + target_layout: TargetLayoutLoadResult, ) { let mut rustc_pkg_crates = FxHashMap::default(); // The root package of the rustc-dev component is rustc_driver, so we match that @@ -1039,7 +1051,7 @@ fn add_target_crate_root( file_id: FileId, cargo_name: &str, is_proc_macro: bool, - target_layout: Option>, + target_layout: TargetLayoutLoadResult, ) -> CrateId { let edition = pkg.edition; let mut potential_cfg_options = cfg_options.clone(); @@ -1108,7 +1120,7 @@ fn sysroot_to_crate_graph( crate_graph: &mut CrateGraph, sysroot: &Sysroot, rustc_cfg: Vec, - target_layout: Option>, + target_layout: TargetLayoutLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, ) -> (SysrootPublicDeps, Option) { let _p = profile::span("sysroot_to_crate_graph"); From 7385467f2e4652ad32108c16e1e2642289384926 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 19 Jan 2023 21:44:13 +0100 Subject: [PATCH 071/501] Don't respond with a ContentModified while loading the workspace --- crates/rust-analyzer/src/config.rs | 4 ++++ crates/rust-analyzer/src/handlers.rs | 13 ++++++++++--- crates/rust-analyzer/src/main_loop.rs | 20 ++++++++------------ 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 9ea042b423519..fd2f934f9fe28 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -1438,6 +1438,10 @@ impl Config { try_or_def!(self.caps.workspace.as_ref()?.code_lens.as_ref()?.refresh_support?) } + pub fn inlay_hints_refresh(&self) -> bool { + try_or_def!(self.caps.workspace.as_ref()?.inlay_hint.as_ref()?.refresh_support?) + } + pub fn insert_replace_support(&self) -> bool { try_or_def!( self.caps diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 033ef75cca0de..33ca7810667fb 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -1470,7 +1470,8 @@ pub(crate) fn handle_semantic_tokens_full( let mut highlight_config = snap.config.highlighting_config(); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. - highlight_config.syntactic_name_ref_highlighting = !snap.proc_macros_loaded; + highlight_config.syntactic_name_ref_highlighting = + snap.workspaces.is_empty() || !snap.proc_macros_loaded; let highlights = snap.analysis.highlight(highlight_config, file_id)?; let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights); @@ -1493,7 +1494,8 @@ pub(crate) fn handle_semantic_tokens_full_delta( let mut highlight_config = snap.config.highlighting_config(); // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. 
- highlight_config.syntactic_name_ref_highlighting = !snap.proc_macros_loaded; + highlight_config.syntactic_name_ref_highlighting = + snap.workspaces.is_empty() || !snap.proc_macros_loaded; let highlights = snap.analysis.highlight(highlight_config, file_id)?; let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights); @@ -1524,7 +1526,12 @@ pub(crate) fn handle_semantic_tokens_range( let text = snap.analysis.file_text(frange.file_id)?; let line_index = snap.file_line_index(frange.file_id)?; - let highlights = snap.analysis.highlight_range(snap.config.highlighting_config(), frange)?; + let mut highlight_config = snap.config.highlighting_config(); + // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet. + highlight_config.syntactic_name_ref_highlighting = + snap.workspaces.is_empty() || !snap.proc_macros_loaded; + + let highlights = snap.analysis.highlight_range(highlight_config, frange)?; let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights); Ok(Some(semantic_tokens.into())) } diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index b00107c69a108..4290b7760687e 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -307,6 +307,11 @@ impl GlobalState { if self.config.code_lens_refresh() { self.send_request::((), |_, _| ()); } + + // Refresh inlay hints if the client supports it. + if self.config.inlay_hints_refresh() { + self.send_request::((), |_, _| ()); + } } if (!was_quiescent || state_changed || memdocs_added_or_removed) @@ -606,8 +611,8 @@ impl GlobalState { Ok(()) }); - if let RequestDispatcher { req: Some(req), global_state: this } = &mut dispatcher { - if this.shutdown_requested { + match &mut dispatcher { + RequestDispatcher { req: Some(req), global_state: this } if this.shutdown_requested => { this.respond(lsp_server::Response::new_err( req.id.clone(), lsp_server::ErrorCode::InvalidRequest as i32, @@ -615,16 +620,7 @@ impl GlobalState { )); return; } - - // Avoid flashing a bunch of unresolved references during initial load. 
- if this.workspaces.is_empty() && !this.is_quiescent() { - this.respond(lsp_server::Response::new_err( - req.id.clone(), - lsp_server::ErrorCode::ContentModified as i32, - "waiting for cargo metadata or cargo check".to_owned(), - )); - return; - } + _ => (), } dispatcher From ec89fc85a84b0529156782b77311b208e1531c47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maria=20Jos=C3=A9=20Solano?= Date: Thu, 19 Jan 2023 18:21:43 -0800 Subject: [PATCH 072/501] Add limit setting --- crates/ide-completion/src/config.rs | 1 + crates/ide-completion/src/tests.rs | 1 + crates/rust-analyzer/src/config.rs | 3 +++ crates/rust-analyzer/src/integrated_benchmarks.rs | 2 ++ 4 files changed, 7 insertions(+) diff --git a/crates/ide-completion/src/config.rs b/crates/ide-completion/src/config.rs index a0f5e81b4fb6c..d0e3eb4fded5a 100644 --- a/crates/ide-completion/src/config.rs +++ b/crates/ide-completion/src/config.rs @@ -19,6 +19,7 @@ pub struct CompletionConfig { pub insert_use: InsertUseConfig, pub prefer_no_std: bool, pub snippets: Vec, + pub limit: Option } #[derive(Clone, Debug, PartialEq, Eq)] diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs index abe14e48e2236..540b0fd0ef7d9 100644 --- a/crates/ide-completion/src/tests.rs +++ b/crates/ide-completion/src/tests.rs @@ -75,6 +75,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig { skip_glob_imports: true, }, snippets: Vec::new(), + limit: None, }; pub(crate) fn completion_list(ra_fixture: &str) -> String { diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index fd2f934f9fe28..49b4aa615a5a4 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -199,6 +199,8 @@ config_data! { completion_postfix_enable: bool = "true", /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. completion_privateEditable_enable: bool = "false", + /// Maximum number of completions to return. If `None`, the limit is infinite. + completion_limit: Option = "null", /// Custom completion snippets. // NOTE: Keep this list in sync with the feature docs of user snippets. completion_snippets_custom: FxHashMap = r#"{ @@ -1313,6 +1315,7 @@ impl Config { .snippet_support? 
)), snippets: self.snippets.clone(), + limit: self.data.completion_limit, } } diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 405d261db6fb4..ce5fe207a907b 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -146,6 +146,7 @@ fn integrated_completion_benchmark() { }, snippets: Vec::new(), prefer_no_std: false, + limit: None }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; @@ -184,6 +185,7 @@ fn integrated_completion_benchmark() { }, snippets: Vec::new(), prefer_no_std: false, + limit: None }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; From d5fb7a4ba4abf638e293d872b8767a206922e995 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maria=20Jos=C3=A9=20Solano?= Date: Thu, 19 Jan 2023 18:23:21 -0800 Subject: [PATCH 073/501] Limit number of completions --- crates/rust-analyzer/src/to_proto.rs | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 0f0642bb4b568..f5cee5f907a86 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -215,8 +215,19 @@ pub(crate) fn completion_items( let max_relevance = items.iter().map(|it| it.relevance().score()).max().unwrap_or_default(); let mut res = Vec::with_capacity(items.len()); for item in items { - completion_item(&mut res, config, line_index, &tdpp, max_relevance, item) + completion_item(&mut res, config, line_index, &tdpp, max_relevance, item); + + if let Some(limit) = config.completion().limit { + if res.len() >= limit { + break; + } + } } + + if let Some(limit) = config.completion().limit { + res.truncate(limit); + } + res } From d044bc35042881e26427034bbf2c48b63acf187a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maria=20Jos=C3=A9=20Solano?= Date: Thu, 19 Jan 2023 18:33:47 -0800 Subject: [PATCH 074/501] Format code --- crates/ide-completion/src/config.rs | 2 +- crates/rust-analyzer/src/integrated_benchmarks.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/ide-completion/src/config.rs b/crates/ide-completion/src/config.rs index d0e3eb4fded5a..8f6a97e1e09d8 100644 --- a/crates/ide-completion/src/config.rs +++ b/crates/ide-completion/src/config.rs @@ -19,7 +19,7 @@ pub struct CompletionConfig { pub insert_use: InsertUseConfig, pub prefer_no_std: bool, pub snippets: Vec, - pub limit: Option + pub limit: Option, } #[derive(Clone, Debug, PartialEq, Eq)] diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index ce5fe207a907b..7c13d9bad281a 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -146,7 +146,7 @@ fn integrated_completion_benchmark() { }, snippets: Vec::new(), prefer_no_std: false, - limit: None + limit: None, }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; @@ -185,7 +185,7 @@ fn integrated_completion_benchmark() { }, snippets: Vec::new(), prefer_no_std: false, - limit: None + limit: None, }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; From f7fcdb62186a2c7572b50ca893adebe638c7d2f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maria=20Jos=C3=A9=20Solano?= Date: Thu, 19 Jan 2023 18:34:01 -0800 Subject: [PATCH 075/501] 
Order alphabetically --- crates/rust-analyzer/src/config.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 49b4aa615a5a4..78e264dce30fe 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -195,12 +195,12 @@ config_data! { completion_autoself_enable: bool = "true", /// Whether to add parenthesis and argument snippets when completing function. completion_callable_snippets: CallableCompletionDef = "\"fill_arguments\"", + /// Maximum number of completions to return. If `None`, the limit is infinite. + completion_limit: Option = "null", /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc. completion_postfix_enable: bool = "true", /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position. completion_privateEditable_enable: bool = "false", - /// Maximum number of completions to return. If `None`, the limit is infinite. - completion_limit: Option = "null", /// Custom completion snippets. // NOTE: Keep this list in sync with the feature docs of user snippets. completion_snippets_custom: FxHashMap = r#"{ From 2ea703c659fa88f14435ddf7529e8d0f7b893d87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maria=20Jos=C3=A9=20Solano?= Date: Thu, 19 Jan 2023 18:34:19 -0800 Subject: [PATCH 076/501] Update VS Code settings --- docs/user/generated_config.adoc | 5 +++++ editors/code/package.json | 9 +++++++++ 2 files changed, 14 insertions(+) diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 1bfb8a917a803..8bc7dc7938f7a 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -219,6 +219,11 @@ with `self` prefixed to them when inside a method. -- Whether to add parenthesis and argument snippets when completing function. -- +[[rust-analyzer.completion.limit]]rust-analyzer.completion.limit (default: `null`):: ++ +-- +Maximum number of completions to return. If `None`, the limit is infinite. +-- [[rust-analyzer.completion.postfix.enable]]rust-analyzer.completion.postfix.enable (default: `true`):: + -- diff --git a/editors/code/package.json b/editors/code/package.json index 599e9c5a7bf5b..ead4c50f6e25a 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -697,6 +697,15 @@ "Do no snippet completions for callables." ] }, + "rust-analyzer.completion.limit": { + "markdownDescription": "Maximum number of completions to return. 
If `None`, the limit is infinite.", + "default": null, + "type": [ + "null", + "integer" + ], + "minimum": 0 + }, "rust-analyzer.completion.postfix.enable": { "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.", "default": true, From 8fa69f9f7d815632c3b80ea0395dd51c2938525a Mon Sep 17 00:00:00 2001 From: bvanjoi Date: Wed, 18 Jan 2023 20:23:12 +0800 Subject: [PATCH 077/501] feat: array match --- crates/hir/src/lib.rs | 19 ++- .../src/handlers/add_missing_match_arms.rs | 140 ++++++++++++++++-- crates/syntax/src/ast/make.rs | 9 ++ 3 files changed, 157 insertions(+), 11 deletions(-) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 54342f1b7c44f..012812cea24aa 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -50,6 +50,7 @@ use hir_def::{ per_ns::PerNs, resolver::{HasResolver, Resolver}, src::HasSource as _, + type_ref::ConstScalar, AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId, @@ -65,8 +66,9 @@ use hir_ty::{ primitive::UintTy, traits::FnTrait, AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, - GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution, - TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, WhereClause, + ConcreteConst, ConstValue, GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, + Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, + WhereClause, }; use itertools::Itertools; use nameres::diagnostics::DefDiagnosticKind; @@ -3232,6 +3234,19 @@ impl Type { } } + pub fn as_array(&self, _db: &dyn HirDatabase) -> Option<(Type, usize)> { + if let TyKind::Array(ty, len) = &self.ty.kind(Interner) { + match len.data(Interner).value { + ConstValue::Concrete(ConcreteConst { interned: ConstScalar::UInt(len) }) => { + Some((self.derived(ty.clone()), len as usize)) + } + _ => None, + } + } else { + None + } + } + pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator + 'a { self.autoderef_(db).map(move |ty| self.derived(ty)) } diff --git a/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 0461cc790eb30..5d81e8cfeacbc 100644 --- a/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -140,6 +140,31 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) }) .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat)); ((Box::new(missing_pats) as Box>).peekable(), is_non_exhaustive) + } else if let Some((enum_def, len)) = resolve_array_of_enum_def(&ctx.sema, &expr) { + let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate()); + let variants = enum_def.variants(ctx.db()); + + if len.pow(variants.len() as u32) > 256 { + return None; + } + + let variants_of_enums = vec![variants.clone(); len]; + + let missing_pats = variants_of_enums + .into_iter() + .multi_cartesian_product() + .inspect(|_| cov_mark::hit!(add_missing_match_arms_lazy_computation)) + .map(|variants| { + let is_hidden = variants + .iter() + .any(|variant| variant.should_be_hidden(ctx.db(), module.krate())); + let patterns = variants.into_iter().filter_map(|variant| { + build_pat(ctx.db(), 
module, variant.clone(), ctx.config.prefer_no_std) + }); + (ast::Pat::from(make::slice_pat(patterns)), is_hidden) + }) + .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat)); + ((Box::new(missing_pats) as Box>).peekable(), is_non_exhaustive) } else { return None; }; @@ -266,6 +291,9 @@ fn is_variant_missing(existing_pats: &[Pat], var: &Pat) -> bool { fn does_pat_match_variant(pat: &Pat, var: &Pat) -> bool { match (pat, var) { (Pat::WildcardPat(_), _) => true, + (Pat::SlicePat(spat), Pat::SlicePat(svar)) => { + spat.pats().zip(svar.pats()).all(|(p, v)| does_pat_match_variant(&p, &v)) + } (Pat::TuplePat(tpat), Pat::TuplePat(tvar)) => { tpat.fields().zip(tvar.fields()).all(|(p, v)| does_pat_match_variant(&p, &v)) } @@ -280,7 +308,7 @@ enum ExtendedEnum { Enum(hir::Enum), } -#[derive(Eq, PartialEq, Clone, Copy)] +#[derive(Eq, PartialEq, Clone, Copy, Debug)] enum ExtendedVariant { True, False, @@ -340,15 +368,30 @@ fn resolve_tuple_of_enum_def( .tuple_fields(sema.db) .iter() .map(|ty| { - ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() { - Some(Adt::Enum(e)) => Some(lift_enum(e)), - // For now we only handle expansion for a tuple of enums. Here - // we map non-enum items to None and rely on `collect` to - // convert Vec> into Option>. - _ => ty.is_bool().then_some(ExtendedEnum::Bool), + ty.autoderef(sema.db).find_map(|ty| { + match ty.as_adt() { + Some(Adt::Enum(e)) => Some(lift_enum(e)), + // For now we only handle expansion for a tuple of enums. Here + // we map non-enum items to None and rely on `collect` to + // convert Vec> into Option>. + _ => ty.is_bool().then_some(ExtendedEnum::Bool), + } }) }) - .collect() + .collect::>>() + .and_then(|list| if list.is_empty() { None } else { Some(list) }) +} + +fn resolve_array_of_enum_def( + sema: &Semantics<'_, RootDatabase>, + expr: &ast::Expr, +) -> Option<(ExtendedEnum, usize)> { + sema.type_of_expr(expr)?.adjusted().as_array(sema.db).and_then(|(ty, len)| { + ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() { + Some(Adt::Enum(e)) => Some((lift_enum(e), len)), + _ => ty.is_bool().then_some((ExtendedEnum::Bool, len)), + }) + }) } fn build_pat( @@ -377,7 +420,6 @@ fn build_pat( } ast::StructKind::Unit => make::path_pat(path), }; - Some(pat) } ExtendedVariant::True => Some(ast::Pat::from(make::literal_pat("true"))), @@ -573,6 +615,86 @@ fn foo(a: bool) { ) } + #[test] + fn fill_boolean_array() { + check_assist( + add_missing_match_arms, + r#" +fn foo(a: bool) { + match [a]$0 { + } +} +"#, + r#" +fn foo(a: bool) { + match [a] { + $0[true] => todo!(), + [false] => todo!(), + } +} +"#, + ); + + check_assist( + add_missing_match_arms, + r#" +fn foo(a: bool) { + match [a,]$0 { + } +} +"#, + r#" +fn foo(a: bool) { + match [a,] { + $0[true] => todo!(), + [false] => todo!(), + } +} +"#, + ); + + check_assist( + add_missing_match_arms, + r#" +fn foo(a: bool) { + match [a, a]$0 { + [true, true] => todo!(), + } +} +"#, + r#" +fn foo(a: bool) { + match [a, a] { + [true, true] => todo!(), + $0[true, false] => todo!(), + [false, true] => todo!(), + [false, false] => todo!(), + } +} +"#, + ); + + check_assist( + add_missing_match_arms, + r#" +fn foo(a: bool) { + match [a, a]$0 { + } +} +"#, + r#" +fn foo(a: bool) { + match [a, a] { + $0[true, true] => todo!(), + [true, false] => todo!(), + [false, true] => todo!(), + [false, false] => todo!(), + } +} +"#, + ) + } + #[test] fn partial_fill_boolean_tuple() { check_assist( diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index d5b3296980c91..a35983435c7b4 
100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -520,6 +520,15 @@ pub fn literal_pat(lit: &str) -> ast::LiteralPat { } } +pub fn slice_pat(pats: impl IntoIterator) -> ast::SlicePat { + let pats_str = pats.into_iter().join(", "); + return from_text(&format!("[{pats_str}]")); + + fn from_text(text: &str) -> ast::SlicePat { + ast_from_text(&format!("fn f() {{ match () {{{text} => ()}} }}")) + } +} + /// Creates a tuple of patterns from an iterator of patterns. /// /// Invariant: `pats` must be length > 0 From c5b1e3f2ae5c397eb98d86228186a8e533243bfe Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 20 Jan 2023 14:29:12 +0100 Subject: [PATCH 078/501] Fix markdown removal in hover handling whitespace weirdly --- crates/ide/src/hover.rs | 25 +++-- crates/ide/src/hover/render.rs | 57 ++++------- crates/ide/src/hover/tests.rs | 23 +++-- crates/ide/src/markdown_remove.rs | 147 ++++++++++++++++++++++++++- crates/ide/src/static_index.rs | 6 +- crates/rust-analyzer/src/config.rs | 5 +- crates/rust-analyzer/src/handlers.rs | 3 +- 7 files changed, 202 insertions(+), 64 deletions(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index f0c6505ee6ea0..c46c1c1cd1e05 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -19,6 +19,7 @@ use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T} use crate::{ doc_links::token_as_doc_comment, + markdown_remove::remove_markdown, markup::Markup, runnables::{runnable_fn, runnable_mod}, FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav, @@ -26,14 +27,9 @@ use crate::{ #[derive(Clone, Debug, PartialEq, Eq)] pub struct HoverConfig { pub links_in_hover: bool, - pub documentation: Option, + pub documentation: bool, pub keywords: bool, -} - -impl HoverConfig { - fn markdown(&self) -> bool { - matches!(self.documentation, Some(HoverDocFormat::Markdown)) - } + pub format: HoverDocFormat, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -90,12 +86,23 @@ pub struct HoverResult { // image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[] pub(crate) fn hover( db: &RootDatabase, - FileRange { file_id, range }: FileRange, + file_range: FileRange, config: &HoverConfig, ) -> Option> { let sema = &hir::Semantics::new(db); - let file = sema.parse(file_id).syntax().clone(); + let mut res = hover_impl(sema, file_range, config)?; + if let HoverDocFormat::PlainText = config.format { + res.info.markup = remove_markdown(res.info.markup.as_str()).into(); + } + Some(res) +} +fn hover_impl( + sema: &Semantics<'_, RootDatabase>, + FileRange { file_id, range }: FileRange, + config: &HoverConfig, +) -> Option> { + let file = sema.parse(file_id).syntax().clone(); if !range.is_empty() { return hover_ranged(&file, range, sema, config); } diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index cb537d7ef79d7..d7b6264957821 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -26,13 +26,12 @@ use syntax::{ use crate::{ doc_links::{remove_links, rewrite_links}, hover::walk_and_push_ty, - markdown_remove::remove_markdown, HoverAction, HoverConfig, HoverResult, Markup, }; pub(super) fn type_info( sema: &Semantics<'_, RootDatabase>, - config: &HoverConfig, + _config: &HoverConfig, expr_or_pat: &Either, ) -> Option { let TypeInfo { original, adjusted } = match expr_or_pat { @@ -55,19 +54,15 @@ pub(super) fn type_info( let adjusted = adjusted_ty.display(sema.db).to_string(); let 
static_text_diff_len = "Coerced to: ".len() - "Type: ".len(); format!( - "{bt_start}Type: {:>apad$}\nCoerced to: {:>opad$}\n{bt_end}", + "```text\nType: {:>apad$}\nCoerced to: {:>opad$}\n```\n", original, adjusted, apad = static_text_diff_len + adjusted.len().max(original.len()), opad = original.len(), - bt_start = if config.markdown() { "```text\n" } else { "" }, - bt_end = if config.markdown() { "```\n" } else { "" } ) .into() - } else if config.markdown() { - Markup::fenced_block(&original.display(sema.db)) } else { - original.display(sema.db).to_string().into() + Markup::fenced_block(&original.display(sema.db)) }; res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets)); Some(res) @@ -75,7 +70,7 @@ pub(super) fn type_info( pub(super) fn try_expr( sema: &Semantics<'_, RootDatabase>, - config: &HoverConfig, + _config: &HoverConfig, try_expr: &ast::TryExpr, ) -> Option { let inner_ty = sema.type_of_expr(&try_expr.expr()?)?.original; @@ -151,14 +146,12 @@ pub(super) fn try_expr( let ppad = static_text_len_diff.min(0).abs() as usize; res.markup = format!( - "{bt_start}{} Type: {:>pad0$}\nPropagated as: {:>pad1$}\n{bt_end}", + "```text\n{} Type: {:>pad0$}\nPropagated as: {:>pad1$}\n```\n", s, inner_ty, body_ty, pad0 = ty_len_max + tpad, pad1 = ty_len_max + ppad, - bt_start = if config.markdown() { "```text\n" } else { "" }, - bt_end = if config.markdown() { "```\n" } else { "" } ) .into(); Some(res) @@ -166,7 +159,7 @@ pub(super) fn try_expr( pub(super) fn deref_expr( sema: &Semantics<'_, RootDatabase>, - config: &HoverConfig, + _config: &HoverConfig, deref_expr: &ast::PrefixExpr, ) -> Option { let inner_ty = sema.type_of_expr(&deref_expr.expr()?)?.original; @@ -195,15 +188,13 @@ pub(super) fn deref_expr( .max(adjusted.len() + coerced_len) .max(inner.len() + deref_len); format!( - "{bt_start}Dereferenced from: {:>ipad$}\nTo type: {:>apad$}\nCoerced to: {:>opad$}\n{bt_end}", + "```text\nDereferenced from: {:>ipad$}\nTo type: {:>apad$}\nCoerced to: {:>opad$}\n```\n", inner, original, adjusted, ipad = max_len - deref_len, apad = max_len - type_len, opad = max_len - coerced_len, - bt_start = if config.markdown() { "```text\n" } else { "" }, - bt_end = if config.markdown() { "```\n" } else { "" } ) .into() } else { @@ -213,13 +204,11 @@ pub(super) fn deref_expr( let deref_len = "Dereferenced from: ".len(); let max_len = (original.len() + type_len).max(inner.len() + deref_len); format!( - "{bt_start}Dereferenced from: {:>ipad$}\nTo type: {:>apad$}\n{bt_end}", + "```text\nDereferenced from: {:>ipad$}\nTo type: {:>apad$}\n```\n", inner, original, ipad = max_len - deref_len, apad = max_len - type_len, - bt_start = if config.markdown() { "```text\n" } else { "" }, - bt_end = if config.markdown() { "```\n" } else { "" } ) .into() }; @@ -233,7 +222,7 @@ pub(super) fn keyword( config: &HoverConfig, token: &SyntaxToken, ) -> Option { - if !token.kind().is_keyword() || !config.documentation.is_some() || !config.keywords { + if !token.kind().is_keyword() || !config.documentation || !config.keywords { return None; } let parent = token.parent()?; @@ -257,7 +246,7 @@ pub(super) fn keyword( /// i.e. 
`let S {a, ..} = S {a: 1, b: 2}` pub(super) fn struct_rest_pat( sema: &Semantics<'_, RootDatabase>, - config: &HoverConfig, + _config: &HoverConfig, pattern: &RecordPat, ) -> HoverResult { let missing_fields = sema.record_pattern_missing_fields(pattern); @@ -286,11 +275,7 @@ pub(super) fn struct_rest_pat( // get rid of trailing comma s.truncate(s.len() - 2); - if config.markdown() { - Markup::fenced_block(&s) - } else { - s.into() - } + Markup::fenced_block(&s) }; res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets)); res @@ -344,13 +329,8 @@ pub(super) fn process_markup( config: &HoverConfig, ) -> Markup { let markup = markup.as_str(); - let markup = if !config.markdown() { - remove_markdown(markup) - } else if config.links_in_hover { - rewrite_links(db, markup, def) - } else { - remove_links(markup) - }; + let markup = + if config.links_in_hover { rewrite_links(db, markup, def) } else { remove_links(markup) }; Markup::from(markup) } @@ -463,8 +443,9 @@ pub(super) fn definition( Definition::DeriveHelper(it) => (format!("derive_helper {}", it.name(db)), None), }; - let docs = match config.documentation { - Some(_) => docs.or_else(|| { + let docs = docs + .filter(|_| config.documentation) + .or_else(|| { // docs are missing, for assoc items of trait impls try to fall back to the docs of the // original item of the trait let assoc = def.as_assoc_item(db)?; @@ -472,10 +453,8 @@ pub(super) fn definition( let name = Some(assoc.name(db)?); let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?; item.docs(db) - }), - None => None, - }; - let docs = docs.filter(|_| config.documentation.is_some()).map(Into::into); + }) + .map(Into::into); markup(docs, label, mod_path) } diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index f90ca86f9b6c4..db2aaddc0be8d 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -2,7 +2,7 @@ use expect_test::{expect, Expect}; use ide_db::base_db::{FileLoader, FileRange}; use syntax::TextRange; -use crate::{fixture, hover::HoverDocFormat, HoverConfig}; +use crate::{fixture, HoverConfig, HoverDocFormat}; fn check_hover_no_result(ra_fixture: &str) { let (analysis, position) = fixture::position(ra_fixture); @@ -10,8 +10,9 @@ fn check_hover_no_result(ra_fixture: &str) { .hover( &HoverConfig { links_in_hover: true, - documentation: Some(HoverDocFormat::Markdown), + documentation: true, keywords: true, + format: HoverDocFormat::Markdown, }, FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, ) @@ -26,8 +27,9 @@ fn check(ra_fixture: &str, expect: Expect) { .hover( &HoverConfig { links_in_hover: true, - documentation: Some(HoverDocFormat::Markdown), + documentation: true, keywords: true, + format: HoverDocFormat::Markdown, }, FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, ) @@ -47,8 +49,9 @@ fn check_hover_no_links(ra_fixture: &str, expect: Expect) { .hover( &HoverConfig { links_in_hover: false, - documentation: Some(HoverDocFormat::Markdown), + documentation: true, keywords: true, + format: HoverDocFormat::Markdown, }, FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) }, ) @@ -68,8 +71,9 @@ fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) { .hover( &HoverConfig { links_in_hover: true, - documentation: Some(HoverDocFormat::PlainText), + documentation: true, keywords: true, + format: HoverDocFormat::PlainText, }, FileRange { file_id: position.file_id, range: 
TextRange::empty(position.offset) }, ) @@ -89,8 +93,9 @@ fn check_actions(ra_fixture: &str, expect: Expect) { .hover( &HoverConfig { links_in_hover: true, - documentation: Some(HoverDocFormat::Markdown), + documentation: true, keywords: true, + format: HoverDocFormat::Markdown, }, FileRange { file_id, range: position.range_or_empty() }, ) @@ -105,8 +110,9 @@ fn check_hover_range(ra_fixture: &str, expect: Expect) { .hover( &HoverConfig { links_in_hover: false, - documentation: Some(HoverDocFormat::Markdown), + documentation: true, keywords: true, + format: HoverDocFormat::Markdown, }, range, ) @@ -121,8 +127,9 @@ fn check_hover_range_no_results(ra_fixture: &str) { .hover( &HoverConfig { links_in_hover: false, - documentation: Some(HoverDocFormat::Markdown), + documentation: true, keywords: true, + format: HoverDocFormat::Markdown, }, range, ) diff --git a/crates/ide/src/markdown_remove.rs b/crates/ide/src/markdown_remove.rs index 3ec5c629e4f26..07a3fe3f02b10 100644 --- a/crates/ide/src/markdown_remove.rs +++ b/crates/ide/src/markdown_remove.rs @@ -14,9 +14,154 @@ pub(crate) fn remove_markdown(markdown: &str) -> String { Event::SoftBreak | Event::HardBreak | Event::Rule | Event::End(Tag::CodeBlock(_)) => { out.push('\n') } - _ => {} + Event::End(Tag::Paragraph) => { + out.push('\n'); + out.push('\n'); + } + Event::Start(_) + | Event::End(_) + | Event::Html(_) + | Event::FootnoteReference(_) + | Event::TaskListMarker(_) => (), } } + if let Some(p) = out.rfind(|c| c != '\n') { + out.drain(p + 1..); + } + out } + +#[cfg(test)] +mod tests { + use expect_test::expect; + + use super::*; + + #[test] + fn smoke_test() { + let res = remove_markdown( + r##" +A function or function pointer. + +Functions are the primary way code is executed within Rust. Function blocks, usually just +called functions, can be defined in a variety of different places and be assigned many +different attributes and modifiers. + +Standalone functions that just sit within a module not attached to anything else are common, +but most functions will end up being inside [`impl`] blocks, either on another type itself, or +as a trait impl for that type. + +```rust +fn standalone_function() { + // code +} + +pub fn public_thing(argument: bool) -> String { + // code + # "".to_string() +} + +struct Thing { + foo: i32, +} + +impl Thing { + pub fn new() -> Self { + Self { + foo: 42, + } + } +} +``` + +In addition to presenting fixed types in the form of `fn name(arg: type, ..) -> return_type`, +functions can also declare a list of type parameters along with trait bounds that they fall +into. + +```rust +fn generic_function(x: T) -> (T, T, T) { + (x.clone(), x.clone(), x.clone()) +} + +fn generic_where(x: T) -> T + where T: std::ops::Add + Copy +{ + x + x + x +} +``` + +Declaring trait bounds in the angle brackets is functionally identical to using a `where` +clause. It's up to the programmer to decide which works better in each situation, but `where` +tends to be better when things get longer than one line. + +Along with being made public via `pub`, `fn` can also have an [`extern`] added for use in +FFI. + +For more information on the various types of functions and how they're used, consult the [Rust +book] or the [Reference]. + +[`impl`]: keyword.impl.html +[`extern`]: keyword.extern.html +[Rust book]: ../book/ch03-03-how-functions-work.html +[Reference]: ../reference/items/functions.html +"##, + ); + expect![[r#" + A function or function pointer. + + Functions are the primary way code is executed within Rust. 
Function blocks, usually just + called functions, can be defined in a variety of different places and be assigned many + different attributes and modifiers. + + Standalone functions that just sit within a module not attached to anything else are common, + but most functions will end up being inside impl blocks, either on another type itself, or + as a trait impl for that type. + + fn standalone_function() { + // code + } + + pub fn public_thing(argument: bool) -> String { + // code + # "".to_string() + } + + struct Thing { + foo: i32, + } + + impl Thing { + pub fn new() -> Self { + Self { + foo: 42, + } + } + } + + In addition to presenting fixed types in the form of fn name(arg: type, ..) -> return_type, + functions can also declare a list of type parameters along with trait bounds that they fall + into. + + fn generic_function(x: T) -> (T, T, T) { + (x.clone(), x.clone(), x.clone()) + } + + fn generic_where(x: T) -> T + where T: std::ops::Add + Copy + { + x + x + x + } + + Declaring trait bounds in the angle brackets is functionally identical to using a where + clause. It's up to the programmer to decide which works better in each situation, but where + tends to be better when things get longer than one line. + + Along with being made public via pub, fn can also have an extern added for use in + FFI. + + For more information on the various types of functions and how they're used, consult the Rust + book or the Reference."#]].assert_eq(&res); + } +} diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 7ada4f1be07f3..3f7f6885f611e 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -16,8 +16,7 @@ use crate::{ inlay_hints::AdjustmentHintsMode, moniker::{def_to_moniker, MonikerResult}, parent_module::crates_for, - Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig, - TryToNav, + Analysis, Fold, HoverConfig, HoverResult, InlayHint, InlayHintsConfig, TryToNav, }; /// A static representation of fully analyzed source code. 
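Taken together with the `hover()` change above, Markdown stripping now happens in one place: `static_index` keeps `HoverDocFormat::Markdown`, while clients that negotiate plain text get the output of `remove_markdown`, which after this commit separates paragraphs with a blank line and trims trailing newlines. A small usage sketch of that intended behaviour — the test name and the input/expected strings are illustrative assumptions (written as if placed next to the smoke test in `markdown_remove.rs`), not cases copied from the diff:

```rust
#[test]
fn paragraph_breaks_and_trailing_newlines() {
    // Inline markup is dropped, paragraph ends become blank lines,
    // and trailing newlines are trimmed from the final result.
    let plain = remove_markdown("**Hello**\n\nSecond *paragraph*.\n");
    assert_eq!(plain, "Hello\n\nSecond paragraph.");
}
```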
@@ -137,8 +136,9 @@ impl StaticIndex<'_> { }); let hover_config = HoverConfig { links_in_hover: true, - documentation: Some(HoverDocFormat::Markdown), + documentation: true, keywords: true, + format: crate::HoverDocFormat::Markdown, }; let tokens = tokens.filter(|token| { matches!( diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index fd2f934f9fe28..c4d9ad7dff573 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -1393,7 +1393,8 @@ impl Config { pub fn hover(&self) -> HoverConfig { HoverConfig { links_in_hover: self.data.hover_links_enable, - documentation: self.data.hover_documentation_enable.then(|| { + documentation: self.data.hover_documentation_enable, + format: { let is_markdown = try_or_def!(self .caps .text_document @@ -1409,7 +1410,7 @@ impl Config { } else { HoverDocFormat::PlainText } - }), + }, keywords: self.data.hover_documentation_keywords_enable, } } diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs index 33ca7810667fb..4e08bd0a724a2 100644 --- a/crates/rust-analyzer/src/handlers.rs +++ b/crates/rust-analyzer/src/handlers.rs @@ -936,8 +936,7 @@ pub(crate) fn handle_hover( let line_index = snap.file_line_index(file_range.file_id)?; let range = to_proto::range(&line_index, info.range); - let markup_kind = - snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind); + let markup_kind = snap.config.hover().format; let hover = lsp_ext::Hover { hover: lsp_types::Hover { contents: HoverContents::Markup(to_proto::markup_content( From a542bd46bf22a56dd7b519de515949fef6976b42 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 20 Jan 2023 16:30:08 +0100 Subject: [PATCH 079/501] Split out some hover functions --- crates/ide/src/hover.rs | 112 ++++++++++++++++++--------------- crates/ide/src/hover/render.rs | 108 ++++++++++++++++++++++--------- crates/ide/src/hover/tests.rs | 90 ++++++++++++++++++++++++++ 3 files changed, 230 insertions(+), 80 deletions(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index c46c1c1cd1e05..4a76ac9320d08 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -86,30 +86,38 @@ pub struct HoverResult { // image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[] pub(crate) fn hover( db: &RootDatabase, - file_range: FileRange, + frange @ FileRange { file_id, range }: FileRange, config: &HoverConfig, ) -> Option> { let sema = &hir::Semantics::new(db); - let mut res = hover_impl(sema, file_range, config)?; + let file = sema.parse(file_id).syntax().clone(); + let mut res = if range.is_empty() { + hover_simple(sema, FilePosition { file_id, offset: range.start() }, file, config) + } else { + hover_ranged(sema, frange, file, config) + }?; + if let HoverDocFormat::PlainText = config.format { res.info.markup = remove_markdown(res.info.markup.as_str()).into(); } Some(res) } -fn hover_impl( +fn hover_simple( sema: &Semantics<'_, RootDatabase>, - FileRange { file_id, range }: FileRange, + FilePosition { file_id, offset }: FilePosition, + file: SyntaxNode, config: &HoverConfig, ) -> Option> { - let file = sema.parse(file_id).syntax().clone(); - if !range.is_empty() { - return hover_ranged(&file, range, sema, config); - } - let offset = range.start(); - let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind { - IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 4, + IDENT + | 
INT_NUMBER + | LIFETIME_IDENT + | T![self] + | T![super] + | T![crate] + | T![Self] + | T![_] => 4, // index and prefix ops T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3, kind if kind.is_keyword() => 2, @@ -142,19 +150,18 @@ fn hover_impl( } else { sema.descend_into_macros_with_same_text(original_token.clone()) }; + let descended = || descended.iter(); - // try lint hover - let result = descended - .iter() + let result = descended() + // try lint hover .find_map(|token| { // FIXME: Definition should include known lints and the like instead of having this special case here let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; render::try_for_lint(&attr, token) }) - // try item definitions + // try definitions .or_else(|| { - descended - .iter() + descended() .filter_map(|token| { let node = token.parent()?; let class = IdentClass::classify_token(sema, token)?; @@ -175,10 +182,12 @@ fn hover_impl( }) }) // try keywords - .or_else(|| descended.iter().find_map(|token| render::keyword(sema, config, token))) - // try rest item hover + .or_else(|| descended().find_map(|token| render::keyword(sema, config, token))) + // try _ hovers + .or_else(|| descended().find_map(|token| render::underscore(sema, config, token))) + // try rest pattern hover .or_else(|| { - descended.iter().find_map(|token| { + descended().find_map(|token| { if token.kind() != DOT2 { return None; } @@ -201,39 +210,13 @@ fn hover_impl( }) // fallback to type hover if there aren't any other suggestions // this finds its own range instead of using the closest token's range - .or_else(|| { - descended.iter().find_map(|token| hover_type_fallback(sema, config, token, token)) - }) -} - -pub(crate) fn hover_for_definition( - sema: &Semantics<'_, RootDatabase>, - file_id: FileId, - definition: Definition, - node: &SyntaxNode, - config: &HoverConfig, -) -> Option { - let famous_defs = match &definition { - Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())), - _ => None, - }; - render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| { - HoverResult { - markup: render::process_markup(sema.db, definition, &markup, config), - actions: show_implementations_action(sema.db, definition) - .into_iter() - .chain(show_fn_references_action(sema.db, definition)) - .chain(runnable_action(sema, definition, file_id)) - .chain(goto_type_action_for_def(sema.db, definition)) - .collect(), - } - }) + .or_else(|| descended().find_map(|token| hover_type_fallback(sema, config, token, token))) } fn hover_ranged( - file: &SyntaxNode, - range: syntax::TextRange, sema: &Semantics<'_, RootDatabase>, + FileRange { range, .. 
}: FileRange, + file: SyntaxNode, config: &HoverConfig, ) -> Option> { // FIXME: make this work in attributes @@ -248,7 +231,7 @@ fn hover_ranged( } _ => None, }; - let res = res.or_else(|| render::type_info(sema, config, &expr_or_pat)); + let res = res.or_else(|| render::type_info_of(sema, config, &expr_or_pat)); res.map(|it| { let range = match expr_or_pat { Either::Left(it) => it.syntax().text_range(), @@ -258,6 +241,33 @@ fn hover_ranged( }) } +pub(crate) fn hover_for_definition( + sema: &Semantics<'_, RootDatabase>, + file_id: FileId, + definition: Definition, + node: &SyntaxNode, + config: &HoverConfig, +) -> Option { + let famous_defs = match &definition { + Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())), + _ => None, + }; + render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| { + HoverResult { + markup: render::process_markup(sema.db, definition, &markup, config), + actions: [ + show_implementations_action(sema.db, definition), + show_fn_references_action(sema.db, definition), + runnable_action(sema, definition, file_id), + goto_type_action_for_def(sema.db, definition), + ] + .into_iter() + .flatten() + .collect(), + } + }) +} + fn hover_type_fallback( sema: &Semantics<'_, RootDatabase>, config: &HoverConfig, @@ -282,7 +292,7 @@ fn hover_type_fallback( } }; - let res = render::type_info(sema, config, &expr_or_pat)?; + let res = render::type_info_of(sema, config, &expr_or_pat)?; let range = sema .original_range_opt(&node) diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index d7b6264957821..22611cfb892f8 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -29,7 +29,7 @@ use crate::{ HoverAction, HoverConfig, HoverResult, Markup, }; -pub(super) fn type_info( +pub(super) fn type_info_of( sema: &Semantics<'_, RootDatabase>, _config: &HoverConfig, expr_or_pat: &Either, @@ -38,34 +38,7 @@ pub(super) fn type_info( Either::Left(expr) => sema.type_of_expr(expr)?, Either::Right(pat) => sema.type_of_pat(pat)?, }; - - let mut res = HoverResult::default(); - let mut targets: Vec = Vec::new(); - let mut push_new_def = |item: hir::ModuleDef| { - if !targets.contains(&item) { - targets.push(item); - } - }; - walk_and_push_ty(sema.db, &original, &mut push_new_def); - - res.markup = if let Some(adjusted_ty) = adjusted { - walk_and_push_ty(sema.db, &adjusted_ty, &mut push_new_def); - let original = original.display(sema.db).to_string(); - let adjusted = adjusted_ty.display(sema.db).to_string(); - let static_text_diff_len = "Coerced to: ".len() - "Type: ".len(); - format!( - "```text\nType: {:>apad$}\nCoerced to: {:>opad$}\n```\n", - original, - adjusted, - apad = static_text_diff_len + adjusted.len().max(original.len()), - opad = original.len(), - ) - .into() - } else { - Markup::fenced_block(&original.display(sema.db)) - }; - res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets)); - Some(res) + type_info(sema, _config, original, adjusted) } pub(super) fn try_expr( @@ -217,6 +190,48 @@ pub(super) fn deref_expr( Some(res) } +pub(super) fn underscore( + sema: &Semantics<'_, RootDatabase>, + config: &HoverConfig, + token: &SyntaxToken, +) -> Option { + if token.kind() != T![_] { + return None; + } + let parent = token.parent()?; + let _it = match_ast! 
{ + match parent { + ast::InferType(it) => it, + ast::UnderscoreExpr(it) => return type_info_of(sema, config, &Either::Left(ast::Expr::UnderscoreExpr(it))), + ast::WildcardPat(it) => return type_info_of(sema, config, &Either::Right(ast::Pat::WildcardPat(it))), + _ => return None, + } + }; + // let it = infer_type.syntax().parent()?; + // match_ast! { + // match it { + // ast::LetStmt(_it) => (), + // ast::Param(_it) => (), + // ast::RetType(_it) => (), + // ast::TypeArg(_it) => (), + + // ast::CastExpr(_it) => (), + // ast::ParenType(_it) => (), + // ast::TupleType(_it) => (), + // ast::PtrType(_it) => (), + // ast::RefType(_it) => (), + // ast::ArrayType(_it) => (), + // ast::SliceType(_it) => (), + // ast::ForType(_it) => (), + // _ => return None, + // } + // } + + // FIXME: https://github.com/rust-lang/rust-analyzer/issues/11762, this currently always returns Unknown + // type_info(sema, config, sema.resolve_type(&ast::Type::InferType(it))?, None) + None +} + pub(super) fn keyword( sema: &Semantics<'_, RootDatabase>, config: &HoverConfig, @@ -458,6 +473,41 @@ pub(super) fn definition( markup(docs, label, mod_path) } +fn type_info( + sema: &Semantics<'_, RootDatabase>, + _config: &HoverConfig, + original: hir::Type, + adjusted: Option, +) -> Option { + let mut res = HoverResult::default(); + let mut targets: Vec = Vec::new(); + let mut push_new_def = |item: hir::ModuleDef| { + if !targets.contains(&item) { + targets.push(item); + } + }; + walk_and_push_ty(sema.db, &original, &mut push_new_def); + + res.markup = if let Some(adjusted_ty) = adjusted { + walk_and_push_ty(sema.db, &adjusted_ty, &mut push_new_def); + let original = original.display(sema.db).to_string(); + let adjusted = adjusted_ty.display(sema.db).to_string(); + let static_text_diff_len = "Coerced to: ".len() - "Type: ".len(); + format!( + "```text\nType: {:>apad$}\nCoerced to: {:>opad$}\n```\n", + original, + adjusted, + apad = static_text_diff_len + adjusted.len().max(original.len()), + opad = original.len(), + ) + .into() + } else { + Markup::fenced_block(&original.display(sema.db)) + }; + res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets)); + Some(res) +} + fn render_builtin_attr(db: &RootDatabase, attr: hir::BuiltinAttr) -> Option { let name = attr.name(db); let desc = format!("#[{name}]"); diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index db2aaddc0be8d..2930aab68a987 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -5592,3 +5592,93 @@ fn main() { "#]], ); } + +#[test] +fn hover_underscore_pat() { + check( + r#" +fn main() { + let _$0 = 0; +} +"#, + expect![[r#" + *_* + ```rust + i32 + ``` + "#]], + ); + check( + r#" +fn main() { + let (_$0,) = (0,); +} +"#, + expect![[r#" + *_* + ```rust + i32 + ``` + "#]], + ); +} + +#[test] +fn hover_underscore_expr() { + check( + r#" +fn main() { + _$0 = 0; +} +"#, + expect![[r#" + *_* + ```rust + i32 + ``` + "#]], + ); + check( + r#" +fn main() { + (_$0,) = (0,); +} +"#, + expect![[r#" + *_* + ```rust + i32 + ``` + "#]], + ); +} + +#[test] +fn hover_underscore_type() { + check( + r#" +fn main() { + let x: _$0 = 0; +} +"#, + expect![[r#" + *_* + ```rust + {unknown} + ``` + "#]], + ); + check( + r#" +fn main() { + let x: (_$0,) = (0,); +} +"#, + expect![[r#" + *_* + ```rust + {unknown} + ``` + "#]], + ); +} From 69ffbe25a1ed473678ac142ad5ce852f82d041c6 Mon Sep 17 00:00:00 2001 From: Alex Vasilev Date: Sat, 21 Jan 2023 00:17:23 +0530 Subject: [PATCH 080/501] feat: add braces assist --- 
crates/ide-assists/src/handlers/add_braces.rs | 157 ++++++++++++++++++ crates/ide-assists/src/lib.rs | 2 + 2 files changed, 159 insertions(+) create mode 100644 crates/ide-assists/src/handlers/add_braces.rs diff --git a/crates/ide-assists/src/handlers/add_braces.rs b/crates/ide-assists/src/handlers/add_braces.rs new file mode 100644 index 0000000000000..90bde1bab9b2b --- /dev/null +++ b/crates/ide-assists/src/handlers/add_braces.rs @@ -0,0 +1,157 @@ +use syntax::{ + ast::{self, edit::AstNodeEdit, make}, + AstNode, +}; + +use crate::{AssistContext, AssistId, AssistKind, Assists}; + +enum ParentType { + MatchArmExpr, + ClosureExpr, +} + +fn get_replacement_node(ctx: &AssistContext<'_>) -> Option<(ParentType, ast::Expr)> { + if let Some(match_arm) = ctx.find_node_at_offset::() { + let match_arm_expr = match_arm.syntax().children().find_map(ast::Expr::cast)?; + + if matches!(match_arm_expr, ast::Expr::BlockExpr(_)) { + return None; + } + + return Some((ParentType::MatchArmExpr, match_arm_expr)); + } else if let Some(closure_expr) = ctx.find_node_at_offset::() { + let body = closure_expr.body()?; + + if matches!(body, ast::Expr::BlockExpr(_)) { + return None; + } + + return Some((ParentType::ClosureExpr, body)); + } + + None +} + +// Assist: add_braces +// +// Adds braces to lamda and match arm expressions +// +// ``` +// fn foo(n: i32) -> i32 { +// match n { +// 1 =>$0 n + 1, +// _ => 0 +// } +// } +// ``` +// -> +// ``` +// fn foo(n: i32) -> i32 { +// match n { +// 1 => { +// n + 1 +// }, +// _ => 0 +// } +// } +// ``` +pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let (expr_type, expr) = get_replacement_node(ctx)?; + + acc.add( + AssistId("wrap_with_braces", AssistKind::RefactorRewrite), + match expr_type { + ParentType::ClosureExpr => "Add braces to lamda expression", + ParentType::MatchArmExpr => "Add braces to arm expression", + }, + expr.syntax().text_range(), + |builder| { + let block_expr = AstNodeEdit::indent( + &make::block_expr(None, Some(expr.clone())), + AstNodeEdit::indent_level(&expr), + ); + + builder.replace(expr.syntax().text_range(), block_expr.syntax().text()); + }, + ); + + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn suggest_add_braces_for_closure() { + check_assist( + add_braces, + r#" +fn foo() { + t(|n|$0 n + 100); +} +"#, + r#" +fn foo() { + t(|n| { + n + 100 + }); +} +"#, + ); + } + + #[test] + fn no_assist_for_closures_with_braces() { + check_assist_not_applicable( + add_braces, + r#" +fn foo() { + t(|n|$0 { n + 100 }); +} +"#, + ); + } + + #[test] + fn suggest_add_braces_for_match() { + check_assist( + add_braces, + r#" +fn foo() { + match n { + Some(n) $0=> 29, + _ => () + }; +} +"#, + r#" +fn foo() { + match n { + Some(n) => { + 29 + }, + _ => () + }; +} +"#, + ); + } + + #[test] + fn no_assist_for_match_with_braces() { + check_assist_not_applicable( + add_braces, + r#" +fn foo() { + match n { + Some(n) $0=> { return 29; }, + _ => () + }; +} +"#, + ); + } +} diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 546ef96260f2c..276cf5f5dd018 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -106,6 +106,7 @@ mod handlers { pub(crate) type Handler = fn(&mut Assists, &AssistContext<'_>) -> Option<()>; + mod add_braces; mod add_explicit_type; mod add_label_to_loop; mod add_lifetime_to_type; @@ -209,6 +210,7 @@ mod handlers { pub(crate) fn all() -> &'static [Handler] { &[ // 
These are alphabetic for the foolish consistency + add_braces::add_braces, add_explicit_type::add_explicit_type, add_label_to_loop::add_label_to_loop, add_missing_match_arms::add_missing_match_arms, From 2901e2803fa5d7f1e0bd084d0f3265a0250f6f32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Fri, 20 Jan 2023 20:54:37 +0200 Subject: [PATCH 081/501] Bump anyhow --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 25c33bb653b4a..654853bcc3c81 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -37,9 +37,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.65" +version = "1.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602" +checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" [[package]] name = "anymap" From 5691c20bb4887e0b22cd795dca22ef1888ef05fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Fri, 20 Jan 2023 20:57:27 +0200 Subject: [PATCH 082/501] Bump serde and serde_json --- Cargo.lock | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 654853bcc3c81..258e202cc7ee7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1563,18 +1563,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.145" +version = "1.0.152" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" +checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.145" +version = "1.0.152" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" +checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" dependencies = [ "proc-macro2", "quote", @@ -1583,9 +1583,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.86" +version = "1.0.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074" +checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" dependencies = [ "indexmap", "itoa", @@ -1660,9 +1660,9 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.102" +version = "1.0.107" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1" +checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" dependencies = [ "proc-macro2", "quote", From ddb2f8760b9834e11c9021637d2a214c7f1fd8d5 Mon Sep 17 00:00:00 2001 From: Alex Vasilev Date: Sat, 21 Jan 2023 00:30:38 +0530 Subject: [PATCH 083/501] typo fix --- crates/ide-assists/src/handlers/add_braces.rs | 58 +++++++++---------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/crates/ide-assists/src/handlers/add_braces.rs b/crates/ide-assists/src/handlers/add_braces.rs index 90bde1bab9b2b..f6213a6a88ece 100644 --- a/crates/ide-assists/src/handlers/add_braces.rs +++ b/crates/ide-assists/src/handlers/add_braces.rs @@ -5,36 +5,9 @@ use syntax::{ use crate::{AssistContext, AssistId, AssistKind, Assists}; -enum ParentType { - MatchArmExpr, - ClosureExpr, -} - -fn get_replacement_node(ctx: &AssistContext<'_>) -> Option<(ParentType, ast::Expr)> { - if 
let Some(match_arm) = ctx.find_node_at_offset::() { - let match_arm_expr = match_arm.syntax().children().find_map(ast::Expr::cast)?; - - if matches!(match_arm_expr, ast::Expr::BlockExpr(_)) { - return None; - } - - return Some((ParentType::MatchArmExpr, match_arm_expr)); - } else if let Some(closure_expr) = ctx.find_node_at_offset::() { - let body = closure_expr.body()?; - - if matches!(body, ast::Expr::BlockExpr(_)) { - return None; - } - - return Some((ParentType::ClosureExpr, body)); - } - - None -} - // Assist: add_braces // -// Adds braces to lamda and match arm expressions +// Adds braces to lambda and match arm expressions. // // ``` // fn foo(n: i32) -> i32 { @@ -61,7 +34,7 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( acc.add( AssistId("wrap_with_braces", AssistKind::RefactorRewrite), match expr_type { - ParentType::ClosureExpr => "Add braces to lamda expression", + ParentType::ClosureExpr => "Add braces to lambda expression", ParentType::MatchArmExpr => "Add braces to arm expression", }, expr.syntax().text_range(), @@ -78,6 +51,33 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( Some(()) } +enum ParentType { + MatchArmExpr, + ClosureExpr, +} + +fn get_replacement_node(ctx: &AssistContext<'_>) -> Option<(ParentType, ast::Expr)> { + if let Some(match_arm) = ctx.find_node_at_offset::() { + let match_arm_expr = match_arm.syntax().children().find_map(ast::Expr::cast)?; + + if matches!(match_arm_expr, ast::Expr::BlockExpr(_)) { + return None; + } + + return Some((ParentType::MatchArmExpr, match_arm_expr)); + } else if let Some(closure_expr) = ctx.find_node_at_offset::() { + let body = closure_expr.body()?; + + if matches!(body, ast::Expr::BlockExpr(_)) { + return None; + } + + return Some((ParentType::ClosureExpr, body)); + } + + None +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; From 1ab58b190e03534ec13bbe0828e69a66517eaf9a Mon Sep 17 00:00:00 2001 From: Alex Vasilev Date: Sat, 21 Jan 2023 00:42:29 +0530 Subject: [PATCH 084/501] cargo test --- crates/ide-assists/src/tests/generated.rs | 25 +++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 16a06b60de901..8a25e1f648ae0 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -2,6 +2,31 @@ use super::check_doc_test; +#[test] +fn doctest_add_braces() { + check_doc_test( + "add_braces", + r#####" +fn foo(n: i32) -> i32 { + match n { + 1 =>$0 n + 1, + _ => 0 + } +} +"#####, + r#####" +fn foo(n: i32) -> i32 { + match n { + 1 => { + n + 1 + }, + _ => 0 + } +} +"#####, + ) +} + #[test] fn doctest_add_explicit_type() { check_doc_test( From 10c868686df84794dc449972d173a29d53b1b116 Mon Sep 17 00:00:00 2001 From: Lukas Markeffsky <@> Date: Thu, 19 Jan 2023 21:45:38 +0100 Subject: [PATCH 085/501] fix overlapping spans for `clippy::uninlined_format_args` --- clippy_lints/src/format_args.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/clippy_lints/src/format_args.rs b/clippy_lints/src/format_args.rs index 043112bbc9596..bb7fa3087b748 100644 --- a/clippy_lints/src/format_args.rs +++ b/clippy_lints/src/format_args.rs @@ -311,6 +311,10 @@ fn check_uninlined_args( // in those cases, make the code suggestion hidden let multiline_fix = fixes.iter().any(|(span, _)| cx.sess().source_map().is_multiline(*span)); + // Suggest removing each argument only once, for example in 
`format!("{0} {0}", arg)`. + fixes.sort_unstable_by_key(|(span, _)| *span); + fixes.dedup_by_key(|(span, _)| *span); + span_lint_and_then( cx, UNINLINED_FORMAT_ARGS, From dd9dcca7a2c6e7b15466265b6b6fd88336bfef59 Mon Sep 17 00:00:00 2001 From: Alex Vasilev Date: Sat, 21 Jan 2023 00:47:02 +0530 Subject: [PATCH 086/501] assist id fix --- crates/ide-assists/src/handlers/add_braces.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/add_braces.rs b/crates/ide-assists/src/handlers/add_braces.rs index f6213a6a88ece..ccdac3074e605 100644 --- a/crates/ide-assists/src/handlers/add_braces.rs +++ b/crates/ide-assists/src/handlers/add_braces.rs @@ -32,7 +32,7 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( let (expr_type, expr) = get_replacement_node(ctx)?; acc.add( - AssistId("wrap_with_braces", AssistKind::RefactorRewrite), + AssistId("add_braces", AssistKind::RefactorRewrite), match expr_type { ParentType::ClosureExpr => "Add braces to lambda expression", ParentType::MatchArmExpr => "Add braces to arm expression", From 4685b97f747c01e22e9d6750948621f2851b180f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 20 Jan 2023 16:36:24 +0100 Subject: [PATCH 087/501] Remove hover fallback in favor of ranged hover --- crates/ide/src/hover.rs | 54 ++++-------------- crates/ide/src/hover/tests.rs | 104 ++++++---------------------------- 2 files changed, 27 insertions(+), 131 deletions(-) diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 4a76ac9320d08..2058a4f5f190a 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -15,7 +15,7 @@ use ide_db::{ FxIndexSet, RootDatabase, }; use itertools::Itertools; -use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T}; +use syntax::{ast, AstNode, SyntaxKind::*, SyntaxNode, T}; use crate::{ doc_links::token_as_doc_comment, @@ -203,14 +203,10 @@ fn hover_simple( }) }); - result - .map(|mut res: HoverResult| { - res.actions = dedupe_or_merge_hover_actions(res.actions); - RangeInfo::new(original_token.text_range(), res) - }) - // fallback to type hover if there aren't any other suggestions - // this finds its own range instead of using the closest token's range - .or_else(|| descended().find_map(|token| hover_type_fallback(sema, config, token, token))) + result.map(|mut res: HoverResult| { + res.actions = dedupe_or_merge_hover_actions(res.actions); + RangeInfo::new(original_token.text_range(), res) + }) } fn hover_ranged( @@ -220,8 +216,11 @@ fn hover_ranged( config: &HoverConfig, ) -> Option> { // FIXME: make this work in attributes - let expr_or_pat = - file.covering_element(range).ancestors().find_map(Either::::cast)?; + let expr_or_pat = file + .covering_element(range) + .ancestors() + .take_while(|it| ast::MacroCall::can_cast(it.kind()) || !ast::Item::can_cast(it.kind())) + .find_map(Either::::cast)?; let res = match &expr_or_pat { Either::Left(ast::Expr::TryExpr(try_expr)) => render::try_expr(sema, config, try_expr), Either::Left(ast::Expr::PrefixExpr(prefix_expr)) @@ -268,39 +267,6 @@ pub(crate) fn hover_for_definition( }) } -fn hover_type_fallback( - sema: &Semantics<'_, RootDatabase>, - config: &HoverConfig, - token: &SyntaxToken, - original_token: &SyntaxToken, -) -> Option> { - let node = - token.parent_ancestors().take_while(|it| !ast::Item::can_cast(it.kind())).find(|n| { - ast::Expr::can_cast(n.kind()) - || ast::Pat::can_cast(n.kind()) - || ast::Type::can_cast(n.kind()) - })?; - - let expr_or_pat = match_ast! 
{ - match node { - ast::Expr(it) => Either::Left(it), - ast::Pat(it) => Either::Right(it), - // If this node is a MACRO_CALL, it means that `descend_into_macros_many` failed to resolve. - // (e.g expanding a builtin macro). So we give up here. - ast::MacroCall(_it) => return None, - _ => return None, - } - }; - - let res = render::type_info_of(sema, config, &expr_or_pat)?; - - let range = sema - .original_range_opt(&node) - .map(|frange| frange.range) - .unwrap_or_else(|| original_token.text_range()); - Some(RangeInfo::new(range, res)) -} - fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option { fn to_action(nav_target: NavigationTarget) -> HoverAction { HoverAction::Implementation(FilePosition { diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 2930aab68a987..2830212add8eb 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -213,25 +213,6 @@ m!(ab$0c); ); } -#[test] -fn hover_shows_type_of_an_expression() { - check( - r#" -pub fn foo() -> u32 { 1 } - -fn main() { - let foo_test = foo()$0; -} -"#, - expect![[r#" - *foo()* - ```rust - u32 - ``` - "#]], - ); -} - #[test] fn hover_remove_markdown_if_configured() { check_hover_no_markdown( @@ -239,12 +220,14 @@ fn hover_remove_markdown_if_configured() { pub fn foo() -> u32 { 1 } fn main() { - let foo_test = foo()$0; + let foo_test = foo$0(); } "#, expect![[r#" - *foo()* - u32 + *foo* + test + + pub fn foo() -> u32 "#]], ); } @@ -304,33 +287,6 @@ fn main() { let foo_test = fo$0o(); } "#]], ); - // Multiple candidates but results are ambiguous. - check( - r#" -//- /a.rs -pub fn foo() -> u32 { 1 } - -//- /b.rs -pub fn foo() -> &str { "" } - -//- /c.rs -pub fn foo(a: u32, b: u32) {} - -//- /main.rs -mod a; -mod b; -mod c; - -fn main() { let foo_test = fo$0o(); } - "#, - expect![[r#" - *foo* - ```rust - {unknown} - ``` - "#]], - ); - // Use literal `crate` in path check( r#" @@ -1194,33 +1150,19 @@ fn test_hover_through_func_in_macro_recursive() { macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } } macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } } fn bar() -> u32 { 0 } -fn foo() { let a = id!([0u32, bar($0)] ); } +fn foo() { let a = id!([0u32, bar$0()] ); } "#, expect![[r#" - *bar()* - ```rust - u32 - ``` - "#]], - ); -} + *bar* -#[test] -fn test_hover_through_literal_string_in_macro() { - check( - r#" -macro_rules! 
arr { ($($tt:tt)*) => { [$($tt)*] } } -fn foo() { - let mastered_for_itunes = ""; - let _ = arr!("Tr$0acks", &mastered_for_itunes); -} -"#, - expect![[r#" - *"Tracks"* - ```rust - &str - ``` - "#]], + ```rust + test + ``` + + ```rust + fn bar() -> u32 + ``` + "#]], ); } @@ -5655,30 +5597,18 @@ fn main() { #[test] fn hover_underscore_type() { - check( + check_hover_no_result( r#" fn main() { let x: _$0 = 0; } "#, - expect![[r#" - *_* - ```rust - {unknown} - ``` - "#]], ); - check( + check_hover_no_result( r#" fn main() { let x: (_$0,) = (0,); } "#, - expect![[r#" - *_* - ```rust - {unknown} - ``` - "#]], ); } From 7198cd04ac14a31fc9e401e6a4e411e401acc326 Mon Sep 17 00:00:00 2001 From: OmarTawfik <15987992+OmarTawfik@users.noreply.github.com> Date: Fri, 20 Jan 2023 13:22:11 -0800 Subject: [PATCH 088/501] allow using vscode variables in `config.serverPath` --- editors/code/src/bootstrap.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/editors/code/src/bootstrap.ts b/editors/code/src/bootstrap.ts index 374c3b8144c38..cabc740717216 100644 --- a/editors/code/src/bootstrap.ts +++ b/editors/code/src/bootstrap.ts @@ -1,6 +1,6 @@ import * as vscode from "vscode"; import * as os from "os"; -import { Config } from "./config"; +import { Config, substituteVSCodeVariables } from "./config"; import { log, isValidExecutable } from "./util"; import { PersistentState } from "./persistent_state"; import { exec } from "child_process"; @@ -132,7 +132,7 @@ async function getServer( return undefined; } function serverPath(config: Config): string | null { - return process.env.__RA_LSP_SERVER_DEBUG ?? config.serverPath; + return process.env.__RA_LSP_SERVER_DEBUG ?? substituteVSCodeVariables(config.serverPath); } async function isNixOs(): Promise { From 9e63b9ba101f28b6fd84dbb7ed799ee20a660d70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 08:44:25 +0200 Subject: [PATCH 089/501] Bump memmap2 --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 258e202cc7ee7..676207d68ea10 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -972,9 +972,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95af15f345b17af2efc8ead6080fb8bc376f8cec1b35277b935637595fe77498" +checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc" dependencies = [ "libc", ] From 938aa9d0bce93e1b52dcf02b82ce103e99adc60c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 08:45:47 +0200 Subject: [PATCH 090/501] Bump semver --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 676207d68ea10..63e6ddb6e7507 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1554,9 +1554,9 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "semver" -version = "1.0.14" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4" +checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" dependencies = [ "serde", ] From 392a6ee422f5f89fb2c1a23d4e306f71850a05f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 08:48:46 +0200 Subject: [PATCH 091/501] Bump once_cell --- 
Cargo.lock | 4 ++-- crates/hir-def/Cargo.toml | 2 +- crates/hir-ty/Cargo.toml | 2 +- crates/hir/Cargo.toml | 2 +- crates/ide-completion/Cargo.toml | 2 +- crates/ide-db/Cargo.toml | 4 ++-- crates/intern/Cargo.toml | 2 +- crates/profile/Cargo.toml | 2 +- crates/syntax/Cargo.toml | 2 +- 9 files changed, 11 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 63e6ddb6e7507..393b2cb6ccf26 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1078,9 +1078,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.15.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" +checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" [[package]] name = "oorandom" diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 094111424e4f5..1daf0428c242f 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -25,7 +25,7 @@ hashbrown = { version = "0.12.1", default-features = false } indexmap = "1.9.1" itertools = "0.10.5" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } -once_cell = "1.15.0" +once_cell = "1.17.0" rustc-hash = "1.1.0" smallvec = "1.10.0" tracing = "0.1.35" diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index f16c0244e3595..8b762bf829bcf 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -26,7 +26,7 @@ chalk-ir = "0.88.0" chalk-recursive = { version = "0.88.0", default-features = false } chalk-derive = "0.88.0" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } -once_cell = "1.15.0" +once_cell = "1.17.0" typed-arena = "2.0.1" rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false } diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 154794b3b4524..32cde8a77325c 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -17,7 +17,7 @@ either = "1.7.0" arrayvec = "0.7.2" itertools = "0.10.5" smallvec = "1.10.0" -once_cell = "1.15.0" +once_cell = "1.17.0" # local deps base-db.workspace = true diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml index d0d0de09660e9..34ef092cfc44c 100644 --- a/crates/ide-completion/Cargo.toml +++ b/crates/ide-completion/Cargo.toml @@ -15,7 +15,7 @@ doctest = false cov-mark = "2.0.0-pre.1" itertools = "0.10.5" -once_cell = "1.15.0" +once_cell = "1.17.0" smallvec = "1.10.0" diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml index 539873a198d5b..bedf5ffd9347f 100644 --- a/crates/ide-db/Cargo.toml +++ b/crates/ide-db/Cargo.toml @@ -17,7 +17,7 @@ tracing = "0.1.35" rayon = "1.5.3" fst = { version = "0.4.7", default-features = false } rustc-hash = "1.1.0" -once_cell = "1.15.0" +once_cell = "1.17.0" either = "1.7.0" itertools = "0.10.5" arrayvec = "0.7.2" @@ -31,7 +31,7 @@ parser.workspace = true profile.workspace = true stdx.workspace = true syntax.workspace = true -text-edit .workspace = true +text-edit.workspace = true # ide should depend only on the top-level `hir` package. if you need # something from some `hir-xxx` subpackage, reexport the API via `hir`. 
hir.workspace = true diff --git a/crates/intern/Cargo.toml b/crates/intern/Cargo.toml index 6fd35c429c4cb..c73c368a14e0c 100644 --- a/crates/intern/Cargo.toml +++ b/crates/intern/Cargo.toml @@ -16,5 +16,5 @@ doctest = false # We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap = { version = "=5.4.0", features = ["raw-api"] } hashbrown = { version = "0.12.1", default-features = false } -once_cell = "1.15.0" +once_cell = "1.17.0" rustc-hash = "1.1.0" diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml index a54becf29a782..6273ea51db839 100644 --- a/crates/profile/Cargo.toml +++ b/crates/profile/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] -once_cell = "1.15.0" +once_cell = "1.17.0" cfg-if = "1.0.0" libc = "0.2.135" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 9f16eb87b99b8..26e5acebe0fe9 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -18,7 +18,7 @@ itertools = "0.10.5" rowan = "0.15.10" rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" } rustc-hash = "1.1.0" -once_cell = "1.15.0" +once_cell = "1.17.0" indexmap = "1.9.1" smol_str = "0.1.23" From d26c8ccc89a1b40fe99714bca2b1f08fa433f440 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 08:49:50 +0200 Subject: [PATCH 092/501] Bump num_cpus --- Cargo.lock | 17 +++++++++++++---- crates/rust-analyzer/Cargo.toml | 2 +- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 393b2cb6ccf26..1afcc92b04dfc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -65,7 +65,7 @@ version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ - "hermit-abi", + "hermit-abi 0.1.19", "libc", "winapi", ] @@ -478,6 +478,15 @@ dependencies = [ "libc", ] +[[package]] +name = "hermit-abi" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" +dependencies = [ + "libc", +] + [[package]] name = "hir" version = "0.0.0" @@ -1059,11 +1068,11 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.13.1" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" +checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" dependencies = [ - "hermit-abi", + "hermit-abi 0.2.6", "libc", ] diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index b8e15519ea01c..c4d055252d3d6 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -33,7 +33,7 @@ serde = { version = "1.0.137", features = ["derive"] } serde_json = { version = "1.0.81", features = ["preserve_order"] } threadpool = "1.8.1" rayon = "1.5.3" -num_cpus = "1.13.1" +num_cpus = "1.15.0" mimalloc = { version = "0.1.30", default-features = false, optional = true } lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" } tracing = "0.1.35" From da85d43f02de4519a85295d1673514a75632b635 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 08:50:34 +0200 Subject: [PATCH 093/501] Bump dissimilar --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/Cargo.lock b/Cargo.lock index 1afcc92b04dfc..a91a3866c2ddd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -332,9 +332,9 @@ dependencies = [ [[package]] name = "dissimilar" -version = "1.0.4" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c97b9233581d84b8e1e689cdd3a47b6f69770084fc246e86a7f78b0d9c1d4a5" +checksum = "210ec60ae7d710bed8683e333e9d2855a8a56a3e9892b38bad3bb0d4d29b0d5e" [[package]] name = "dot" From 047bc6f0370e4407a344481f84dff57a81ab3dd3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:31:38 +0200 Subject: [PATCH 094/501] Bump tracing-tree --- Cargo.lock | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a91a3866c2ddd..a57d7824da165 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -26,15 +26,6 @@ dependencies = [ "log", ] -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] - [[package]] name = "anyhow" version = "1.0.68" @@ -1066,6 +1057,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "num_cpus" version = "1.15.0" @@ -1097,6 +1098,12 @@ version = "11.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "parking_lot" version = "0.11.2" @@ -1921,12 +1928,12 @@ dependencies = [ [[package]] name = "tracing-tree" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07e90b329c621ade432823988574e820212648aa40e7a2497777d58de0fb453" +checksum = "758e983ab7c54fee18403994507e7f212b9005e957ce7984996fac8d11facedb" dependencies = [ - "ansi_term", "atty", + "nu-ansi-term", "tracing-core", "tracing-log", "tracing-subscriber", From 84c38d0456c819f0699f4cf078645bce546421a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:33:36 +0200 Subject: [PATCH 095/501] Bump scoped-tls --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a57d7824da165..7703f3965e175 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1558,9 +1558,9 @@ dependencies = [ [[package]] name = "scoped-tls" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" [[package]] name = "scopeguard" From 1dd1f41ef91c2286ebde82a9f147753228bdbb1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:33:52 +0200 Subject: [PATCH 096/501] Bump typed-arena --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7703f3965e175..afcb6b46f4b91 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1949,9 
+1949,9 @@ dependencies = [ [[package]] name = "typed-arena" -version = "2.0.1" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae" +checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a" [[package]] name = "ungrammar" From ee4710d922bbe073972248dbca0eedbdc83aea0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:35:28 +0200 Subject: [PATCH 097/501] Bump cargo_metadata --- Cargo.lock | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index afcb6b46f4b91..ed11202bd6de2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -129,15 +129,16 @@ dependencies = [ [[package]] name = "cargo_metadata" -version = "0.15.0" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3abb7553d5b9b8421c6de7cb02606ff15e0c6eea7d8eadd75ef013fd636bec36" +checksum = "982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a" dependencies = [ "camino", "cargo-platform", "semver", "serde", "serde_json", + "thiserror", ] [[package]] From 703ff60d9f885dfe317e3d271d9f341509efac92 Mon Sep 17 00:00:00 2001 From: Lukas Bergdoll Date: Sat, 21 Jan 2023 10:17:06 +0100 Subject: [PATCH 098/501] Use NonNull in merge_sort This is more clear about the intent of the pointer and avoids problems if the allocation returns a null pointer. --- library/core/src/slice/sort.rs | 34 +++++++++++++++++++--------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/library/core/src/slice/sort.rs b/library/core/src/slice/sort.rs index 2181f9a811855..7f8895b150fe7 100644 --- a/library/core/src/slice/sort.rs +++ b/library/core/src/slice/sort.rs @@ -1203,7 +1203,7 @@ pub fn merge_sort( // `is_less` panics. When merging two sorted runs, this buffer holds a copy of the shorter run, // which will always have length at most `len / 2`. 
let buf = BufGuard::new(len / 2, elem_alloc_fn, elem_dealloc_fn); - let buf_ptr = buf.buf_ptr; + let buf_ptr = buf.buf_ptr.as_ptr(); let mut runs = RunVec::new(run_alloc_fn, run_dealloc_fn); @@ -1298,7 +1298,7 @@ pub fn merge_sort( where ElemDeallocF: Fn(*mut T, usize), { - buf_ptr: *mut T, + buf_ptr: ptr::NonNull, capacity: usize, elem_dealloc_fn: ElemDeallocF, } @@ -1315,7 +1315,11 @@ pub fn merge_sort( where ElemAllocF: Fn(usize) -> *mut T, { - Self { buf_ptr: elem_alloc_fn(len), capacity: len, elem_dealloc_fn } + Self { + buf_ptr: ptr::NonNull::new(elem_alloc_fn(len)).unwrap(), + capacity: len, + elem_dealloc_fn, + } } } @@ -1324,7 +1328,7 @@ pub fn merge_sort( ElemDeallocF: Fn(*mut T, usize), { fn drop(&mut self) { - (self.elem_dealloc_fn)(self.buf_ptr, self.capacity); + (self.elem_dealloc_fn)(self.buf_ptr.as_ptr(), self.capacity); } } @@ -1333,7 +1337,7 @@ pub fn merge_sort( RunAllocF: Fn(usize) -> *mut TimSortRun, RunDeallocF: Fn(*mut TimSortRun, usize), { - buf_ptr: *mut TimSortRun, + buf_ptr: ptr::NonNull, capacity: usize, len: usize, run_alloc_fn: RunAllocF, @@ -1350,7 +1354,7 @@ pub fn merge_sort( const START_RUN_CAPACITY: usize = 16; Self { - buf_ptr: run_alloc_fn(START_RUN_CAPACITY), + buf_ptr: ptr::NonNull::new(run_alloc_fn(START_RUN_CAPACITY)).unwrap(), capacity: START_RUN_CAPACITY, len: 0, run_alloc_fn, @@ -1361,15 +1365,15 @@ pub fn merge_sort( fn push(&mut self, val: TimSortRun) { if self.len == self.capacity { let old_capacity = self.capacity; - let old_buf_ptr = self.buf_ptr; + let old_buf_ptr = self.buf_ptr.as_ptr(); self.capacity = self.capacity * 2; - self.buf_ptr = (self.run_alloc_fn)(self.capacity); + self.buf_ptr = ptr::NonNull::new((self.run_alloc_fn)(self.capacity)).unwrap(); // SAFETY: buf_ptr new and old were correctly allocated and old_buf_ptr has // old_capacity valid elements. unsafe { - ptr::copy_nonoverlapping(old_buf_ptr, self.buf_ptr, old_capacity); + ptr::copy_nonoverlapping(old_buf_ptr, self.buf_ptr.as_ptr(), old_capacity); } (self.run_dealloc_fn)(old_buf_ptr, old_capacity); @@ -1377,7 +1381,7 @@ pub fn merge_sort( // SAFETY: The invariant was just checked. unsafe { - self.buf_ptr.add(self.len).write(val); + self.buf_ptr.as_ptr().add(self.len).write(val); } self.len += 1; } @@ -1390,7 +1394,7 @@ pub fn merge_sort( // SAFETY: buf_ptr needs to be valid and len invariant upheld. unsafe { // the place we are taking from. - let ptr = self.buf_ptr.add(index); + let ptr = self.buf_ptr.as_ptr().add(index); // Shift everything down to fill in that spot. ptr::copy(ptr.add(1), ptr, self.len - index - 1); @@ -1400,7 +1404,7 @@ pub fn merge_sort( fn as_slice(&self) -> &[TimSortRun] { // SAFETY: Safe as long as buf_ptr is valid and len invariant was upheld. - unsafe { &*ptr::slice_from_raw_parts(self.buf_ptr, self.len) } + unsafe { &*ptr::slice_from_raw_parts(self.buf_ptr.as_ptr(), self.len) } } fn len(&self) -> usize { @@ -1419,7 +1423,7 @@ pub fn merge_sort( if index < self.len { // SAFETY: buf_ptr and len invariant must be upheld. unsafe { - return &*(self.buf_ptr.add(index)); + return &*(self.buf_ptr.as_ptr().add(index)); } } @@ -1436,7 +1440,7 @@ pub fn merge_sort( if index < self.len { // SAFETY: buf_ptr and len invariant must be upheld. unsafe { - return &mut *(self.buf_ptr.add(index)); + return &mut *(self.buf_ptr.as_ptr().add(index)); } } @@ -1452,7 +1456,7 @@ pub fn merge_sort( fn drop(&mut self) { // As long as TimSortRun is Copy we don't need to drop them individually but just the // whole allocation. 
- (self.run_dealloc_fn)(self.buf_ptr, self.capacity); + (self.run_dealloc_fn)(self.buf_ptr.as_ptr(), self.capacity); } } } From f2397638f5b48ae99fdc2ece1f347f213e547577 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:39:37 +0200 Subject: [PATCH 099/501] Bump arbitrary and derive-arbitrary --- Cargo.lock | 8 ++++---- crates/cfg/Cargo.toml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ed11202bd6de2..7e7ac2e7e1e97 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -40,9 +40,9 @@ checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72" [[package]] name = "arbitrary" -version = "1.1.7" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86fd10d912cab78764cc44307d9cd5f164e09abbeb87fb19fb6d95937e8da5f" +checksum = "b0224938f92e7aef515fac2ff2d18bd1115c1394ddf4a092e0c87e8be9499ee5" [[package]] name = "arrayvec" @@ -313,9 +313,9 @@ dependencies = [ [[package]] name = "derive_arbitrary" -version = "1.1.6" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "226ad66541d865d7a7173ad6a9e691c33fdb910ac723f4bc734b3e5294a1f931" +checksum = "cf460bbff5f571bfc762da5102729f59f338be7db17a21fade44c5c4f5005350" dependencies = [ "proc-macro2", "quote", diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml index 9091c40d1cce8..0880bc239d83f 100644 --- a/crates/cfg/Cargo.toml +++ b/crates/cfg/Cargo.toml @@ -23,8 +23,8 @@ oorandom = "11.1.3" # We depend on both individually instead of using `features = ["derive"]` to microoptimize the # build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr` # supports `arbitrary`. This way, we avoid feature unification. 
-arbitrary = "1.1.7" -derive_arbitrary = "1.1.6" +arbitrary = "1.2.2" +derive_arbitrary = "1.2.2" # local deps mbe.workspace = true From 452e1e54ef5b9b58e44e7e66f9f72fc794eafa8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:43:54 +0200 Subject: [PATCH 100/501] Bump backtrace --- Cargo.lock | 40 +++++++++++++++++++++++++++++----------- 1 file changed, 29 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7e7ac2e7e1e97..0c4014a34bbc5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.17.0" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b" +checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" dependencies = [ "gimli", ] @@ -69,16 +69,16 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "backtrace" -version = "0.3.66" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7" +checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" dependencies = [ "addr2line", "cc", "cfg-if", "libc", - "miniz_oxide", - "object", + "miniz_oxide 0.6.2", + "object 0.30.2", "rustc-demangle", ] @@ -390,7 +390,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" dependencies = [ "crc32fast", - "miniz_oxide", + "miniz_oxide 0.5.4", ] [[package]] @@ -442,9 +442,9 @@ checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a" [[package]] name = "gimli" -version = "0.26.2" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d" +checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793" [[package]] name = "hashbrown" @@ -1007,6 +1007,15 @@ dependencies = [ "adler", ] +[[package]] +name = "miniz_oxide" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +dependencies = [ + "adler", +] + [[package]] name = "mio" version = "0.8.4" @@ -1087,6 +1096,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "object" +version = "0.30.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b8c786513eb403643f2a88c244c2aaa270ef2153f55094587d0c48a3cf22a83" +dependencies = [ + "memchr", +] + [[package]] name = "once_cell" version = "1.17.0" @@ -1220,7 +1238,7 @@ name = "proc-macro-api" version = "0.0.0" dependencies = [ "memmap2", - "object", + "object 0.29.0", "paths", "profile", "serde", @@ -1239,7 +1257,7 @@ dependencies = [ "libloading", "mbe", "memmap2", - "object", + "object 0.29.0", "paths", "proc-macro-api", "proc-macro-test", From 11e4baba1ac803f4ae460af06d2903914faa70eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:46:31 +0200 Subject: [PATCH 101/501] Bump indexmap --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0c4014a34bbc5..3e2ceee2143ee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -771,9 +771,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.9.1" +version = "1.9.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" +checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" dependencies = [ "autocfg", "hashbrown", From 25a5bd9b13b59fa81b2b47621c667f2e09af7351 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:48:38 +0200 Subject: [PATCH 102/501] Bump object --- Cargo.lock | 15 +++------------ crates/proc-macro-api/Cargo.toml | 2 +- crates/proc-macro-srv/Cargo.toml | 2 +- 3 files changed, 5 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3e2ceee2143ee..e7afad5f15399 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -78,7 +78,7 @@ dependencies = [ "cfg-if", "libc", "miniz_oxide 0.6.2", - "object 0.30.2", + "object", "rustc-demangle", ] @@ -1087,15 +1087,6 @@ dependencies = [ "libc", ] -[[package]] -name = "object" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" -dependencies = [ - "memchr", -] - [[package]] name = "object" version = "0.30.2" @@ -1238,7 +1229,7 @@ name = "proc-macro-api" version = "0.0.0" dependencies = [ "memmap2", - "object 0.29.0", + "object", "paths", "profile", "serde", @@ -1257,7 +1248,7 @@ dependencies = [ "libloading", "mbe", "memmap2", - "object 0.29.0", + "object", "paths", "proc-macro-api", "proc-macro-test", diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 21e524b3c17f2..d852f65c70ada 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] -object = { version = "0.29.0", default-features = false, features = [ +object = { version = "0.30.2", default-features = false, features = [ "std", "read_core", "elf", diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index 5b3d6c8eca3bf..f7f07cfcb2e27 100644 --- a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] -object = { version = "0.29.0", default-features = false, features = [ +object = { version = "0.30.2", default-features = false, features = [ "std", "read_core", "elf", From e90b0b65047431c4d8f7480da231a10ddc044d90 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:49:17 +0200 Subject: [PATCH 103/501] Bump quote --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e7afad5f15399..c4d4e44c0d5df 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1361,9 +1361,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.21" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" +checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" dependencies = [ "proc-macro2", ] From ff0b15cafac25defca7d2cd6b4773b1c18e0a194 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:49:51 +0200 Subject: [PATCH 104/501] Bump xflags --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c4d4e44c0d5df..f7e377fd89e3c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2215,18 +2215,18 @@ checksum = 
"06069a848f95fceae3e5e03c0ddc8cb78452b56654ee0c8e68f938cf790fb9e3" [[package]] name = "xflags" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf19f5031a1a812e96fede16f8161218883079946cea87619d3613db1efd268" +checksum = "c4554b580522d0ca238369c16b8f6ce34524d61dafe7244993754bbd05f2c2ea" dependencies = [ "xflags-macros", ] [[package]] name = "xflags-macros" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2afbd7f2039bb6cad2dd45f0c5dff49c0d4e26118398768b7a605524d4251809" +checksum = "f58e7b3ca8977093aae6b87b6a7730216fc4c53a6530bab5c43a783cd810c1a8" [[package]] name = "xshell" From 11652799054619e7a5e7c4def686343e173edb02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:50:08 +0200 Subject: [PATCH 105/501] Bump xshell --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f7e377fd89e3c..317bdda919c8e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2230,18 +2230,18 @@ checksum = "f58e7b3ca8977093aae6b87b6a7730216fc4c53a6530bab5c43a783cd810c1a8" [[package]] name = "xshell" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d47097dc5c85234b1e41851b3422dd6d19b3befdd35b4ae5ce386724aeca981" +checksum = "962c039b3a7b16cf4e9a4248397c6585c07547412e7d6a6e035389a802dcfe90" dependencies = [ "xshell-macros", ] [[package]] name = "xshell-macros" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88301b56c26dd9bf5c43d858538f82d6f3f7764767defbc5d34e59459901c41a" +checksum = "1dbabb1cbd15a1d6d12d9ed6b35cc6777d4af87ab3ba155ea37215f20beab80c" [[package]] name = "xtask" From f9598522fbe86c0bbeadc14f735adec54404074a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:51:35 +0200 Subject: [PATCH 106/501] Bump libloading --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 317bdda919c8e..6d8694759eda1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -877,9 +877,9 @@ checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" [[package]] name = "libloading" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" dependencies = [ "cfg-if", "winapi", From 7bbe4c299ca48f1d32fb48fa745c1bf23f494055 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:52:19 +0200 Subject: [PATCH 107/501] Bump proc-macro2 --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6d8694759eda1..af1d92168aa92 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1277,9 +1277,9 @@ version = "0.0.0" [[package]] name = "proc-macro2" -version = "1.0.47" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" +checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2" dependencies = [ "unicode-ident", ] From c5a182c56b64c97da0984403fa444685fe63032b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:53:13 +0200 
Subject: [PATCH 108/501] Bump mimalloc --- Cargo.lock | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index af1d92168aa92..af13703db5c42 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -887,11 +887,12 @@ dependencies = [ [[package]] name = "libmimalloc-sys" -version = "0.1.26" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc093ab289b0bfda3aa1bdfab9c9542be29c7ef385cfcbe77f8c9813588eb48" +checksum = "dd8c7cbf8b89019683667e347572e6d55a7df7ea36b0c4ce69961b0cde67b174" dependencies = [ "cc", + "libc", ] [[package]] @@ -991,9 +992,9 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.30" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76ce6a4b40d3bff9eb3ce9881ca0737a85072f9f975886082640cd46a75cdb35" +checksum = "9dcb174b18635f7561a0c6c9fc2ce57218ac7523cf72c50af80e2d79ab8f3ba1" dependencies = [ "libmimalloc-sys", ] From 1431264646cffebebe636abc7ec5a72dc391736b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:54:19 +0200 Subject: [PATCH 109/501] Bump rayon --- Cargo.lock | 10 ++++------ crates/ide-db/Cargo.toml | 2 +- crates/rust-analyzer/Cargo.toml | 2 +- crates/syntax/Cargo.toml | 2 +- 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index af13703db5c42..50fc54db5106a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1371,21 +1371,19 @@ dependencies = [ [[package]] name = "rayon" -version = "1.5.3" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d" +checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" dependencies = [ - "autocfg", - "crossbeam-deque", "either", "rayon-core", ] [[package]] name = "rayon-core" -version = "1.9.3" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" +checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3" dependencies = [ "crossbeam-channel", "crossbeam-deque", diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml index bedf5ffd9347f..9672bb9b7b59a 100644 --- a/crates/ide-db/Cargo.toml +++ b/crates/ide-db/Cargo.toml @@ -14,7 +14,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" tracing = "0.1.35" -rayon = "1.5.3" +rayon = "1.6.1" fst = { version = "0.4.7", default-features = false } rustc-hash = "1.1.0" once_cell = "1.17.0" diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index c4d055252d3d6..e3aa880d00583 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -32,7 +32,7 @@ rustc-hash = "1.1.0" serde = { version = "1.0.137", features = ["derive"] } serde_json = { version = "1.0.81", features = ["preserve_order"] } threadpool = "1.8.1" -rayon = "1.5.3" +rayon = "1.6.1" num_cpus = "1.15.0" mimalloc = { version = "0.1.30", default-features = false, optional = true } lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" } diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 26e5acebe0fe9..1a845a6ee713c 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -28,7 +28,7 @@ stdx.workspace = true text-edit.workspace = true [dev-dependencies] -rayon = "1.5.3" +rayon = "1.6.1" expect-test = "1.4.0" proc-macro2 = "1.0.47" quote = "1.0.20" From 
8f678a0169d377a00e558f96970f4184eba65300 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:56:07 +0200 Subject: [PATCH 110/501] Bump snap --- Cargo.lock | 4 ++-- crates/proc-macro-api/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 50fc54db5106a..f84c649524a63 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1655,9 +1655,9 @@ dependencies = [ [[package]] name = "snap" -version = "1.0.5" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451" +checksum = "5e9f0ab6ef7eb7353d9119c170a436d1bf248eea575ac42d19d12f4e34130831" [[package]] name = "sourcegen" diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index d852f65c70ada..28469b832468b 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -23,7 +23,7 @@ serde = { version = "1.0.137", features = ["derive"] } serde_json = { version = "1.0.81", features = ["unbounded_depth"] } tracing = "0.1.37" memmap2 = "0.5.4" -snap = "1.0.5" +snap = "1.1.0" # local deps paths.workspace = true From 7a285e29330227281ebec843d75863bb101d7067 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:56:46 +0200 Subject: [PATCH 111/501] Bump flate2 --- Cargo.lock | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f84c649524a63..54bd231357d62 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -77,7 +77,7 @@ dependencies = [ "cc", "cfg-if", "libc", - "miniz_oxide 0.6.2", + "miniz_oxide", "object", "rustc-demangle", ] @@ -385,12 +385,12 @@ checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d" [[package]] name = "flate2" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" +checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" dependencies = [ "crc32fast", - "miniz_oxide 0.5.4", + "miniz_oxide", ] [[package]] @@ -999,15 +999,6 @@ dependencies = [ "libmimalloc-sys", ] -[[package]] -name = "miniz_oxide" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34" -dependencies = [ - "adler", -] - [[package]] name = "miniz_oxide" version = "0.6.2" From a9b96e1efa0f6d41f6cffcb5bbb009a56eb90e6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 21 Jan 2023 09:57:51 +0200 Subject: [PATCH 112/501] Bump the rest of the deps --- Cargo.lock | 188 ++++++++++++++++++++++------------------------------- 1 file changed, 77 insertions(+), 111 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 54bd231357d62..b83c3778c2ca6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -111,9 +111,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "camino" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ad0e1e3e88dd237a156ab9f571021b8a158caa0ae44b1968a241efb5144c1e" +checksum = "c77df041dc383319cc661b428b6961a005db4d6808d5e12536931b1ca9556055" dependencies = [ "serde", ] @@ -143,9 +143,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.73" +version = "1.0.78" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" +checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" [[package]] name = "cfg" @@ -278,22 +278,22 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.11" +version = "0.9.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348" +checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset", + "memoffset 0.7.1", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.12" +version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac" +checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" dependencies = [ "cfg-if", ] @@ -308,7 +308,7 @@ dependencies = [ "hashbrown", "lock_api", "once_cell", - "parking_lot_core 0.9.4", + "parking_lot_core 0.9.6", ] [[package]] @@ -367,14 +367,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b9663d381d07ae25dc88dbdf27df458faa83a9b25336bcac83d5e452b5fc9d3" +checksum = "4e884668cd0c7480504233e951174ddc3b382f7c2666e3b7310b5c4e7b0c37f9" dependencies = [ "cfg-if", "libc", "redox_syscall", - "windows-sys 0.42.0", + "windows-sys", ] [[package]] @@ -829,9 +829,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" +checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" [[package]] name = "jod-thread" @@ -841,9 +841,9 @@ checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae" [[package]] name = "kqueue" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d6112e8f37b59803ac47a42d14f1f3a59bbf72fc6857ffc5be455e28a691f8e" +checksum = "2c8fc60ba15bf51257aa9807a48a61013db043fcf3a78cb0d916e8e396dcad98" dependencies = [ "kqueue-sys", "libc", @@ -990,6 +990,15 @@ dependencies = [ "autocfg", ] +[[package]] +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg", +] + [[package]] name = "mimalloc" version = "0.1.34" @@ -1010,14 +1019,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf" +checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" dependencies = [ "libc", "log", "wasi", - "windows-sys 0.36.1", + "windows-sys", ] [[package]] @@ -1026,14 +1035,14 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123" dependencies = [ - "windows-sys 0.42.0", + "windows-sys", ] [[package]] name = "nix" -version = "0.26.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"46a58d1d356c6597d08cde02c2f09d785b09e28711837b1ed667dc652c08a694" +checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" dependencies = [ "bitflags", "cfg-if", @@ -1114,7 +1123,7 @@ checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" dependencies = [ "instant", "lock_api", - "parking_lot_core 0.8.5", + "parking_lot_core 0.8.6", ] [[package]] @@ -1124,14 +1133,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.4", + "parking_lot_core 0.9.6", ] [[package]] name = "parking_lot_core" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" dependencies = [ "cfg-if", "instant", @@ -1143,15 +1152,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.4" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0" +checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-sys 0.42.0", + "windows-sys", ] [[package]] @@ -1167,9 +1176,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.9" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" +checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba" [[package]] name = "paths" @@ -1393,9 +1402,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.6.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" +checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" dependencies = [ "regex-syntax", ] @@ -1411,9 +1420,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.27" +version = "0.6.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" +checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" [[package]] name = "rowan" @@ -1423,7 +1432,7 @@ checksum = "5811547e7ba31e903fe48c8ceab10d40d70a101f3d15523c847cce91aa71f332" dependencies = [ "countme", "hashbrown", - "memoffset", + "memoffset 0.6.5", "rustc-hash", "text-size", ] @@ -1505,9 +1514,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "ryu" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" +checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" [[package]] name = "salsa" @@ -1611,9 +1620,9 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca" +checksum = "9a5ec9fa74a20ebbe5d9ac23dac1fc96ba0ecfe9f50f2843b52e537b10fbcb4e" dependencies = [ "proc-macro2", "quote", @@ -1749,18 +1758,18 @@ checksum = 
"288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a" [[package]] name = "thiserror" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" +checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" +checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" dependencies = [ "proc-macro2", "quote", @@ -1970,15 +1979,15 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.8" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" +checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58" [[package]] name = "unicode-ident" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" +checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" [[package]] name = "unicode-normalization" @@ -2097,19 +2106,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-sys" -version = "0.36.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2" -dependencies = [ - "windows_aarch64_msvc 0.36.1", - "windows_i686_gnu 0.36.1", - "windows_i686_msvc 0.36.1", - "windows_x86_64_gnu 0.36.1", - "windows_x86_64_msvc 0.36.1", -] - [[package]] name = "windows-sys" version = "0.42.0" @@ -2117,85 +2113,55 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" dependencies = [ "windows_aarch64_gnullvm", - "windows_aarch64_msvc 0.42.0", - "windows_i686_gnu 0.42.0", - "windows_i686_msvc 0.42.0", - "windows_x86_64_gnu 0.42.0", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", "windows_x86_64_gnullvm", - "windows_x86_64_msvc 0.42.0", + "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" +checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608" [[package]] name = "windows_aarch64_msvc" -version = "0.36.1" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" +checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7" [[package]] name = "windows_i686_gnu" -version = "0.36.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6" - -[[package]] -name = "windows_i686_gnu" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" - -[[package]] -name = "windows_i686_msvc" -version = "0.36.1" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024" +checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640" [[package]] name = "windows_i686_msvc" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" +checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605" [[package]] name = "windows_x86_64_gnu" -version = "0.36.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" +checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.36.1" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680" +checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463" [[package]] name = "windows_x86_64_msvc" -version = "0.42.0" +version = "0.42.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" +checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd" [[package]] name = "write-json" From 15358818363f66fc8b99e3711e6eb99a5e8684b9 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 21 Jan 2023 17:29:07 +0100 Subject: [PATCH 113/501] Replace SmolStr usage with lang item enum for lang items --- crates/hir-def/src/db.rs | 6 +- crates/hir-def/src/lang_item.rs | 298 ++++++++++++++++++++++--- crates/hir-def/src/lib.rs | 1 - crates/hir-expand/src/name.rs | 8 + crates/hir-ty/src/autoderef.rs | 7 +- crates/hir-ty/src/chalk_db.rs | 81 ++++--- crates/hir-ty/src/chalk_ext.rs | 7 +- crates/hir-ty/src/display.rs | 7 +- crates/hir-ty/src/infer.rs | 20 +- crates/hir-ty/src/infer/coerce.rs | 15 +- crates/hir-ty/src/infer/expr.rs | 4 +- crates/hir-ty/src/lang_items.rs | 11 +- crates/hir-ty/src/lower.rs | 10 +- crates/hir-ty/src/method_resolution.rs | 59 ++--- crates/hir-ty/src/traits.rs | 18 +- crates/hir-ty/src/utils.rs | 8 +- crates/hir/src/display.rs | 5 +- crates/hir/src/lib.rs | 13 +- crates/hir/src/source_analyzer.rs | 36 ++- crates/project-model/src/sysroot.rs | 7 +- crates/project-model/src/workspace.rs | 4 +- 21 files changed, 432 insertions(+), 193 deletions(-) diff --git a/crates/hir-def/src/db.rs b/crates/hir-def/src/db.rs index 65cdd1b69b17d..b23427a73b345 100644 --- a/crates/hir-def/src/db.rs +++ b/crates/hir-def/src/db.rs @@ -6,7 +6,7 @@ use either::Either; use 
hir_expand::{db::AstDatabase, HirFileId}; use intern::Interned; use la_arena::ArenaMap; -use syntax::{ast, AstPtr, SmolStr}; +use syntax::{ast, AstPtr}; use crate::{ adt::{EnumData, StructData}, @@ -19,7 +19,7 @@ use crate::{ generics::GenericParams, import_map::ImportMap, item_tree::{AttrOwner, ItemTree}, - lang_item::{LangItemTarget, LangItems}, + lang_item::{LangItem, LangItemTarget, LangItems}, nameres::{diagnostics::DefDiagnostic, DefMap}, visibility::{self, Visibility}, AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId, @@ -183,7 +183,7 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast { fn crate_lang_items(&self, krate: CrateId) -> Arc; #[salsa::invoke(LangItems::lang_item_query)] - fn lang_item(&self, start_crate: CrateId, item: SmolStr) -> Option; + fn lang_item(&self, start_crate: CrateId, item: LangItem) -> Option; #[salsa::invoke(ImportMap::import_map_query)] fn import_map(&self, krate: CrateId) -> Arc; diff --git a/crates/hir-def/src/lang_item.rs b/crates/hir-def/src/lang_item.rs index 8778501845876..ab9bc615daf55 100644 --- a/crates/hir-def/src/lang_item.rs +++ b/crates/hir-def/src/lang_item.rs @@ -8,19 +8,21 @@ use rustc_hash::FxHashMap; use syntax::SmolStr; use crate::{ - db::DefDatabase, AdtId, AttrDefId, CrateId, EnumId, EnumVariantId, FunctionId, ImplId, - ModuleDefId, StaticId, StructId, TraitId, + db::DefDatabase, AdtId, AssocItemId, AttrDefId, CrateId, EnumId, EnumVariantId, FunctionId, + ImplId, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId, }; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum LangItemTarget { EnumId(EnumId), - FunctionId(FunctionId), - ImplDefId(ImplId), - StaticId(StaticId), - StructId(StructId), - TraitId(TraitId), - EnumVariantId(EnumVariantId), + Function(FunctionId), + ImplDef(ImplId), + Static(StaticId), + Struct(StructId), + Union(UnionId), + TypeAlias(TypeAliasId), + Trait(TraitId), + EnumVariant(EnumVariantId), } impl LangItemTarget { @@ -33,42 +35,42 @@ impl LangItemTarget { pub fn as_function(self) -> Option { match self { - LangItemTarget::FunctionId(id) => Some(id), + LangItemTarget::Function(id) => Some(id), _ => None, } } pub fn as_impl_def(self) -> Option { match self { - LangItemTarget::ImplDefId(id) => Some(id), + LangItemTarget::ImplDef(id) => Some(id), _ => None, } } pub fn as_static(self) -> Option { match self { - LangItemTarget::StaticId(id) => Some(id), + LangItemTarget::Static(id) => Some(id), _ => None, } } pub fn as_struct(self) -> Option { match self { - LangItemTarget::StructId(id) => Some(id), + LangItemTarget::Struct(id) => Some(id), _ => None, } } pub fn as_trait(self) -> Option { match self { - LangItemTarget::TraitId(id) => Some(id), + LangItemTarget::Trait(id) => Some(id), _ => None, } } pub fn as_enum_variant(self) -> Option { match self { - LangItemTarget::EnumVariantId(id) => Some(id), + LangItemTarget::EnumVariant(id) => Some(id), _ => None, } } @@ -76,12 +78,12 @@ impl LangItemTarget { #[derive(Default, Debug, Clone, PartialEq, Eq)] pub struct LangItems { - items: FxHashMap, + items: FxHashMap, } impl LangItems { - pub fn target(&self, item: &str) -> Option { - self.items.get(item).copied() + pub fn target(&self, item: LangItem) -> Option { + self.items.get(&item).copied() } /// Salsa query. This will look for lang items in a specific crate. 
@@ -94,16 +96,27 @@ impl LangItems { for (_, module_data) in crate_def_map.modules() { for impl_def in module_data.scope.impls() { - lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDefId) + lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef); + for assoc in db.impl_data(impl_def).items.iter().copied() { + match assoc { + AssocItemId::FunctionId(f) => { + lang_items.collect_lang_item(db, f, LangItemTarget::Function) + } + AssocItemId::TypeAliasId(t) => { + lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias) + } + AssocItemId::ConstId(_) => (), + } + } } for def in module_data.scope.declarations() { match def { ModuleDefId::TraitId(trait_) => { - lang_items.collect_lang_item(db, trait_, LangItemTarget::TraitId); + lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait); db.trait_data(trait_).items.iter().for_each(|&(_, assoc_id)| { - if let crate::AssocItemId::FunctionId(f) = assoc_id { - lang_items.collect_lang_item(db, f, LangItemTarget::FunctionId); + if let AssocItemId::FunctionId(f) = assoc_id { + lang_items.collect_lang_item(db, f, LangItemTarget::Function); } }); } @@ -113,18 +126,24 @@ impl LangItems { lang_items.collect_lang_item( db, EnumVariantId { parent: e, local_id }, - LangItemTarget::EnumVariantId, + LangItemTarget::EnumVariant, ); }); } ModuleDefId::AdtId(AdtId::StructId(s)) => { - lang_items.collect_lang_item(db, s, LangItemTarget::StructId); + lang_items.collect_lang_item(db, s, LangItemTarget::Struct); + } + ModuleDefId::AdtId(AdtId::UnionId(u)) => { + lang_items.collect_lang_item(db, u, LangItemTarget::Union); } ModuleDefId::FunctionId(f) => { - lang_items.collect_lang_item(db, f, LangItemTarget::FunctionId); + lang_items.collect_lang_item(db, f, LangItemTarget::Function); } ModuleDefId::StaticId(s) => { - lang_items.collect_lang_item(db, s, LangItemTarget::StaticId); + lang_items.collect_lang_item(db, s, LangItemTarget::Static); + } + ModuleDefId::TypeAliasId(t) => { + lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias); } _ => {} } @@ -139,7 +158,7 @@ impl LangItems { pub(crate) fn lang_item_query( db: &dyn DefDatabase, start_crate: CrateId, - item: SmolStr, + item: LangItem, ) -> Option { let _p = profile::span("lang_item_query"); let lang_items = db.crate_lang_items(start_crate); @@ -150,7 +169,7 @@ impl LangItems { db.crate_graph()[start_crate] .dependencies .iter() - .find_map(|dep| db.lang_item(dep.crate_id, item.clone())) + .find_map(|dep| db.lang_item(dep.crate_id, item)) } fn collect_lang_item( @@ -162,8 +181,8 @@ impl LangItems { T: Into + Copy, { let _p = profile::span("collect_lang_item"); - if let Some(lang_item_name) = lang_attr(db, item) { - self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); + if let Some(lang_item) = lang_attr(db, item).and_then(|it| LangItem::from_str(&it)) { + self.items.entry(lang_item).or_insert_with(|| constructor(item)); } } } @@ -172,3 +191,224 @@ pub fn lang_attr(db: &dyn DefDatabase, item: impl Into + Copy) -> Opt let attrs = db.attrs(item.into()); attrs.by_key("lang").string_value().cloned() } + +pub enum GenericRequirement { + None, + Minimum(usize), + Exact(usize), +} + +macro_rules! language_item_table { + ( + $( $(#[$attr:meta])* $variant:ident, $name:ident, $method:ident, $target:expr, $generics:expr; )* + ) => { + + /// A representation of all the valid language items in Rust. 
+ #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub enum LangItem { + $( + #[doc = concat!("The `", stringify!($name), "` lang item.")] + $(#[$attr])* + $variant, + )* + } + + impl LangItem { + pub fn name(self) -> SmolStr { + match self { + $( LangItem::$variant => SmolStr::new(stringify!($name)), )* + } + } + + /// Opposite of [`LangItem::name`] + pub fn from_name(name: &hir_expand::name::Name) -> Option { + Self::from_str(name.as_str()?) + } + + /// Opposite of [`LangItem::name`] + pub fn from_str(name: &str) -> Option { + match name { + $( stringify!($name) => Some(LangItem::$variant), )* + _ => None, + } + } + } + } +} + +language_item_table! { +// Variant name, Name, Getter method name, Target Generic requirements; + Sized, sized, sized_trait, Target::Trait, GenericRequirement::Exact(0); + Unsize, unsize, unsize_trait, Target::Trait, GenericRequirement::Minimum(1); + /// Trait injected by `#[derive(PartialEq)]`, (i.e. "Partial EQ"). + StructuralPeq, structural_peq, structural_peq_trait, Target::Trait, GenericRequirement::None; + /// Trait injected by `#[derive(Eq)]`, (i.e. "Total EQ"; no, I will not apologize). + StructuralTeq, structural_teq, structural_teq_trait, Target::Trait, GenericRequirement::None; + Copy, copy, copy_trait, Target::Trait, GenericRequirement::Exact(0); + Clone, clone, clone_trait, Target::Trait, GenericRequirement::None; + Sync, sync, sync_trait, Target::Trait, GenericRequirement::Exact(0); + DiscriminantKind, discriminant_kind, discriminant_kind_trait, Target::Trait, GenericRequirement::None; + /// The associated item of the [`DiscriminantKind`] trait. + Discriminant, discriminant_type, discriminant_type, Target::AssocTy, GenericRequirement::None; + + PointeeTrait, pointee_trait, pointee_trait, Target::Trait, GenericRequirement::None; + Metadata, metadata_type, metadata_type, Target::AssocTy, GenericRequirement::None; + DynMetadata, dyn_metadata, dyn_metadata, Target::Struct, GenericRequirement::None; + + Freeze, freeze, freeze_trait, Target::Trait, GenericRequirement::Exact(0); + + Drop, drop, drop_trait, Target::Trait, GenericRequirement::None; + Destruct, destruct, destruct_trait, Target::Trait, GenericRequirement::None; + + CoerceUnsized, coerce_unsized, coerce_unsized_trait, Target::Trait, GenericRequirement::Minimum(1); + DispatchFromDyn, dispatch_from_dyn, dispatch_from_dyn_trait, Target::Trait, GenericRequirement::Minimum(1); + + // language items relating to transmutability + TransmuteOpts, transmute_opts, transmute_opts, Target::Struct, GenericRequirement::Exact(0); + TransmuteTrait, transmute_trait, transmute_trait, Target::Trait, GenericRequirement::Exact(3); + + Add, add, add_trait, Target::Trait, GenericRequirement::Exact(1); + Sub, sub, sub_trait, Target::Trait, GenericRequirement::Exact(1); + Mul, mul, mul_trait, Target::Trait, GenericRequirement::Exact(1); + Div, div, div_trait, Target::Trait, GenericRequirement::Exact(1); + Rem, rem, rem_trait, Target::Trait, GenericRequirement::Exact(1); + Neg, neg, neg_trait, Target::Trait, GenericRequirement::Exact(0); + Not, not, not_trait, Target::Trait, GenericRequirement::Exact(0); + BitXor, bitxor, bitxor_trait, Target::Trait, GenericRequirement::Exact(1); + BitAnd, bitand, bitand_trait, Target::Trait, GenericRequirement::Exact(1); + BitOr, bitor, bitor_trait, Target::Trait, GenericRequirement::Exact(1); + Shl, shl, shl_trait, Target::Trait, GenericRequirement::Exact(1); + Shr, shr, shr_trait, Target::Trait, GenericRequirement::Exact(1); + AddAssign, add_assign, add_assign_trait, Target::Trait, 
GenericRequirement::Exact(1); + SubAssign, sub_assign, sub_assign_trait, Target::Trait, GenericRequirement::Exact(1); + MulAssign, mul_assign, mul_assign_trait, Target::Trait, GenericRequirement::Exact(1); + DivAssign, div_assign, div_assign_trait, Target::Trait, GenericRequirement::Exact(1); + RemAssign, rem_assign, rem_assign_trait, Target::Trait, GenericRequirement::Exact(1); + BitXorAssign, bitxor_assign, bitxor_assign_trait, Target::Trait, GenericRequirement::Exact(1); + BitAndAssign, bitand_assign, bitand_assign_trait, Target::Trait, GenericRequirement::Exact(1); + BitOrAssign, bitor_assign, bitor_assign_trait, Target::Trait, GenericRequirement::Exact(1); + ShlAssign, shl_assign, shl_assign_trait, Target::Trait, GenericRequirement::Exact(1); + ShrAssign, shr_assign, shr_assign_trait, Target::Trait, GenericRequirement::Exact(1); + Index, index, index_trait, Target::Trait, GenericRequirement::Exact(1); + IndexMut, index_mut, index_mut_trait, Target::Trait, GenericRequirement::Exact(1); + + UnsafeCell, unsafe_cell, unsafe_cell_type, Target::Struct, GenericRequirement::None; + VaList, va_list, va_list, Target::Struct, GenericRequirement::None; + + Deref, deref, deref_trait, Target::Trait, GenericRequirement::Exact(0); + DerefMut, deref_mut, deref_mut_trait, Target::Trait, GenericRequirement::Exact(0); + DerefTarget, deref_target, deref_target, Target::AssocTy, GenericRequirement::None; + Receiver, receiver, receiver_trait, Target::Trait, GenericRequirement::None; + + Fn, fn, fn_trait, Target::Trait, GenericRequirement::Exact(1); + FnMut, fn_mut, fn_mut_trait, Target::Trait, GenericRequirement::Exact(1); + FnOnce, fn_once, fn_once_trait, Target::Trait, GenericRequirement::Exact(1); + + FnOnceOutput, fn_once_output, fn_once_output, Target::AssocTy, GenericRequirement::None; + + Future, future_trait, future_trait, Target::Trait, GenericRequirement::Exact(0); + GeneratorState, generator_state, gen_state, Target::Enum, GenericRequirement::None; + Generator, generator, gen_trait, Target::Trait, GenericRequirement::Minimum(1); + Unpin, unpin, unpin_trait, Target::Trait, GenericRequirement::None; + Pin, pin, pin_type, Target::Struct, GenericRequirement::None; + + PartialEq, eq, eq_trait, Target::Trait, GenericRequirement::Exact(1); + PartialOrd, partial_ord, partial_ord_trait, Target::Trait, GenericRequirement::Exact(1); + + // A number of panic-related lang items. The `panic` item corresponds to divide-by-zero and + // various panic cases with `match`. The `panic_bounds_check` item is for indexing arrays. + // + // The `begin_unwind` lang item has a predefined symbol name and is sort of a "weak lang item" + // in the sense that a crate is not required to have it defined to use it, but a final product + // is required to define it somewhere. Additionally, there are restrictions on crates that use + // a weak lang item, but do not have it defined. 
+ Panic, panic, panic_fn, Target::Fn, GenericRequirement::Exact(0); + PanicNounwind, panic_nounwind, panic_nounwind, Target::Fn, GenericRequirement::Exact(0); + PanicFmt, panic_fmt, panic_fmt, Target::Fn, GenericRequirement::None; + PanicDisplay, panic_display, panic_display, Target::Fn, GenericRequirement::None; + ConstPanicFmt, const_panic_fmt, const_panic_fmt, Target::Fn, GenericRequirement::None; + PanicBoundsCheck, panic_bounds_check, panic_bounds_check_fn, Target::Fn, GenericRequirement::Exact(0); + PanicInfo, panic_info, panic_info, Target::Struct, GenericRequirement::None; + PanicLocation, panic_location, panic_location, Target::Struct, GenericRequirement::None; + PanicImpl, panic_impl, panic_impl, Target::Fn, GenericRequirement::None; + PanicCannotUnwind, panic_cannot_unwind, panic_cannot_unwind, Target::Fn, GenericRequirement::Exact(0); + /// libstd panic entry point. Necessary for const eval to be able to catch it + BeginPanic, begin_panic, begin_panic_fn, Target::Fn, GenericRequirement::None; + + ExchangeMalloc, exchange_malloc, exchange_malloc_fn, Target::Fn, GenericRequirement::None; + BoxFree, box_free, box_free_fn, Target::Fn, GenericRequirement::Minimum(1); + DropInPlace, drop_in_place, drop_in_place_fn, Target::Fn, GenericRequirement::Minimum(1); + AllocLayout, alloc_layout, alloc_layout, Target::Struct, GenericRequirement::None; + + Start, start, start_fn, Target::Fn, GenericRequirement::Exact(1); + + EhPersonality, eh_personality, eh_personality, Target::Fn, GenericRequirement::None; + EhCatchTypeinfo, eh_catch_typeinfo, eh_catch_typeinfo, Target::Static, GenericRequirement::None; + + OwnedBox, owned_box, owned_box, Target::Struct, GenericRequirement::Minimum(1); + + PhantomData, phantom_data, phantom_data, Target::Struct, GenericRequirement::Exact(1); + + ManuallyDrop, manually_drop, manually_drop, Target::Struct, GenericRequirement::None; + + MaybeUninit, maybe_uninit, maybe_uninit, Target::Union, GenericRequirement::None; + + /// Align offset for stride != 1; must not panic. + AlignOffset, align_offset, align_offset_fn, Target::Fn, GenericRequirement::None; + + Termination, termination, termination, Target::Trait, GenericRequirement::None; + + Try, Try, try_trait, Target::Trait, GenericRequirement::None; + + Tuple, tuple_trait, tuple_trait, Target::Trait, GenericRequirement::Exact(0); + + SliceLen, slice_len_fn, slice_len_fn, Target::Method(MethodKind::Inherent), GenericRequirement::None; + + // Language items from AST lowering + TryTraitFromResidual, from_residual, from_residual_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; + TryTraitFromOutput, from_output, from_output_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; + TryTraitBranch, branch, branch_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; + TryTraitFromYeet, from_yeet, from_yeet_fn, Target::Fn, GenericRequirement::None; + + PointerSized, pointer_sized, pointer_sized, Target::Trait, GenericRequirement::Exact(0); + + Poll, Poll, poll, Target::Enum, GenericRequirement::None; + PollReady, Ready, poll_ready_variant, Target::Variant, GenericRequirement::None; + PollPending, Pending, poll_pending_variant, Target::Variant, GenericRequirement::None; + + // FIXME(swatinem): the following lang items are used for async lowering and + // should become obsolete eventually. 
+ ResumeTy, ResumeTy, resume_ty, Target::Struct, GenericRequirement::None; + IdentityFuture, identity_future, identity_future_fn, Target::Fn, GenericRequirement::None; + GetContext, get_context, get_context_fn, Target::Fn, GenericRequirement::None; + + Context, Context, context, Target::Struct, GenericRequirement::None; + FuturePoll, poll, future_poll_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; + + FromFrom, from, from_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; + + OptionSome, Some, option_some_variant, Target::Variant, GenericRequirement::None; + OptionNone, None, option_none_variant, Target::Variant, GenericRequirement::None; + + ResultOk, Ok, result_ok_variant, Target::Variant, GenericRequirement::None; + ResultErr, Err, result_err_variant, Target::Variant, GenericRequirement::None; + + ControlFlowContinue, Continue, cf_continue_variant, Target::Variant, GenericRequirement::None; + ControlFlowBreak, Break, cf_break_variant, Target::Variant, GenericRequirement::None; + + IntoFutureIntoFuture, into_future, into_future_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; + IntoIterIntoIter, into_iter, into_iter_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None; + IteratorNext, next, next_fn, Target::Method(MethodKind::Trait { body: false}), GenericRequirement::None; + + PinNewUnchecked, new_unchecked, new_unchecked_fn, Target::Method(MethodKind::Inherent), GenericRequirement::None; + + RangeFrom, RangeFrom, range_from_struct, Target::Struct, GenericRequirement::None; + RangeFull, RangeFull, range_full_struct, Target::Struct, GenericRequirement::None; + RangeInclusiveStruct, RangeInclusive, range_inclusive_struct, Target::Struct, GenericRequirement::None; + RangeInclusiveNew, range_inclusive_new, range_inclusive_new_method, Target::Method(MethodKind::Inherent), GenericRequirement::None; + Range, Range, range_struct, Target::Struct, GenericRequirement::None; + RangeToInclusive, RangeToInclusive, range_to_inclusive_struct, Target::Struct, GenericRequirement::None; + RangeTo, RangeTo, range_to_struct, Target::Struct, GenericRequirement::None; + + String, String, string, Target::Struct, GenericRequirement::None; +} diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 8eae2e92f4262..9e4e0dcc96cc6 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -634,7 +634,6 @@ pub trait Lookup { pub trait HasModule { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId; } - impl HasModule for ItemContainerId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { match *self { diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index e8b3e312aab7a..b62f4fe770160 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -133,6 +133,14 @@ impl Name { } } + /// Returns the text this name represents if it isn't a tuple field. + pub fn as_str(&self) -> Option<&str> { + match &self.0 { + Repr::Text(it) => Some(it), + _ => None, + } + } + /// Returns the textual representation of this name as a [`SmolStr`]. /// Prefer using this over [`ToString::to_string`] if possible as this conversion is cheaper in /// the general case. 
diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs index cbcf8f74c556d..caddca6d9152e 100644 --- a/crates/hir-ty/src/autoderef.rs +++ b/crates/hir-ty/src/autoderef.rs @@ -6,9 +6,9 @@ use std::sync::Arc; use chalk_ir::cast::Cast; +use hir_def::lang_item::LangItem; use hir_expand::name::name; use limit::Limit; -use syntax::SmolStr; use crate::{ db::HirDatabase, infer::unify::InferenceTable, Canonical, Goal, Interner, ProjectionTyExt, @@ -117,9 +117,8 @@ fn deref_by_trait(table: &mut InferenceTable<'_>, ty: Ty) -> Option { } let db = table.db; - let deref_trait = db - .lang_item(table.trait_env.krate, SmolStr::new_inline("deref")) - .and_then(|l| l.as_trait())?; + let deref_trait = + db.lang_item(table.trait_env.krate, LangItem::Deref).and_then(|l| l.as_trait())?; let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?; let projection = { diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 1c2b8de7f784f..bbb6625855d32 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -3,7 +3,6 @@ use std::sync::Arc; use cov_mark::hit; -use syntax::SmolStr; use tracing::debug; use chalk_ir::{cast::Cast, fold::shift::Shift, CanonicalVarKinds}; @@ -12,7 +11,7 @@ use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait}; use base_db::CrateId; use hir_def::{ expr::Movability, - lang_item::{lang_attr, LangItemTarget}, + lang_item::{lang_attr, LangItem, LangItemTarget}, AssocItemId, GenericDefId, HasModule, ItemContainerId, Lookup, ModuleId, TypeAliasId, }; use hir_expand::name::name; @@ -182,9 +181,9 @@ impl<'a> chalk_solve::RustIrDatabase for ChalkContext<'a> { &self, well_known_trait: rust_ir::WellKnownTrait, ) -> Option> { - let lang_attr = lang_attr_from_well_known_trait(well_known_trait); + let lang_attr = lang_item_from_well_known_trait(well_known_trait); let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) { - Some(LangItemTarget::TraitId(trait_)) => trait_, + Some(LangItemTarget::Trait(trait_)) => trait_, _ => return None, }; Some(to_chalk_trait_id(trait_)) @@ -216,7 +215,7 @@ impl<'a> chalk_solve::RustIrDatabase for ChalkContext<'a> { crate::ImplTraitId::AsyncBlockTypeImplTrait(..) 
=> { if let Some((future_trait, future_output)) = self .db - .lang_item(self.krate, SmolStr::new_inline("future_trait")) + .lang_item(self.krate, LangItem::Future) .and_then(|item| item.as_trait()) .and_then(|trait_| { let alias = @@ -246,7 +245,7 @@ impl<'a> chalk_solve::RustIrDatabase for ChalkContext<'a> { binder.push(crate::wrap_empty_binders(impl_bound)); let sized_trait = self .db - .lang_item(self.krate, SmolStr::new_inline("sized")) + .lang_item(self.krate, LangItem::Sized) .and_then(|item| item.as_trait()); if let Some(sized_trait_) = sized_trait { let sized_bound = WhereClause::Implemented(TraitRef { @@ -493,7 +492,7 @@ pub(crate) fn associated_ty_data_query( if !ctx.unsized_types.borrow().contains(&self_ty) { let sized_trait = db - .lang_item(resolver.krate(), SmolStr::new_inline("sized")) + .lang_item(resolver.krate(), LangItem::Sized) .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id)); let sized_bound = sized_trait.into_iter().map(|sized_trait| { let trait_bound = @@ -541,8 +540,8 @@ pub(crate) fn trait_datum_query( let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars); let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect(); let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses }; - let well_known = - lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name)); + let well_known = lang_attr(db.upcast(), trait_) + .and_then(|name| well_known_trait_from_lang_item(LangItem::from_str(&name)?)); let trait_datum = TraitDatum { id: trait_id, binders: make_binders(db, &generic_params, trait_datum_bound), @@ -553,42 +552,42 @@ pub(crate) fn trait_datum_query( Arc::new(trait_datum) } -fn well_known_trait_from_lang_attr(name: &str) -> Option { - Some(match name { - "clone" => WellKnownTrait::Clone, - "coerce_unsized" => WellKnownTrait::CoerceUnsized, - "copy" => WellKnownTrait::Copy, - "discriminant_kind" => WellKnownTrait::DiscriminantKind, - "dispatch_from_dyn" => WellKnownTrait::DispatchFromDyn, - "drop" => WellKnownTrait::Drop, - "fn" => WellKnownTrait::Fn, - "fn_mut" => WellKnownTrait::FnMut, - "fn_once" => WellKnownTrait::FnOnce, - "generator" => WellKnownTrait::Generator, - "sized" => WellKnownTrait::Sized, - "unpin" => WellKnownTrait::Unpin, - "unsize" => WellKnownTrait::Unsize, - "tuple_trait" => WellKnownTrait::Tuple, +fn well_known_trait_from_lang_item(item: LangItem) -> Option { + Some(match item { + LangItem::Clone => WellKnownTrait::Clone, + LangItem::CoerceUnsized => WellKnownTrait::CoerceUnsized, + LangItem::Copy => WellKnownTrait::Copy, + LangItem::DiscriminantKind => WellKnownTrait::DiscriminantKind, + LangItem::DispatchFromDyn => WellKnownTrait::DispatchFromDyn, + LangItem::Drop => WellKnownTrait::Drop, + LangItem::Fn => WellKnownTrait::Fn, + LangItem::FnMut => WellKnownTrait::FnMut, + LangItem::FnOnce => WellKnownTrait::FnOnce, + LangItem::Generator => WellKnownTrait::Generator, + LangItem::Sized => WellKnownTrait::Sized, + LangItem::Unpin => WellKnownTrait::Unpin, + LangItem::Unsize => WellKnownTrait::Unsize, + LangItem::Tuple => WellKnownTrait::Tuple, _ => return None, }) } -fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str { - match attr { - WellKnownTrait::Clone => "clone", - WellKnownTrait::CoerceUnsized => "coerce_unsized", - WellKnownTrait::Copy => "copy", - WellKnownTrait::DiscriminantKind => "discriminant_kind", - WellKnownTrait::DispatchFromDyn => "dispatch_from_dyn", - WellKnownTrait::Drop => "drop", - WellKnownTrait::Fn => "fn", - 
WellKnownTrait::FnMut => "fn_mut", - WellKnownTrait::FnOnce => "fn_once", - WellKnownTrait::Generator => "generator", - WellKnownTrait::Sized => "sized", - WellKnownTrait::Tuple => "tuple_trait", - WellKnownTrait::Unpin => "unpin", - WellKnownTrait::Unsize => "unsize", +fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem { + match trait_ { + WellKnownTrait::Clone => LangItem::Clone, + WellKnownTrait::CoerceUnsized => LangItem::CoerceUnsized, + WellKnownTrait::Copy => LangItem::Copy, + WellKnownTrait::DiscriminantKind => LangItem::DiscriminantKind, + WellKnownTrait::DispatchFromDyn => LangItem::DispatchFromDyn, + WellKnownTrait::Drop => LangItem::Drop, + WellKnownTrait::Fn => LangItem::Fn, + WellKnownTrait::FnMut => LangItem::FnMut, + WellKnownTrait::FnOnce => LangItem::FnOnce, + WellKnownTrait::Generator => LangItem::Generator, + WellKnownTrait::Sized => LangItem::Sized, + WellKnownTrait::Tuple => LangItem::Tuple, + WellKnownTrait::Unpin => LangItem::Unpin, + WellKnownTrait::Unsize => LangItem::Unsize, } } diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs index 0244b6c653e2d..329c87c74e97e 100644 --- a/crates/hir-ty/src/chalk_ext.rs +++ b/crates/hir-ty/src/chalk_ext.rs @@ -4,10 +4,10 @@ use chalk_ir::{FloatTy, IntTy, Mutability, Scalar, TyVariableKind, UintTy}; use hir_def::{ builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint}, generics::TypeOrConstParamData, + lang_item::LangItem, type_ref::Rawness, FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId, }; -use syntax::SmolStr; use crate::{ db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, @@ -214,9 +214,8 @@ impl TyExt for Ty { match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) { ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => { let krate = def.module(db.upcast()).krate(); - if let Some(future_trait) = db - .lang_item(krate, SmolStr::new_inline("future_trait")) - .and_then(|item| item.as_trait()) + if let Some(future_trait) = + db.lang_item(krate, LangItem::Future).and_then(|item| item.as_trait()) { // This is only used by type walking. // Parameters will be walked outside, and projection predicate is not used. 
diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 3a96e53d719e9..ae2162dd7cd24 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -12,6 +12,7 @@ use hir_def::{ find_path, generics::{TypeOrConstParamData, TypeParamProvenance}, item_scope::ItemInNs, + lang_item::LangItem, path::{Path, PathKind}, type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef}, visibility::Visibility, @@ -21,7 +22,6 @@ use hir_expand::{hygiene::Hygiene, name::Name}; use intern::{Internable, Interned}; use itertools::Itertools; use smallvec::SmallVec; -use syntax::SmolStr; use crate::{ db::HirDatabase, @@ -925,7 +925,7 @@ impl SizedByDefault { Self::NotSized => false, Self::Sized { anchor } => { let sized_trait = db - .lang_item(anchor, SmolStr::new_inline("sized")) + .lang_item(anchor, LangItem::Sized) .and_then(|lang_item| lang_item.as_trait()); Some(trait_) == sized_trait } @@ -1057,8 +1057,7 @@ fn write_bounds_like_dyn_trait( } if let SizedByDefault::Sized { anchor } = default_sized { let sized_trait = - f.db.lang_item(anchor, SmolStr::new_inline("sized")) - .and_then(|lang_item| lang_item.as_trait()); + f.db.lang_item(anchor, LangItem::Sized).and_then(|lang_item| lang_item.as_trait()); if !is_sized { if !first { write!(f, " + ")?; diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 0e177db7726a4..d06b22fff9a3d 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -22,7 +22,7 @@ use hir_def::{ builtin_type::{BuiltinInt, BuiltinType, BuiltinUint}, data::{ConstData, StaticData}, expr::{BindingAnnotation, ExprId, ExprOrPatId, PatId}, - lang_item::LangItemTarget, + lang_item::{LangItem, LangItemTarget}, layout::Integer, path::{path, Path}, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, @@ -30,7 +30,7 @@ use hir_def::{ AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId, }; -use hir_expand::name::{name, Name}; +use hir_expand::name::name; use itertools::Either; use la_arena::ArenaMap; use rustc_hash::FxHashMap; @@ -917,9 +917,9 @@ impl<'a> InferenceContext<'a> { } } - fn resolve_lang_item(&self, name: Name) -> Option { + fn resolve_lang_item(&self, item: LangItem) -> Option { let krate = self.resolver.krate(); - self.db.lang_item(krate, name.to_smol_str()) + self.db.lang_item(krate, item) } fn resolve_into_iter_item(&self) -> Option { @@ -946,12 +946,12 @@ impl<'a> InferenceContext<'a> { } fn resolve_ops_neg_output(&self) -> Option { - let trait_ = self.resolve_lang_item(name![neg])?.as_trait()?; + let trait_ = self.resolve_lang_item(LangItem::Neg)?.as_trait()?; self.db.trait_data(trait_).associated_type_by_name(&name![Output]) } fn resolve_ops_not_output(&self) -> Option { - let trait_ = self.resolve_lang_item(name![not])?.as_trait()?; + let trait_ = self.resolve_lang_item(LangItem::Not)?.as_trait()?; self.db.trait_data(trait_).associated_type_by_name(&name![Output]) } @@ -959,12 +959,12 @@ impl<'a> InferenceContext<'a> { let trait_ = self .resolver .resolve_known_trait(self.db.upcast(), &path![core::future::IntoFuture]) - .or_else(|| self.resolve_lang_item(name![future_trait])?.as_trait())?; + .or_else(|| self.resolve_lang_item(LangItem::Future)?.as_trait())?; self.db.trait_data(trait_).associated_type_by_name(&name![Output]) } fn resolve_boxed_box(&self) -> Option { - let struct_ = self.resolve_lang_item(name![owned_box])?.as_struct()?; + let struct_ = 
self.resolve_lang_item(LangItem::OwnedBox)?.as_struct()?; Some(struct_.into()) } @@ -1005,7 +1005,7 @@ impl<'a> InferenceContext<'a> { } fn resolve_ops_index(&self) -> Option { - self.resolve_lang_item(name![index])?.as_trait() + self.resolve_lang_item(LangItem::Index)?.as_trait() } fn resolve_ops_index_output(&self) -> Option { @@ -1014,7 +1014,7 @@ impl<'a> InferenceContext<'a> { } fn resolve_va_list(&self) -> Option { - let struct_ = self.resolve_lang_item(name![va_list])?.as_struct()?; + let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?; Some(struct_.into()) } } diff --git a/crates/hir-ty/src/infer/coerce.rs b/crates/hir-ty/src/infer/coerce.rs index 8df25c83c6eb7..3293534a068bd 100644 --- a/crates/hir-ty/src/infer/coerce.rs +++ b/crates/hir-ty/src/infer/coerce.rs @@ -8,9 +8,11 @@ use std::{iter, sync::Arc}; use chalk_ir::{cast::Cast, BoundVar, Goal, Mutability, TyVariableKind}; -use hir_def::{expr::ExprId, lang_item::LangItemTarget}; +use hir_def::{ + expr::ExprId, + lang_item::{LangItem, LangItemTarget}, +}; use stdx::always; -use syntax::SmolStr; use crate::{ autoderef::{Autoderef, AutoderefKind}, @@ -570,11 +572,10 @@ impl<'a> InferenceTable<'a> { reborrow.as_ref().map_or_else(|| from_ty.clone(), |(_, adj)| adj.target.clone()); let krate = self.trait_env.krate; - let coerce_unsized_trait = - match self.db.lang_item(krate, SmolStr::new_inline("coerce_unsized")) { - Some(LangItemTarget::TraitId(trait_)) => trait_, - _ => return Err(TypeError), - }; + let coerce_unsized_trait = match self.db.lang_item(krate, LangItem::CoerceUnsized) { + Some(LangItemTarget::Trait(trait_)) => trait_, + _ => return Err(TypeError), + }; let coerce_unsized_tref = { let b = TyBuilder::trait_ref(self.db, coerce_unsized_trait); diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 6f347f6757b00..7ae85d20611a8 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -29,7 +29,7 @@ use crate::{ const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode, }, mapping::{from_chalk, ToChalk}, - method_resolution::{self, lang_names_for_bin_op, VisibleFromModule}, + method_resolution::{self, lang_items_for_bin_op, VisibleFromModule}, primitive::{self, UintTy}, static_lifetime, to_chalk_trait_id, utils::{generics, Generics}, @@ -1008,7 +1008,7 @@ impl<'a> InferenceContext<'a> { let lhs_ty = self.infer_expr(lhs, &lhs_expectation); let rhs_ty = self.table.new_type_var(); - let trait_func = lang_names_for_bin_op(op).and_then(|(name, lang_item)| { + let trait_func = lang_items_for_bin_op(op).and_then(|(name, lang_item)| { let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?; let func = self.db.trait_data(trait_id).method_by_name(&name)?; Some((trait_id, func)) diff --git a/crates/hir-ty/src/lang_items.rs b/crates/hir-ty/src/lang_items.rs index afc54e729f9c3..5308c72161b26 100644 --- a/crates/hir-ty/src/lang_items.rs +++ b/crates/hir-ty/src/lang_items.rs @@ -1,20 +1,19 @@ //! 
Functions to detect special lang items -use hir_def::{AdtId, HasModule}; -use hir_expand::name; +use hir_def::{lang_item::LangItem, AdtId, HasModule}; use crate::db::HirDatabase; pub fn is_box(adt: AdtId, db: &dyn HirDatabase) -> bool { - let owned_box = name![owned_box].to_smol_str(); let krate = adt.module(db.upcast()).krate(); - let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from); + let box_adt = + db.lang_item(krate, LangItem::OwnedBox).and_then(|it| it.as_struct()).map(AdtId::from); Some(adt) == box_adt } pub fn is_unsafe_cell(adt: AdtId, db: &dyn HirDatabase) -> bool { - let owned_box = name![unsafe_cell].to_smol_str(); let krate = adt.module(db.upcast()).krate(); - let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from); + let box_adt = + db.lang_item(krate, LangItem::UnsafeCell).and_then(|it| it.as_struct()).map(AdtId::from); Some(adt) == box_adt } diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 24973afb9cea6..b1a7ad3e940ed 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -23,7 +23,7 @@ use hir_def::{ generics::{ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, }, - lang_item::lang_attr, + lang_item::{lang_attr, LangItem}, path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments}, resolver::{HasResolver, Resolver, TypeNs}, type_ref::{ @@ -40,7 +40,7 @@ use la_arena::ArenaMap; use rustc_hash::FxHashSet; use smallvec::SmallVec; use stdx::{impl_from, never}; -use syntax::{ast, SmolStr}; +use syntax::ast; use crate::{ all_super_traits, @@ -954,7 +954,7 @@ impl<'a> TyLoweringContext<'a> { TypeBound::Path(path, TraitBoundModifier::Maybe) => { let sized_trait = self .db - .lang_item(self.resolver.krate(), SmolStr::new_inline("sized")) + .lang_item(self.resolver.krate(), LangItem::Sized) .and_then(|lang_item| lang_item.as_trait()); // Don't lower associated type bindings as the only possible relaxed trait bound // `?Sized` has no of them. 
@@ -1150,7 +1150,7 @@ impl<'a> TyLoweringContext<'a> { let krate = func.lookup(ctx.db.upcast()).module(ctx.db.upcast()).krate(); let sized_trait = ctx .db - .lang_item(krate, SmolStr::new_inline("sized")) + .lang_item(krate, LangItem::Sized) .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id)); let sized_clause = sized_trait.map(|trait_id| { let clause = WhereClause::Implemented(TraitRef { @@ -1489,7 +1489,7 @@ fn implicitly_sized_clauses<'a>( let is_trait_def = matches!(def, GenericDefId::TraitId(..)); let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..]; let sized_trait = db - .lang_item(resolver.krate(), SmolStr::new_inline("sized")) + .lang_item(resolver.krate(), LangItem::Sized) .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id)); sized_trait.into_iter().flat_map(move |sized_trait| { diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 64376e10bcc5b..858de0005962c 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -7,8 +7,9 @@ use std::{ops::ControlFlow, sync::Arc}; use base_db::{CrateId, Edition}; use chalk_ir::{cast::Cast, Mutability, UniverseIndex}; use hir_def::{ - data::ImplData, item_scope::ItemScope, nameres::DefMap, AssocItemId, BlockId, ConstId, - FunctionId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId, ModuleId, TraitId, + data::ImplData, item_scope::ItemScope, lang_item::LangItem, nameres::DefMap, AssocItemId, + BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId, + ModuleId, TraitId, }; use hir_expand::name::Name; use rustc_hash::{FxHashMap, FxHashSet}; @@ -437,49 +438,49 @@ pub fn def_crates( } } -pub fn lang_names_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, Name)> { +pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangItem)> { use hir_expand::name; use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering}; Some(match op { BinaryOp::LogicOp(_) => return None, BinaryOp::ArithOp(aop) => match aop { - ArithOp::Add => (name!(add), name!(add)), - ArithOp::Mul => (name!(mul), name!(mul)), - ArithOp::Sub => (name!(sub), name!(sub)), - ArithOp::Div => (name!(div), name!(div)), - ArithOp::Rem => (name!(rem), name!(rem)), - ArithOp::Shl => (name!(shl), name!(shl)), - ArithOp::Shr => (name!(shr), name!(shr)), - ArithOp::BitXor => (name!(bitxor), name!(bitxor)), - ArithOp::BitOr => (name!(bitor), name!(bitor)), - ArithOp::BitAnd => (name!(bitand), name!(bitand)), + ArithOp::Add => (name![add], LangItem::Add), + ArithOp::Mul => (name![mul], LangItem::Mul), + ArithOp::Sub => (name![sub], LangItem::Sub), + ArithOp::Div => (name![div], LangItem::Div), + ArithOp::Rem => (name![rem], LangItem::Rem), + ArithOp::Shl => (name![shl], LangItem::Shl), + ArithOp::Shr => (name![shr], LangItem::Shr), + ArithOp::BitXor => (name![bitxor], LangItem::BitXor), + ArithOp::BitOr => (name![bitor], LangItem::BitOr), + ArithOp::BitAnd => (name![bitand], LangItem::BitAnd), }, BinaryOp::Assignment { op: Some(aop) } => match aop { - ArithOp::Add => (name!(add_assign), name!(add_assign)), - ArithOp::Mul => (name!(mul_assign), name!(mul_assign)), - ArithOp::Sub => (name!(sub_assign), name!(sub_assign)), - ArithOp::Div => (name!(div_assign), name!(div_assign)), - ArithOp::Rem => (name!(rem_assign), name!(rem_assign)), - ArithOp::Shl => (name!(shl_assign), name!(shl_assign)), - ArithOp::Shr => (name!(shr_assign), name!(shr_assign)), - ArithOp::BitXor => (name!(bitxor_assign), 
name!(bitxor_assign)), - ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)), - ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)), + ArithOp::Add => (name![add_assign], LangItem::AddAssign), + ArithOp::Mul => (name![mul_assign], LangItem::MulAssign), + ArithOp::Sub => (name![sub_assign], LangItem::SubAssign), + ArithOp::Div => (name![div_assign], LangItem::DivAssign), + ArithOp::Rem => (name![rem_assign], LangItem::RemAssign), + ArithOp::Shl => (name![shl_assign], LangItem::ShlAssign), + ArithOp::Shr => (name![shr_assign], LangItem::ShrAssign), + ArithOp::BitXor => (name![bitxor_assign], LangItem::BitXorAssign), + ArithOp::BitOr => (name![bitor_assign], LangItem::BitOrAssign), + ArithOp::BitAnd => (name![bitand_assign], LangItem::BitAndAssign), }, BinaryOp::CmpOp(cop) => match cop { - CmpOp::Eq { negated: false } => (name!(eq), name!(eq)), - CmpOp::Eq { negated: true } => (name!(ne), name!(eq)), + CmpOp::Eq { negated: false } => (name![eq], LangItem::PartialEq), + CmpOp::Eq { negated: true } => (name![ne], LangItem::PartialEq), CmpOp::Ord { ordering: Ordering::Less, strict: false } => { - (name!(le), name!(partial_ord)) + (name![le], LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Less, strict: true } => { - (name!(lt), name!(partial_ord)) + (name![lt], LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Greater, strict: false } => { - (name!(ge), name!(partial_ord)) + (name![ge], LangItem::PartialOrd) } CmpOp::Ord { ordering: Ordering::Greater, strict: true } => { - (name!(gt), name!(partial_ord)) + (name![gt], LangItem::PartialOrd) } }, BinaryOp::Assignment { op: None } => return None, diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs index 778a6b82047ef..3ab85c68f5b9d 100644 --- a/crates/hir-ty/src/traits.rs +++ b/crates/hir-ty/src/traits.rs @@ -7,9 +7,11 @@ use chalk_recursive::Cache; use chalk_solve::{logging_db::LoggingRustIrDatabase, Solver}; use base_db::CrateId; -use hir_def::{lang_item::LangItemTarget, TraitId}; +use hir_def::{ + lang_item::{LangItem, LangItemTarget}, + TraitId, +}; use stdx::panic_context; -use syntax::SmolStr; use crate::{ db::HirDatabase, infer::unify::InferenceTable, AliasEq, AliasTy, Canonical, DomainGoal, Goal, @@ -177,18 +179,18 @@ pub enum FnTrait { } impl FnTrait { - const fn lang_item_name(self) -> &'static str { + const fn lang_item(self) -> LangItem { match self { - FnTrait::FnOnce => "fn_once", - FnTrait::FnMut => "fn_mut", - FnTrait::Fn => "fn", + FnTrait::FnOnce => LangItem::FnOnce, + FnTrait::FnMut => LangItem::FnMut, + FnTrait::Fn => LangItem::Fn, } } pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option { - let target = db.lang_item(krate, SmolStr::new_inline(self.lang_item_name()))?; + let target = db.lang_item(krate, self.lang_item())?; match target { - LangItemTarget::TraitId(t) => Some(t), + LangItemTarget::Trait(t) => Some(t), _ => None, } } diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index 4f516e18be6e0..396cba89b67d7 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -11,6 +11,7 @@ use hir_def::{ GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, }, + lang_item::LangItem, resolver::{HasResolver, TypeNs}, type_ref::{TraitBoundModifier, TypeRef}, ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId, @@ -21,7 +22,6 @@ use intern::Interned; use itertools::Either; use rustc_hash::FxHashSet; use smallvec::{smallvec, SmallVec}; -use 
syntax::SmolStr; use crate::{ db::HirDatabase, ChalkTraitId, Interner, Substitution, TraitRef, TraitRefExt, WhereClause, @@ -29,9 +29,9 @@ use crate::{ pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: CrateId) -> impl Iterator { [ - db.lang_item(krate, SmolStr::new_inline("fn")), - db.lang_item(krate, SmolStr::new_inline("fn_mut")), - db.lang_item(krate, SmolStr::new_inline("fn_once")), + db.lang_item(krate, LangItem::Fn), + db.lang_item(krate, LangItem::FnMut), + db.lang_item(krate, LangItem::FnOnce), ] .into_iter() .flatten() diff --git a/crates/hir/src/display.rs b/crates/hir/src/display.rs index 55d3b6097685b..0d19420127f54 100644 --- a/crates/hir/src/display.rs +++ b/crates/hir/src/display.rs @@ -4,6 +4,7 @@ use hir_def::{ generics::{ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, }, + lang_item::LangItem, type_ref::{TypeBound, TypeRef}, AdtId, GenericDefId, }; @@ -14,7 +15,6 @@ use hir_ty::{ }, Interner, TraitRefExt, WhereClause, }; -use syntax::SmolStr; use crate::{ Adt, Const, ConstParam, Enum, Field, Function, GenericParam, HasCrate, HasVisibility, @@ -261,8 +261,7 @@ impl HirDisplay for TypeParam { bounds.iter().cloned().map(|b| b.substitute(Interner, &substs)).collect(); let krate = self.id.parent().krate(f.db).id; let sized_trait = - f.db.lang_item(krate, SmolStr::new_inline("sized")) - .and_then(|lang_item| lang_item.as_trait()); + f.db.lang_item(krate, LangItem::Sized).and_then(|lang_item| lang_item.as_trait()); let has_only_sized_bound = predicates.iter().all(move |pred| match pred.skip_binders() { WhereClause::Implemented(it) => Some(it.hir_trait_id()) == sized_trait, _ => false, diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 012812cea24aa..6ec39dfd73fd5 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -44,7 +44,7 @@ use hir_def::{ expr::{BindingAnnotation, ExprOrPatId, LabelId, Pat, PatId}, generics::{TypeOrConstParamData, TypeParamProvenance}, item_tree::ItemTreeNode, - lang_item::LangItemTarget, + lang_item::{LangItem, LangItemTarget}, layout::{Layout, LayoutError, ReprOptions}, nameres::{self, diagnostics::DefDiagnostic}, per_ns::PerNs, @@ -1836,7 +1836,7 @@ pub struct Trait { impl Trait { pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option { - db.lang_item(krate.into(), name.to_smol_str()) + db.lang_item(krate.into(), LangItem::from_name(name)?) .and_then(LangItemTarget::as_trait) .map(Into::into) } @@ -3009,7 +3009,7 @@ impl Type { /// This function is used in `.await` syntax completion. 
pub fn impls_into_future(&self, db: &dyn HirDatabase) -> bool { let trait_ = db - .lang_item(self.env.krate, SmolStr::new_inline("into_future")) + .lang_item(self.env.krate, LangItem::IntoFutureIntoFuture) .and_then(|it| { let into_future_fn = it.as_function()?; let assoc_item = as_assoc_item(db, AssocItem::Function, into_future_fn)?; @@ -3017,8 +3017,7 @@ impl Type { Some(into_future_trait.id) }) .or_else(|| { - let future_trait = - db.lang_item(self.env.krate, SmolStr::new_inline("future_trait"))?; + let future_trait = db.lang_item(self.env.krate, LangItem::Future)?; future_trait.as_trait() }); @@ -3111,9 +3110,9 @@ impl Type { } pub fn is_copy(&self, db: &dyn HirDatabase) -> bool { - let lang_item = db.lang_item(self.env.krate, SmolStr::new_inline("copy")); + let lang_item = db.lang_item(self.env.krate, LangItem::Copy); let copy_trait = match lang_item { - Some(LangItemTarget::TraitId(it)) => it, + Some(LangItemTarget::Trait(it)) => it, _ => return false, }; self.impls_trait(db, copy_trait.into(), &[]) diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 2354eb2c9ccaa..5e0c9933a7b6b 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -17,6 +17,7 @@ use hir_def::{ Body, BodySourceMap, }, expr::{ExprId, Pat, PatId}, + lang_item::LangItem, macro_id_to_def_id, path::{ModPath, Path, PathKind}, resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs}, @@ -37,7 +38,7 @@ use hir_ty::{ record_literal_missing_fields, record_pattern_missing_fields, unsafe_expressions, UnsafeExpr, }, - method_resolution::{self, lang_names_for_bin_op}, + method_resolution::{self, lang_items_for_bin_op}, Adjustment, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, TyLoweringContext, }; use itertools::Itertools; @@ -294,12 +295,8 @@ impl SourceAnalyzer { } } - let future_trait = db - .lang_item(self.resolver.krate(), hir_expand::name![future_trait].to_smol_str())? - .as_trait()?; - let poll_fn = db - .lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())? - .as_function()?; + let future_trait = db.lang_item(self.resolver.krate(), LangItem::Future)?.as_trait()?; + let poll_fn = db.lang_item(self.resolver.krate(), LangItem::FuturePoll)?.as_function()?; // HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself // doesn't have any generic parameters, so we skip building another subst for `poll()`. let substs = hir_ty::TyBuilder::subst_for_def(db, future_trait, None).push(ty).build(); @@ -311,14 +308,14 @@ impl SourceAnalyzer { db: &dyn HirDatabase, prefix_expr: &ast::PrefixExpr, ) -> Option { - let lang_item_name = match prefix_expr.op_kind()? { - ast::UnaryOp::Deref => name![deref], - ast::UnaryOp::Not => name![not], - ast::UnaryOp::Neg => name![neg], + let (lang_item, fn_name) = match prefix_expr.op_kind()? { + ast::UnaryOp::Deref => (LangItem::Deref, name![deref]), + ast::UnaryOp::Not => (LangItem::Not, name![not]), + ast::UnaryOp::Neg => (LangItem::Neg, name![neg]), }; let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?; - let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?; + let (op_trait, op_fn) = self.lang_trait_fn(db, lang_item, &fn_name)?; // HACK: subst for all methods coincides with that for their trait because the methods // don't have any generic parameters, so we skip building another subst for the methods. 
let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build(); @@ -334,9 +331,7 @@ impl SourceAnalyzer { let base_ty = self.ty_of_expr(db, &index_expr.base()?)?; let index_ty = self.ty_of_expr(db, &index_expr.index()?)?; - let lang_item_name = name![index]; - - let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?; + let (op_trait, op_fn) = self.lang_trait_fn(db, LangItem::Index, &name![index])?; // HACK: subst for all methods coincides with that for their trait because the methods // don't have any generic parameters, so we skip building another subst for the methods. let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None) @@ -355,8 +350,8 @@ impl SourceAnalyzer { let lhs = self.ty_of_expr(db, &binop_expr.lhs()?)?; let rhs = self.ty_of_expr(db, &binop_expr.rhs()?)?; - let (op_trait, op_fn) = lang_names_for_bin_op(op) - .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?; + let (op_trait, op_fn) = lang_items_for_bin_op(op) + .and_then(|(name, lang_item)| self.lang_trait_fn(db, lang_item, &name))?; // HACK: subst for `index()` coincides with that for `Index` because `index()` itself // doesn't have any generic parameters, so we skip building another subst for `index()`. let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None) @@ -374,8 +369,7 @@ impl SourceAnalyzer { ) -> Option { let ty = self.ty_of_expr(db, &try_expr.expr()?)?; - let op_fn = - db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?; + let op_fn = db.lang_item(self.resolver.krate(), LangItem::TryTraitBranch)?.as_function()?; let op_trait = match op_fn.lookup(db.upcast()).container { ItemContainerId::TraitId(id) => id, _ => return None, @@ -821,10 +815,10 @@ impl SourceAnalyzer { fn lang_trait_fn( &self, db: &dyn HirDatabase, - lang_trait: &Name, + lang_trait: LangItem, method_name: &Name, ) -> Option<(TraitId, FunctionId)> { - let trait_id = db.lang_item(self.resolver.krate(), lang_trait.to_smol_str())?.as_trait()?; + let trait_id = db.lang_item(self.resolver.krate(), lang_trait)?.as_trait()?; let fn_id = db.trait_data(trait_id).method_by_name(method_name)?; Some((trait_id, fn_id)) } diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index 8d5ab0061e518..046786580e7a4 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -7,6 +7,7 @@ use std::{env, fs, iter, ops, path::PathBuf, process::Command}; use anyhow::{format_err, Result}; +use base_db::CrateName; use la_arena::{Arena, Idx}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; @@ -50,14 +51,16 @@ impl Sysroot { &self.src_root } - pub fn public_deps(&self) -> impl Iterator + '_ { + pub fn public_deps(&self) -> impl Iterator + '_ { // core is added as a dependency before std in order to // mimic rustcs dependency order ["core", "alloc", "std"] .into_iter() .zip(iter::repeat(true)) .chain(iter::once(("test", false))) - .filter_map(move |(name, prelude)| Some((name, self.by_name(name)?, prelude))) + .filter_map(move |(name, prelude)| { + Some((CrateName::new(name).unwrap(), self.by_name(name)?, prelude)) + }) } pub fn proc_macro(&self) -> Option { diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 9aa04eaa75a42..755bf95199906 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -1162,9 +1162,7 @@ fn sysroot_to_crate_graph( let public_deps = SysrootPublicDeps { deps: sysroot 
.public_deps() - .map(|(name, idx, prelude)| { - (CrateName::new(name).unwrap(), sysroot_crates[&idx], prelude) - }) + .map(|(name, idx, prelude)| (name, sysroot_crates[&idx], prelude)) .collect::>(), }; From f8ed4d7ae44e7a730f597f1f6618fb3b4aead01b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 21 Jan 2023 18:47:37 +0100 Subject: [PATCH 114/501] Use lang item resolution instead of known paths --- crates/hir-def/src/lib.rs | 1 + crates/hir-ty/src/diagnostics/expr.rs | 41 +++--- crates/hir-ty/src/infer.rs | 52 ++++--- crates/hir-ty/src/tests/traits.rs | 138 ++----------------- crates/ide-completion/src/tests/flyimport.rs | 6 +- crates/test-utils/src/minicore.rs | 8 +- 6 files changed, 66 insertions(+), 180 deletions(-) diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 9e4e0dcc96cc6..8eae2e92f4262 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -634,6 +634,7 @@ pub trait Lookup { pub trait HasModule { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId; } + impl HasModule for ItemContainerId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { match *self { diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index c8df4c796efca..3286dcb5afd7e 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -5,7 +5,9 @@ use std::fmt; use std::sync::Arc; -use hir_def::{path::path, resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule}; +use hir_def::lang_item::LangItem; +use hir_def::{resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule}; +use hir_def::{ItemContainerId, Lookup}; use hir_expand::name; use itertools::Either; use itertools::Itertools; @@ -245,26 +247,25 @@ struct FilterMapNextChecker { impl FilterMapNextChecker { fn new(resolver: &hir_def::resolver::Resolver, db: &dyn HirDatabase) -> Self { // Find and store the FunctionIds for Iterator::filter_map and Iterator::next - let iterator_path = path![core::iter::Iterator]; - let mut filter_map_function_id = None; - let mut next_function_id = None; - - if let Some(iterator_trait_id) = resolver.resolve_known_trait(db.upcast(), &iterator_path) { - let iterator_trait_items = &db.trait_data(iterator_trait_id).items; - for item in iterator_trait_items.iter() { - if let (name, AssocItemId::FunctionId(id)) = item { - if *name == name![filter_map] { - filter_map_function_id = Some(*id); + let (next_function_id, filter_map_function_id) = match db + .lang_item(resolver.krate(), LangItem::IteratorNext) + .and_then(|it| it.as_function()) + { + Some(next_function_id) => ( + Some(next_function_id), + match next_function_id.lookup(db.upcast()).container { + ItemContainerId::TraitId(iterator_trait_id) => { + let iterator_trait_items = &db.trait_data(iterator_trait_id).items; + iterator_trait_items.iter().find_map(|(name, it)| match it { + &AssocItemId::FunctionId(id) if *name == name![filter_map] => Some(id), + _ => None, + }) } - if *name == name![next] { - next_function_id = Some(*id); - } - } - if filter_map_function_id.is_some() && next_function_id.is_some() { - break; - } - } - } + _ => None, + }, + ), + None => (None, None), + }; Self { filter_map_function_id, next_function_id, prev_filter_map_expr_id: None } } diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index d06b22fff9a3d..4402c75947c7b 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -24,7 +24,7 @@ use hir_def::{ expr::{BindingAnnotation, ExprId, ExprOrPatId, PatId}, 
lang_item::{LangItem, LangItemTarget}, layout::Integer, - path::{path, Path}, + path::Path, resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs}, type_ref::TypeRef, AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule, @@ -923,26 +923,24 @@ impl<'a> InferenceContext<'a> { } fn resolve_into_iter_item(&self) -> Option { - let path = path![core::iter::IntoIterator]; - let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; + let ItemContainerId::TraitId(trait_) = self.resolve_lang_item(LangItem::IntoIterIntoIter)? + .as_function()? + .lookup(self.db.upcast()).container + else { return None }; self.db.trait_data(trait_).associated_type_by_name(&name![IntoIter]) } fn resolve_iterator_item(&self) -> Option { - let path = path![core::iter::Iterator]; - let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; + let ItemContainerId::TraitId(trait_) = self.resolve_lang_item(LangItem::IteratorNext)? + .as_function()? + .lookup(self.db.upcast()).container + else { return None }; self.db.trait_data(trait_).associated_type_by_name(&name![Item]) } fn resolve_ops_try_ok(&self) -> Option { - // FIXME resolve via lang_item once try v2 is stable - let path = path![core::ops::Try]; - let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; - let trait_data = self.db.trait_data(trait_); - trait_data - // FIXME remove once try v2 is stable - .associated_type_by_name(&name![Ok]) - .or_else(|| trait_data.associated_type_by_name(&name![Output])) + let trait_ = self.resolve_lang_item(LangItem::Try)?.as_trait()?; + self.db.trait_data(trait_).associated_type_by_name(&name![Output]) } fn resolve_ops_neg_output(&self) -> Option { @@ -956,10 +954,12 @@ impl<'a> InferenceContext<'a> { } fn resolve_future_future_output(&self) -> Option { - let trait_ = self - .resolver - .resolve_known_trait(self.db.upcast(), &path![core::future::IntoFuture]) - .or_else(|| self.resolve_lang_item(LangItem::Future)?.as_trait())?; + let ItemContainerId::TraitId(trait_) = self + .resolve_lang_item(LangItem::IntoFutureIntoFuture)? + .as_function()? 
+ .lookup(self.db.upcast()) + .container + else { return None }; self.db.trait_data(trait_).associated_type_by_name(&name![Output]) } @@ -969,38 +969,32 @@ impl<'a> InferenceContext<'a> { } fn resolve_range_full(&self) -> Option { - let path = path![core::ops::RangeFull]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + let struct_ = self.resolve_lang_item(LangItem::RangeFull)?.as_struct()?; Some(struct_.into()) } fn resolve_range(&self) -> Option { - let path = path![core::ops::Range]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + let struct_ = self.resolve_lang_item(LangItem::Range)?.as_struct()?; Some(struct_.into()) } fn resolve_range_inclusive(&self) -> Option { - let path = path![core::ops::RangeInclusive]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + let struct_ = self.resolve_lang_item(LangItem::RangeInclusiveStruct)?.as_struct()?; Some(struct_.into()) } fn resolve_range_from(&self) -> Option { - let path = path![core::ops::RangeFrom]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + let struct_ = self.resolve_lang_item(LangItem::RangeFrom)?.as_struct()?; Some(struct_.into()) } fn resolve_range_to(&self) -> Option { - let path = path![core::ops::RangeTo]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + let struct_ = self.resolve_lang_item(LangItem::RangeTo)?.as_struct()?; Some(struct_.into()) } fn resolve_range_to_inclusive(&self) -> Option { - let path = path![core::ops::RangeToInclusive]; - let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?; + let struct_ = self.resolve_lang_item(LangItem::RangeToInclusive)?.as_struct()?; Some(struct_.into()) } diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 4c560702a1b45..88670364bde05 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -163,98 +163,22 @@ fn test() { } #[test] -fn infer_try() { +fn infer_try_trait() { check_types( r#" -//- /main.rs crate:main deps:core +//- minicore: try, result fn test() { let r: Result = Result::Ok(1); let v = r?; v; } //^ i32 -//- /core.rs crate:core -pub mod ops { - pub trait Try { - type Ok; - type Error; - } +impl core::ops::Try for Result { + type Output = O; + type Error = Result; } -pub mod result { - pub enum Result { - Ok(O), - Err(E) - } - - impl crate::ops::Try for Result { - type Ok = O; - type Error = E; - } -} - -pub mod prelude { - pub mod rust_2018 { - pub use crate::{result::*, ops::*}; - } -} -"#, - ); -} - -#[test] -fn infer_try_trait_v2() { - check_types( - r#" -//- /main.rs crate:main deps:core -fn test() { - let r: Result = Result::Ok(1); - let v = r?; - v; -} //^ i32 - -//- /core.rs crate:core -mod ops { - mod try_trait { - pub trait Try: FromResidual { - type Output; - type Residual; - } - pub trait FromResidual::Residual> {} - } - - pub use self::try_trait::FromResidual; - pub use self::try_trait::Try; -} - -mod convert { - pub trait From {} - impl From for T {} -} - -pub mod result { - use crate::convert::From; - use crate::ops::{Try, FromResidual}; - - pub enum Infallible {} - pub enum Result { - Ok(O), - Err(E) - } - - impl Try for Result { - type Output = O; - type Error = Result; - } - - impl> FromResidual> for Result {} -} - -pub mod prelude { - pub mod rust_2018 { - pub use crate::result::*; - } -} +impl> core::ops::FromResidual> for Result {} "#, ); } @@ -263,7 +187,8 @@ pub mod prelude { fn infer_for_loop() { 
check_types( r#" -//- /main.rs crate:main deps:core,alloc +//- minicore: iterator +//- /main.rs crate:main deps:alloc #![no_std] use alloc::collections::Vec; @@ -275,23 +200,7 @@ fn test() { } //^ &str } -//- /core.rs crate:core -pub mod iter { - pub trait IntoIterator { - type Item; - type IntoIter: Iterator; - } - pub trait Iterator { - type Item; - } -} -pub mod prelude { - pub mod rust_2018 { - pub use crate::iter::*; - } -} - -//- /alloc.rs crate:alloc deps:core +//- /alloc.rs crate:alloc #![no_std] pub mod collections { pub struct Vec {} @@ -2999,40 +2908,17 @@ fn test() { fn integer_range_iterate() { check_types( r#" -//- /main.rs crate:main deps:core +//- minicore: range, iterator +//- /main.rs crate:main fn test() { for x in 0..100 { x; } } //^ i32 -//- /core.rs crate:core -pub mod ops { - pub struct Range { - pub start: Idx, - pub end: Idx, - } -} - -pub mod iter { - pub trait Iterator { - type Item; - } - - pub trait IntoIterator { - type Item; - type IntoIter: Iterator; - } - - impl IntoIterator for T where T: Iterator { - type Item = ::Item; - type IntoIter = Self; - } -} - trait Step {} impl Step for i32 {} impl Step for i64 {} -impl iter::Iterator for ops::Range { +impl core::iter::Iterator for core::ops::Range { type Item = A; } "#, diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs index a63ef006875bc..0b485eb776d8a 100644 --- a/crates/ide-completion/src/tests/flyimport.rs +++ b/crates/ide-completion/src/tests/flyimport.rs @@ -541,9 +541,9 @@ fn main() { } "#, expect![[r#" - fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED - ct SPECIAL_CONST (use dep::test_mod::TestTrait) DEPRECATED - "#]], + ct SPECIAL_CONST (use dep::test_mod::TestTrait) DEPRECATED + fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED + "#]], ); } diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 3ca63fcab90d6..dff60914409a5 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -28,6 +28,7 @@ //! generator: pin //! hash: //! index: sized +//! infallible: //! iterator: option //! iterators: iterator, fn //! non_zero: @@ -40,7 +41,7 @@ //! sized: //! slice: //! sync: sized -//! try: +//! try: infallible //! 
unsize: sized pub mod marker { @@ -172,6 +173,9 @@ pub mod convert { fn as_ref(&self) -> &T; } // endregion:as_ref + // region:infallible + pub enum Infallibe {} + // endregion:infallible } pub mod ops { @@ -352,7 +356,7 @@ pub mod ops { #[lang = "from_residual"] fn from_residual(residual: R) -> Self; } - #[lang = "try"] + #[lang = "Try"] pub trait Try: FromResidual { type Output; type Residual; From 2a4837089e8c8551c43aeada433d61352a135ce5 Mon Sep 17 00:00:00 2001 From: Alex Vasilev Date: Sat, 21 Jan 2023 23:42:11 +0530 Subject: [PATCH 115/501] fixes --- crates/ide-assists/src/handlers/add_braces.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/crates/ide-assists/src/handlers/add_braces.rs b/crates/ide-assists/src/handlers/add_braces.rs index ccdac3074e605..2f4a263ee0700 100644 --- a/crates/ide-assists/src/handlers/add_braces.rs +++ b/crates/ide-assists/src/handlers/add_braces.rs @@ -34,7 +34,7 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( acc.add( AssistId("add_braces", AssistKind::RefactorRewrite), match expr_type { - ParentType::ClosureExpr => "Add braces to lambda expression", + ParentType::ClosureExpr => "Add braces to closure body", ParentType::MatchArmExpr => "Add braces to arm expression", }, expr.syntax().text_range(), @@ -46,9 +46,7 @@ pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( builder.replace(expr.syntax().text_range(), block_expr.syntax().text()); }, - ); - - Some(()) + ) } enum ParentType { @@ -58,7 +56,7 @@ enum ParentType { fn get_replacement_node(ctx: &AssistContext<'_>) -> Option<(ParentType, ast::Expr)> { if let Some(match_arm) = ctx.find_node_at_offset::() { - let match_arm_expr = match_arm.syntax().children().find_map(ast::Expr::cast)?; + let match_arm_expr = match_arm.expr()?; if matches!(match_arm_expr, ast::Expr::BlockExpr(_)) { return None; From 8df27d07aed4e12ce6e767f8675599aa0a8e46b9 Mon Sep 17 00:00:00 2001 From: Michael Benfield Date: Fri, 20 Jan 2023 20:56:16 -0800 Subject: [PATCH 116/501] Remove some superfluous type parameters from layout.rs. Specifically remove V, which can always be VariantIdx, and F, which can always be Layout. 
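
As a rough illustration (using `univariant` on `LayoutCalculator` as the
example, per the diff below), the signatures go from

    fn univariant<'a, V: Idx, F: Deref<Target = &'a LayoutS<V>> + Debug>(
        &self,
        dl: &TargetDataLayout,
        fields: &[F],
        repr: &ReprOptions,
        kind: StructKind,
    ) -> Option<LayoutS<V>>

to

    fn univariant(
        &self,
        dl: &TargetDataLayout,
        fields: &[Layout<'_>],
        repr: &ReprOptions,
        kind: StructKind,
    ) -> Option<LayoutS>

with callers passing `Layout<'_>` values directly instead of an abstract
field type.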
--- compiler/rustc_abi/src/layout.rs | 146 +++++++++++------------ compiler/rustc_abi/src/lib.rs | 71 +++++++++-- compiler/rustc_middle/src/arena.rs | 2 +- compiler/rustc_middle/src/ty/context.rs | 4 +- compiler/rustc_target/src/abi/mod.rs | 48 +------- compiler/rustc_ty_utils/src/layout.rs | 22 ++-- src/librustdoc/html/render/print_item.rs | 4 +- 7 files changed, 148 insertions(+), 149 deletions(-) diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs index 9c2cf58efed4a..54858b52008f9 100644 --- a/compiler/rustc_abi/src/layout.rs +++ b/compiler/rustc_abi/src/layout.rs @@ -1,11 +1,5 @@ use super::*; -use std::{ - borrow::Borrow, - cmp, - fmt::Debug, - iter, - ops::{Bound, Deref}, -}; +use std::{borrow::Borrow, cmp, iter, ops::Bound}; #[cfg(feature = "randomize")] use rand::{seq::SliceRandom, SeedableRng}; @@ -33,7 +27,7 @@ pub trait LayoutCalculator { fn delay_bug(&self, txt: &str); fn current_data_layout(&self) -> Self::TargetDataLayoutRef; - fn scalar_pair(&self, a: Scalar, b: Scalar) -> LayoutS { + fn scalar_pair(&self, a: Scalar, b: Scalar) -> LayoutS { let dl = self.current_data_layout(); let dl = dl.borrow(); let b_align = b.align(dl); @@ -49,7 +43,7 @@ pub trait LayoutCalculator { .max_by_key(|niche| niche.available(dl)); LayoutS { - variants: Variants::Single { index: V::new(0) }, + variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Arbitrary { offsets: vec![Size::ZERO, b_offset], memory_index: vec![0, 1], @@ -61,13 +55,13 @@ pub trait LayoutCalculator { } } - fn univariant<'a, V: Idx, F: Deref> + Debug>( + fn univariant( &self, dl: &TargetDataLayout, - fields: &[F], + fields: &[Layout<'_>], repr: &ReprOptions, kind: StructKind, - ) -> Option> { + ) -> Option { let pack = repr.pack; let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align }; let mut inverse_memory_index: Vec = (0..fields.len() as u32).collect(); @@ -76,17 +70,17 @@ pub trait LayoutCalculator { let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() }; let optimizing = &mut inverse_memory_index[..end]; - let effective_field_align = |f: &F| { + let effective_field_align = |layout: Layout<'_>| { if let Some(pack) = pack { // return the packed alignment in bytes - f.align.abi.min(pack).bytes() + layout.align().abi.min(pack).bytes() } else { // returns log2(effective-align). // This is ok since `pack` applies to all fields equally. // The calculation assumes that size is an integer multiple of align, except for ZSTs. // // group [u8; 4] with align-4 or [u8; 6] with align-2 fields - f.align.abi.bytes().max(f.size.bytes()).trailing_zeros() as u64 + layout.align().abi.bytes().max(layout.size().bytes()).trailing_zeros() as u64 } }; @@ -111,9 +105,9 @@ pub trait LayoutCalculator { // Place ZSTs first to avoid "interesting offsets", // especially with only one or two non-ZST fields. 
// Then place largest alignments first, largest niches within an alignment group last - let f = &fields[x as usize]; - let niche_size = f.largest_niche.map_or(0, |n| n.available(dl)); - (!f.is_zst(), cmp::Reverse(effective_field_align(f)), niche_size) + let f = fields[x as usize]; + let niche_size = f.largest_niche().map_or(0, |n| n.available(dl)); + (!f.0.is_zst(), cmp::Reverse(effective_field_align(f)), niche_size) }); } @@ -123,8 +117,8 @@ pub trait LayoutCalculator { // And put the largest niche in an alignment group at the end // so it can be used as discriminant in jagged enums optimizing.sort_by_key(|&x| { - let f = &fields[x as usize]; - let niche_size = f.largest_niche.map_or(0, |n| n.available(dl)); + let f = fields[x as usize]; + let niche_size = f.largest_niche().map_or(0, |n| n.available(dl)); (effective_field_align(f), niche_size) }); } @@ -160,15 +154,15 @@ pub trait LayoutCalculator { )); } - if field.is_unsized() { + if field.0.is_unsized() { sized = false; } // Invariant: offset < dl.obj_size_bound() <= 1<<61 let field_align = if let Some(pack) = pack { - field.align.min(AbiAndPrefAlign::new(pack)) + field.align().min(AbiAndPrefAlign::new(pack)) } else { - field.align + field.align() }; offset = offset.align_to(field_align.abi); align = align.max(field_align); @@ -176,7 +170,7 @@ pub trait LayoutCalculator { debug!("univariant offset: {:?} field: {:#?}", offset, field); offsets[i as usize] = offset; - if let Some(mut niche) = field.largest_niche { + if let Some(mut niche) = field.largest_niche() { let available = niche.available(dl); if available > largest_niche_available { largest_niche_available = available; @@ -185,7 +179,7 @@ pub trait LayoutCalculator { } } - offset = offset.checked_add(field.size, dl)?; + offset = offset.checked_add(field.size(), dl)?; } if let Some(repr_align) = repr.align { align = align.max(AbiAndPrefAlign::new(repr_align)); @@ -205,24 +199,26 @@ pub trait LayoutCalculator { // Unpack newtype ABIs and find scalar pairs. if sized && size.bytes() > 0 { // All other fields must be ZSTs. - let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst()); + let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.0.is_zst()); match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) { // We have exactly one non-ZST field. (Some((i, field)), None, None) => { // Field fills the struct and it has a scalar or scalar pair ABI. - if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size + if offsets[i].bytes() == 0 + && align.abi == field.align().abi + && size == field.size() { - match field.abi { + match field.abi() { // For plain scalars, or vectors of them, we can't unpack // newtypes for `#[repr(C)]`, as that affects C ABIs. Abi::Scalar(_) | Abi::Vector { .. } if optimize => { - abi = field.abi; + abi = field.abi(); } // But scalar pairs are Rust-specific and get // treated as aggregates by C ABIs anyway. Abi::ScalarPair(..) => { - abi = field.abi; + abi = field.abi(); } _ => {} } @@ -231,7 +227,7 @@ pub trait LayoutCalculator { // Two non-ZST fields, and they're both scalars. (Some((i, a)), Some((j, b)), None) => { - match (a.abi, b.abi) { + match (a.abi(), b.abi()) { (Abi::Scalar(a), Abi::Scalar(b)) => { // Order by the memory placement, not source order. 
let ((i, a), (j, b)) = if offsets[i] < offsets[j] { @@ -239,7 +235,7 @@ pub trait LayoutCalculator { } else { ((j, b), (i, a)) }; - let pair = self.scalar_pair::(a, b); + let pair = self.scalar_pair(a, b); let pair_offsets = match pair.fields { FieldsShape::Arbitrary { ref offsets, ref memory_index } => { assert_eq!(memory_index, &[0, 1]); @@ -264,11 +260,11 @@ pub trait LayoutCalculator { _ => {} } } - if fields.iter().any(|f| f.abi.is_uninhabited()) { + if fields.iter().any(|f| f.abi().is_uninhabited()) { abi = Abi::Uninhabited; } Some(LayoutS { - variants: Variants::Single { index: V::new(0) }, + variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Arbitrary { offsets, memory_index }, abi, largest_niche, @@ -277,11 +273,11 @@ pub trait LayoutCalculator { }) } - fn layout_of_never_type(&self) -> LayoutS { + fn layout_of_never_type(&self) -> LayoutS { let dl = self.current_data_layout(); let dl = dl.borrow(); LayoutS { - variants: Variants::Single { index: V::new(0) }, + variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Primitive, abi: Abi::Uninhabited, largest_niche: None, @@ -290,18 +286,18 @@ pub trait LayoutCalculator { } } - fn layout_of_struct_or_enum<'a, V: Idx, F: Deref> + Debug>( + fn layout_of_struct_or_enum( &self, repr: &ReprOptions, - variants: &IndexVec>, + variants: &IndexVec>>, is_enum: bool, is_unsafe_cell: bool, scalar_valid_range: (Bound, Bound), discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool), - discriminants: impl Iterator, + discriminants: impl Iterator, niche_optimize_enum: bool, always_sized: bool, - ) -> Option> { + ) -> Option { let dl = self.current_data_layout(); let dl = dl.borrow(); @@ -316,9 +312,9 @@ pub trait LayoutCalculator { // but *not* an encoding of the discriminant (e.g., a tag value). // See issue #49298 for more details on the need to leave space // for non-ZST uninhabited data (mostly partial initialization). - let absent = |fields: &[F]| { - let uninhabited = fields.iter().any(|f| f.abi.is_uninhabited()); - let is_zst = fields.iter().all(|f| f.is_zst()); + let absent = |fields: &[Layout<'_>]| { + let uninhabited = fields.iter().any(|f| f.abi().is_uninhabited()); + let is_zst = fields.iter().all(|f| f.0.is_zst()); uninhabited && is_zst }; let (present_first, present_second) = { @@ -335,7 +331,7 @@ pub trait LayoutCalculator { } // If it's a struct, still compute a layout so that we can still compute the // field offsets. - None => V::new(0), + None => VariantIdx::new(0), }; let is_struct = !is_enum || @@ -439,12 +435,12 @@ pub trait LayoutCalculator { // variant layouts, so we can't store them in the // overall LayoutS. Store the overall LayoutS // and the variant LayoutSs here until then. 
- struct TmpLayout { - layout: LayoutS, - variants: IndexVec>, + struct TmpLayout { + layout: LayoutS, + variants: IndexVec, } - let calculate_niche_filling_layout = || -> Option> { + let calculate_niche_filling_layout = || -> Option { if niche_optimize_enum { return None; } @@ -464,15 +460,16 @@ pub trait LayoutCalculator { Some(st) }) - .collect::>>()?; + .collect::>>()?; let largest_variant_index = variant_layouts .iter_enumerated() .max_by_key(|(_i, layout)| layout.size.bytes()) .map(|(i, _layout)| i)?; - let all_indices = (0..=variants.len() - 1).map(V::new); - let needs_disc = |index: V| index != largest_variant_index && !absent(&variants[index]); + let all_indices = (0..=variants.len() - 1).map(VariantIdx::new); + let needs_disc = + |index: VariantIdx| index != largest_variant_index && !absent(&variants[index]); let niche_variants = all_indices.clone().find(|v| needs_disc(*v)).unwrap().index() ..=all_indices.rev().find(|v| needs_disc(*v)).unwrap().index(); @@ -482,7 +479,7 @@ pub trait LayoutCalculator { let (field_index, niche, (niche_start, niche_scalar)) = variants[largest_variant_index] .iter() .enumerate() - .filter_map(|(j, field)| Some((j, field.largest_niche?))) + .filter_map(|(j, field)| Some((j, field.largest_niche()?))) .max_by_key(|(_, niche)| niche.available(dl)) .and_then(|(j, niche)| Some((j, niche, niche.reserve(dl, count)?)))?; let niche_offset = @@ -514,7 +511,7 @@ pub trait LayoutCalculator { match layout.fields { FieldsShape::Arbitrary { ref mut offsets, .. } => { for (j, offset) in offsets.iter_mut().enumerate() { - if !variants[i][j].is_zst() { + if !variants[i][j].0.is_zst() { *offset += this_offset; } } @@ -572,8 +569,8 @@ pub trait LayoutCalculator { tag: niche_scalar, tag_encoding: TagEncoding::Niche { untagged_variant: largest_variant_index, - niche_variants: (V::new(*niche_variants.start()) - ..=V::new(*niche_variants.end())), + niche_variants: (VariantIdx::new(*niche_variants.start()) + ..=VariantIdx::new(*niche_variants.end())), niche_start, }, tag_field: 0, @@ -598,7 +595,7 @@ pub trait LayoutCalculator { let discr_type = repr.discr_type(); let bits = Integer::from_attr(dl, discr_type).size().bits(); for (i, mut val) in discriminants { - if variants[i].iter().any(|f| f.abi.is_uninhabited()) { + if variants[i].iter().any(|f| f.abi().is_uninhabited()) { continue; } if discr_type.is_signed() { @@ -636,7 +633,7 @@ pub trait LayoutCalculator { if repr.c() { for fields in variants { for field in fields { - prefix_align = prefix_align.max(field.align.abi); + prefix_align = prefix_align.max(field.align().abi); } } } @@ -655,8 +652,8 @@ pub trait LayoutCalculator { // Find the first field we can't move later // to make room for a larger discriminant. for field in st.fields.index_by_increasing_offset().map(|j| &field_layouts[j]) { - if !field.is_zst() || field.align.abi.bytes() != 1 { - start_align = start_align.min(field.align.abi); + if !field.0.is_zst() || field.align().abi.bytes() != 1 { + start_align = start_align.min(field.align().abi); break; } } @@ -664,7 +661,7 @@ pub trait LayoutCalculator { align = align.max(st.align); Some(st) }) - .collect::>>()?; + .collect::>>()?; // Align the maximum variant size to the largest alignment. size = size.align_to(align.abi); @@ -759,7 +756,7 @@ pub trait LayoutCalculator { let FieldsShape::Arbitrary { ref offsets, .. 
} = layout_variant.fields else { panic!(); }; - let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst()); + let mut fields = iter::zip(field_layouts, offsets).filter(|p| !p.0.0.is_zst()); let (field, offset) = match (fields.next(), fields.next()) { (None, None) => { common_prim_initialized_in_all_variants = false; @@ -771,7 +768,7 @@ pub trait LayoutCalculator { break; } }; - let prim = match field.abi { + let prim = match field.abi() { Abi::Scalar(scalar) => { common_prim_initialized_in_all_variants &= matches!(scalar, Scalar::Initialized { .. }); @@ -802,7 +799,7 @@ pub trait LayoutCalculator { // Common prim might be uninit. Scalar::Union { value: prim } }; - let pair = self.scalar_pair::(tag, prim_scalar); + let pair = self.scalar_pair(tag, prim_scalar); let pair_offsets = match pair.fields { FieldsShape::Arbitrary { ref offsets, ref memory_index } => { assert_eq!(memory_index, &[0, 1]); @@ -862,9 +859,8 @@ pub trait LayoutCalculator { // pick the layout with the larger niche; otherwise, // pick tagged as it has simpler codegen. use cmp::Ordering::*; - let niche_size = |tmp_l: &TmpLayout| { - tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl)) - }; + let niche_size = + |tmp_l: &TmpLayout| tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl)); match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) { (Greater, _) => nl, (Equal, Less) => nl, @@ -884,11 +880,11 @@ pub trait LayoutCalculator { Some(best_layout.layout) } - fn layout_of_union<'a, V: Idx, F: Deref> + Debug>( + fn layout_of_union( &self, repr: &ReprOptions, - variants: &IndexVec>, - ) -> Option> { + variants: &IndexVec>>, + ) -> Option { let dl = self.current_data_layout(); let dl = dl.borrow(); let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align }; @@ -900,15 +896,15 @@ pub trait LayoutCalculator { let optimize = !repr.inhibit_union_abi_opt(); let mut size = Size::ZERO; let mut abi = Abi::Aggregate { sized: true }; - let index = V::new(0); + let index = VariantIdx::new(0); for field in &variants[index] { - assert!(field.is_sized()); - align = align.max(field.align); + assert!(field.0.is_sized()); + align = align.max(field.align()); // If all non-ZST fields have the same ABI, forward this ABI - if optimize && !field.is_zst() { + if optimize && !field.0.is_zst() { // Discard valid range information and allow undef - let field_abi = match field.abi { + let field_abi = match field.abi() { Abi::Scalar(x) => Abi::Scalar(x.to_union()), Abi::ScalarPair(x, y) => Abi::ScalarPair(x.to_union(), y.to_union()), Abi::Vector { element: x, count } => { @@ -926,7 +922,7 @@ pub trait LayoutCalculator { } } - size = cmp::max(size, field.size); + size = cmp::max(size, field.size()); } if let Some(pack) = repr.pack { diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs index f4cb459f32fdd..5cd0aff2d5b71 100644 --- a/compiler/rustc_abi/src/lib.rs +++ b/compiler/rustc_abi/src/lib.rs @@ -8,6 +8,7 @@ use std::ops::{Add, AddAssign, Mul, RangeInclusive, Sub}; use std::str::FromStr; use bitflags::bitflags; +use rustc_data_structures::intern::Interned; #[cfg(feature = "nightly")] use rustc_data_structures::stable_hasher::StableOrd; use rustc_index::vec::{Idx, IndexVec}; @@ -1257,9 +1258,9 @@ impl Abi { #[derive(PartialEq, Eq, Hash, Clone, Debug)] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))] -pub enum Variants { +pub enum Variants { /// Single enum variants, structs/tuples, unions, and all non-ADTs. 
- Single { index: V }, + Single { index: VariantIdx }, /// Enum-likes with more than one inhabited variant: each variant comes with /// a *discriminant* (usually the same as the variant index but the user can @@ -1269,15 +1270,15 @@ pub enum Variants { /// For enums, the tag is the sole field of the layout. Multiple { tag: Scalar, - tag_encoding: TagEncoding, + tag_encoding: TagEncoding, tag_field: usize, - variants: IndexVec>, + variants: IndexVec, }, } #[derive(PartialEq, Eq, Hash, Clone, Debug)] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))] -pub enum TagEncoding { +pub enum TagEncoding { /// The tag directly stores the discriminant, but possibly with a smaller layout /// (so converting the tag to the discriminant can require sign extension). Direct, @@ -1292,7 +1293,11 @@ pub enum TagEncoding { /// For example, `Option<(usize, &T)>` is represented such that /// `None` has a null pointer for the second tuple field, and /// `Some` is the identity function (with a non-null reference). - Niche { untagged_variant: V, niche_variants: RangeInclusive, niche_start: u128 }, + Niche { + untagged_variant: VariantIdx, + niche_variants: RangeInclusive, + niche_start: u128, + }, } #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] @@ -1379,9 +1384,14 @@ impl Niche { } } +rustc_index::newtype_index! { + #[derive(HashStable_Generic)] + pub struct VariantIdx {} +} + #[derive(PartialEq, Eq, Hash, Clone)] #[cfg_attr(feature = "nightly", derive(HashStable_Generic))] -pub struct LayoutS { +pub struct LayoutS { /// Says where the fields are located within the layout. pub fields: FieldsShape, @@ -1392,7 +1402,7 @@ pub struct LayoutS { /// /// To access all fields of this layout, both `fields` and the fields of the active variant /// must be taken into account. - pub variants: Variants, + pub variants: Variants, /// The `abi` defines how this data is passed between functions, and it defines /// value restrictions via `valid_range`. @@ -1411,13 +1421,13 @@ pub struct LayoutS { pub size: Size, } -impl LayoutS { +impl LayoutS { pub fn scalar(cx: &C, scalar: Scalar) -> Self { let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar); let size = scalar.size(cx); let align = scalar.align(cx); LayoutS { - variants: Variants::Single { index: V::new(0) }, + variants: Variants::Single { index: VariantIdx::new(0) }, fields: FieldsShape::Primitive, abi: Abi::Scalar(scalar), largest_niche, @@ -1427,7 +1437,7 @@ impl LayoutS { } } -impl fmt::Debug for LayoutS { +impl fmt::Debug for LayoutS { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { // This is how `Layout` used to print before it become // `Interned`. We print it like this to avoid having to update @@ -1444,6 +1454,43 @@ impl fmt::Debug for LayoutS { } } +#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable_Generic)] +#[rustc_pass_by_value] +pub struct Layout<'a>(pub Interned<'a, LayoutS>); + +impl<'a> fmt::Debug for Layout<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // See comment on `::fmt` above. 
+ self.0.0.fmt(f) + } +} + +impl<'a> Layout<'a> { + pub fn fields(self) -> &'a FieldsShape { + &self.0.0.fields + } + + pub fn variants(self) -> &'a Variants { + &self.0.0.variants + } + + pub fn abi(self) -> Abi { + self.0.0.abi + } + + pub fn largest_niche(self) -> Option { + self.0.0.largest_niche + } + + pub fn align(self) -> AbiAndPrefAlign { + self.0.0.align + } + + pub fn size(self) -> Size { + self.0.0.size + } +} + #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum PointerKind { /// Most general case, we know no restrictions to tell LLVM. @@ -1479,7 +1526,7 @@ pub enum InitKind { UninitMitigated0x01Fill, } -impl LayoutS { +impl LayoutS { /// Returns `true` if the layout corresponds to an unsized type. pub fn is_unsized(&self) -> bool { self.abi.is_unsized() diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index f816d614500a0..f0a63c243a5e1 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -8,7 +8,7 @@ macro_rules! arena_types { ($macro:path) => ( $macro!([ - [] layout: rustc_target::abi::LayoutS, + [] layout: rustc_target::abi::LayoutS, [] fn_abi: rustc_target::abi::call::FnAbi<'tcx, rustc_middle::ty::Ty<'tcx>>, // AdtDef are interned and compared by address [decode] adt_def: rustc_middle::ty::AdtDefData, diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index ce04d8d21f4cd..c207d639f4a1f 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -142,7 +142,7 @@ pub struct CtxtInterners<'tcx> { const_: InternedSet<'tcx, ConstData<'tcx>>, const_allocation: InternedSet<'tcx, Allocation>, bound_variable_kinds: InternedSet<'tcx, List>, - layout: InternedSet<'tcx, LayoutS>, + layout: InternedSet<'tcx, LayoutS>, adt_def: InternedSet<'tcx, AdtDefData>, } @@ -1586,7 +1586,7 @@ direct_interners! { region: mk_region(RegionKind<'tcx>): Region -> Region<'tcx>, const_: mk_const_internal(ConstData<'tcx>): Const -> Const<'tcx>, const_allocation: intern_const_alloc(Allocation): ConstAllocation -> ConstAllocation<'tcx>, - layout: intern_layout(LayoutS): Layout -> Layout<'tcx>, + layout: intern_layout(LayoutS): Layout -> Layout<'tcx>, adt_def: intern_adt_def(AdtDefData): AdtDef -> AdtDef<'tcx>, } diff --git a/compiler/rustc_target/src/abi/mod.rs b/compiler/rustc_target/src/abi/mod.rs index 88a0a1f8ecfde..653f99fb11a10 100644 --- a/compiler/rustc_target/src/abi/mod.rs +++ b/compiler/rustc_target/src/abi/mod.rs @@ -3,10 +3,8 @@ pub use Primitive::*; use crate::json::{Json, ToJson}; -use std::fmt; use std::ops::Deref; -use rustc_data_structures::intern::Interned; use rustc_macros::HashStable_Generic; pub mod call; @@ -19,48 +17,6 @@ impl ToJson for Endian { } } -rustc_index::newtype_index! { - #[derive(HashStable_Generic)] - pub struct VariantIdx {} -} - -#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable_Generic)] -#[rustc_pass_by_value] -pub struct Layout<'a>(pub Interned<'a, LayoutS>); - -impl<'a> fmt::Debug for Layout<'a> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // See comment on `::fmt` above. 
- self.0.0.fmt(f) - } -} - -impl<'a> Layout<'a> { - pub fn fields(self) -> &'a FieldsShape { - &self.0.0.fields - } - - pub fn variants(self) -> &'a Variants { - &self.0.0.variants - } - - pub fn abi(self) -> Abi { - self.0.0.abi - } - - pub fn largest_niche(self) -> Option { - self.0.0.largest_niche - } - - pub fn align(self) -> AbiAndPrefAlign { - self.0.0.align - } - - pub fn size(self) -> Size { - self.0.0.size - } -} - /// The layout of a type, alongside the type itself. /// Provides various type traversal APIs (e.g., recursing into fields). /// @@ -75,8 +31,8 @@ pub struct TyAndLayout<'a, Ty> { } impl<'a, Ty> Deref for TyAndLayout<'a, Ty> { - type Target = &'a LayoutS; - fn deref(&self) -> &&'a LayoutS { + type Target = &'a LayoutS; + fn deref(&self) -> &&'a LayoutS { &self.layout.0.0 } } diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs index 6aa016133ca59..68312f8b11c63 100644 --- a/compiler/rustc_ty_utils/src/layout.rs +++ b/compiler/rustc_ty_utils/src/layout.rs @@ -78,10 +78,10 @@ fn invert_mapping(map: &[u32]) -> Vec { fn univariant_uninterned<'tcx>( cx: &LayoutCx<'tcx, TyCtxt<'tcx>>, ty: Ty<'tcx>, - fields: &[TyAndLayout<'_>], + fields: &[Layout<'_>], repr: &ReprOptions, kind: StructKind, -) -> Result, LayoutError<'tcx>> { +) -> Result> { let dl = cx.data_layout(); let pack = repr.pack; if pack.is_some() && repr.align.is_some() { @@ -106,7 +106,7 @@ fn layout_of_uncached<'tcx>( }; let scalar = |value: Primitive| tcx.intern_layout(LayoutS::scalar(cx, scalar_unit(value))); - let univariant = |fields: &[TyAndLayout<'_>], repr: &ReprOptions, kind| { + let univariant = |fields: &[Layout<'_>], repr: &ReprOptions, kind| { Ok(tcx.intern_layout(univariant_uninterned(cx, ty, fields, repr, kind)?)) }; debug_assert!(!ty.has_non_region_infer()); @@ -272,7 +272,7 @@ fn layout_of_uncached<'tcx>( ty::Closure(_, ref substs) => { let tys = substs.as_closure().upvar_tys(); univariant( - &tys.map(|ty| cx.layout_of(ty)).collect::, _>>()?, + &tys.map(|ty| Ok(cx.layout_of(ty)?.layout)).collect::, _>>()?, &ReprOptions::default(), StructKind::AlwaysSized, )? @@ -283,7 +283,7 @@ fn layout_of_uncached<'tcx>( if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized }; univariant( - &tys.iter().map(|k| cx.layout_of(k)).collect::, _>>()?, + &tys.iter().map(|k| Ok(cx.layout_of(k)?.layout)).collect::, _>>()?, &ReprOptions::default(), kind, )? @@ -412,7 +412,7 @@ fn layout_of_uncached<'tcx>( .map(|v| { v.fields .iter() - .map(|field| cx.layout_of(field.ty(tcx, substs))) + .map(|field| Ok(cx.layout_of(field.ty(tcx, substs))?.layout)) .collect::, _>>() }) .collect::, _>>()?; @@ -630,23 +630,21 @@ fn generator_layout<'tcx>( // `info.variant_fields` already accounts for the reserved variants, so no need to add them. 
let max_discr = (info.variant_fields.len() - 1) as u128; let discr_int = Integer::fit_unsigned(max_discr); - let discr_int_ty = discr_int.to_ty(tcx, false); let tag = Scalar::Initialized { value: Primitive::Int(discr_int, false), valid_range: WrappingRange { start: 0, end: max_discr }, }; let tag_layout = cx.tcx.intern_layout(LayoutS::scalar(cx, tag)); - let tag_layout = TyAndLayout { ty: discr_int_ty, layout: tag_layout }; let promoted_layouts = ineligible_locals .iter() .map(|local| subst_field(info.field_tys[local])) .map(|ty| tcx.mk_maybe_uninit(ty)) - .map(|ty| cx.layout_of(ty)); + .map(|ty| Ok(cx.layout_of(ty)?.layout)); let prefix_layouts = substs .as_generator() .prefix_tys() - .map(|ty| cx.layout_of(ty)) + .map(|ty| Ok(cx.layout_of(ty)?.layout)) .chain(iter::once(Ok(tag_layout))) .chain(promoted_layouts) .collect::, _>>()?; @@ -715,7 +713,9 @@ fn generator_layout<'tcx>( let mut variant = univariant_uninterned( cx, ty, - &variant_only_tys.map(|ty| cx.layout_of(ty)).collect::, _>>()?, + &variant_only_tys + .map(|ty| Ok(cx.layout_of(ty)?.layout)) + .collect::, _>>()?, &ReprOptions::default(), StructKind::Prefixed(prefix_size, prefix_align.abi), )?; diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs index f824c9e3ad2bd..e57f087642702 100644 --- a/src/librustdoc/html/render/print_item.rs +++ b/src/librustdoc/html/render/print_item.rs @@ -10,7 +10,7 @@ use rustc_middle::ty::layout::LayoutError; use rustc_middle::ty::{self, Adt, TyCtxt}; use rustc_span::hygiene::MacroKind; use rustc_span::symbol::{kw, sym, Symbol}; -use rustc_target::abi::{LayoutS, Primitive, TagEncoding, VariantIdx, Variants}; +use rustc_target::abi::{LayoutS, Primitive, TagEncoding, Variants}; use std::cmp::Ordering; use std::fmt; use std::rc::Rc; @@ -1887,7 +1887,7 @@ fn document_non_exhaustive(w: &mut Buffer, item: &clean::Item) { } fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) { - fn write_size_of_layout(w: &mut Buffer, layout: &LayoutS, tag_size: u64) { + fn write_size_of_layout(w: &mut Buffer, layout: &LayoutS, tag_size: u64) { if layout.abi.is_unsized() { write!(w, "(unsized)"); } else { From e813132e4f8b0c469c9959c2efa1b0629067b3b8 Mon Sep 17 00:00:00 2001 From: SpanishPear Date: Mon, 24 Oct 2022 00:52:59 +1100 Subject: [PATCH 117/501] --wip-- [skip ci] --wip-- [skip ci] get the generic text and put it int he suggestion, but suggestion not working on derive subdiagnostic refactor away from derives and use span_suggestion() instead. Show's the correct(?) 
generic contents, but overwrites the fn name :( x fmt drop commented code and s/todo/fixme get the correct diagnostic for functions, at least x fmt remove some debugs remove format remove debugs remove useless change remove useless change remove legacy approach correct lookahead + error message contains the ident name fmt refactor code tests add tests remoev debug remove comment --- .../rustc_parse/src/parser/diagnostics.rs | 57 ++++++++++++++++++- .../suggest_misplaced_generics/enum.fixed | 10 ++++ .../parser/suggest_misplaced_generics/enum.rs | 10 ++++ .../suggest_misplaced_generics/enum.stderr | 13 +++++ .../existing_generics.rs | 9 +++ .../existing_generics.stderr | 10 ++++ .../fn-complex-generics.fixed | 10 ++++ .../fn-complex-generics.rs | 10 ++++ .../fn-complex-generics.stderr | 13 +++++ .../fn-invalid-generics.rs | 8 +++ .../fn-invalid-generics.stderr | 8 +++ .../fn-simple.fixed | 10 ++++ .../suggest_misplaced_generics/fn-simple.rs | 10 ++++ .../fn-simple.stderr | 13 +++++ .../suggest_misplaced_generics/struct.fixed | 10 ++++ .../suggest_misplaced_generics/struct.rs | 10 ++++ .../suggest_misplaced_generics/struct.stderr | 13 +++++ .../suggest_misplaced_generics/trait.fixed | 12 ++++ .../suggest_misplaced_generics/trait.rs | 12 ++++ .../suggest_misplaced_generics/trait.stderr | 13 +++++ .../suggest_misplaced_generics/type.fixed | 10 ++++ .../parser/suggest_misplaced_generics/type.rs | 10 ++++ .../suggest_misplaced_generics/type.stderr | 13 +++++ 23 files changed, 292 insertions(+), 2 deletions(-) create mode 100644 src/test/ui/parser/suggest_misplaced_generics/enum.fixed create mode 100644 src/test/ui/parser/suggest_misplaced_generics/enum.rs create mode 100644 src/test/ui/parser/suggest_misplaced_generics/enum.stderr create mode 100644 src/test/ui/parser/suggest_misplaced_generics/existing_generics.rs create mode 100644 src/test/ui/parser/suggest_misplaced_generics/existing_generics.stderr create mode 100644 src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed create mode 100644 src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs create mode 100644 src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr create mode 100644 src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.rs create mode 100644 src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.stderr create mode 100644 src/test/ui/parser/suggest_misplaced_generics/fn-simple.fixed create mode 100644 src/test/ui/parser/suggest_misplaced_generics/fn-simple.rs create mode 100644 src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr create mode 100644 src/test/ui/parser/suggest_misplaced_generics/struct.fixed create mode 100644 src/test/ui/parser/suggest_misplaced_generics/struct.rs create mode 100644 src/test/ui/parser/suggest_misplaced_generics/struct.stderr create mode 100644 src/test/ui/parser/suggest_misplaced_generics/trait.fixed create mode 100644 src/test/ui/parser/suggest_misplaced_generics/trait.rs create mode 100644 src/test/ui/parser/suggest_misplaced_generics/trait.stderr create mode 100644 src/test/ui/parser/suggest_misplaced_generics/type.fixed create mode 100644 src/test/ui/parser/suggest_misplaced_generics/type.rs create mode 100644 src/test/ui/parser/suggest_misplaced_generics/type.stderr diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 4c918c6702ed9..6df9cfd3ff4e3 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ 
b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -284,7 +284,7 @@ impl<'a> Parser<'a> { self.sess.source_map().span_to_snippet(span) } - pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a, ErrorGuaranteed> { + pub(super) fn expected_ident_found(&mut self) -> DiagnosticBuilder<'a, ErrorGuaranteed> { let valid_follow = &[ TokenKind::Eq, TokenKind::Colon, @@ -324,7 +324,60 @@ impl<'a> Parser<'a> { suggest_raw, suggest_remove_comma, }; - err.into_diagnostic(&self.sess.span_diagnostic) + let mut err = err.into_diagnostic(&self.sess.span_diagnostic); + + // if the token we have is a `<` + // it *might* be a misplaced generic + if self.token == token::Lt { + // all keywords that could have generic applied + let valid_prev_keywords = + [kw::Fn, kw::Type, kw::Struct, kw::Enum, kw::Union, kw::Trait]; + + // If we've expected an identifier, + // and the current token is a '<' + // if the previous token is a valid keyword + // that might use a generic, then suggest a correct + // generic placement (later on) + let maybe_keyword = self.prev_token.clone(); + if valid_prev_keywords.into_iter().any(|x| maybe_keyword.is_keyword(x)) { + // if we have a valid keyword, attempt to parse generics + // also obtain the keywords symbol + match self.parse_generics() { + Ok(generic) => { + if let TokenKind::Ident(symbol, _) = maybe_keyword.kind { + let ident_name = symbol.to_string(); + // at this point, we've found something like + // `fn id` + // and current token should be Ident with the item name (i.e. the function name) + // if there is a `<` after the fn name, then don't show a suggestion, show help + + if !self.look_ahead(1, |t| *t == token::Lt) && + let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) && + let Ok(ident) = self.sess.source_map().span_to_snippet(self.token.span) { + err.span_suggestion_verbose( + generic.span.to(self.token.span), + format!("place the generic parameter name after the {ident_name} name"), + format!(" {ident}{snippet}"), + Applicability::MachineApplicable, + ); + } else { + err.help(format!( + "place the generic parameter name after the {ident_name} name" + )); + } + } + } + Err(err) => { + // if there's an error parsing the generics, + // then don't do a misplaced generics suggestion + // and emit the expected ident error instead; + err.cancel(); + } + } + } + } + + err } pub(super) fn expected_one_of_not_found( diff --git a/src/test/ui/parser/suggest_misplaced_generics/enum.fixed b/src/test/ui/parser/suggest_misplaced_generics/enum.fixed new file mode 100644 index 0000000000000..a9d3e9f86d09c --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/enum.fixed @@ -0,0 +1,10 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +enum Foo { Variant(T) } +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the enum name +//~| SUGGESTION Foo + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/enum.rs b/src/test/ui/parser/suggest_misplaced_generics/enum.rs new file mode 100644 index 0000000000000..2d216ba53cc72 --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/enum.rs @@ -0,0 +1,10 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +enum Foo { Variant(T) } +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the enum name +//~| SUGGESTION Foo + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/enum.stderr 
b/src/test/ui/parser/suggest_misplaced_generics/enum.stderr new file mode 100644 index 0000000000000..521cee4f72898 --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/enum.stderr @@ -0,0 +1,13 @@ +error: expected identifier, found `<` + --> $DIR/enum.rs:5:5 + | +LL | enum Foo { Variant(T) } + | ^ expected identifier + | +help: place the generic parameter name after the enum name + | +LL | enum Foo { Variant(T) } + | ~~~~~~ + +error: aborting due to previous error + diff --git a/src/test/ui/parser/suggest_misplaced_generics/existing_generics.rs b/src/test/ui/parser/suggest_misplaced_generics/existing_generics.rs new file mode 100644 index 0000000000000..1dc182398d80a --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/existing_generics.rs @@ -0,0 +1,9 @@ +// Issue: 103366 +// there is already an existing generic on f, so don't show a suggestion + +#[allow(unused)] +fn<'a, B: 'a + std::ops::Add> f(_x: B) { } +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the fn name + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/existing_generics.stderr b/src/test/ui/parser/suggest_misplaced_generics/existing_generics.stderr new file mode 100644 index 0000000000000..89716e6f1ed0a --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/existing_generics.stderr @@ -0,0 +1,10 @@ +error: expected identifier, found `<` + --> $DIR/existing_generics.rs:5:3 + | +LL | fn<'a, B: 'a + std::ops::Add> f(_x: B) { } + | ^ expected identifier + | + = help: place the generic parameter name after the fn name + +error: aborting due to previous error + diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed b/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed new file mode 100644 index 0000000000000..06947e098ee6a --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed @@ -0,0 +1,10 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +fn f<'a, B: 'a + std::ops::Add>(_x: B) { } +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the fn name +//~| SUGGESTION f<'a, B: 'a + std::ops::Add> + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs b/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs new file mode 100644 index 0000000000000..cefce8d08806d --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs @@ -0,0 +1,10 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +fn<'a, B: 'a + std::ops::Add> f(_x: B) { } +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the fn name +//~| SUGGESTION f<'a, B: 'a + std::ops::Add> + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr b/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr new file mode 100644 index 0000000000000..7d1b44c44944c --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr @@ -0,0 +1,13 @@ +error: expected identifier, found `<` + --> $DIR/fn-complex-generics.rs:5:3 + | +LL | fn<'a, B: 'a + std::ops::Add> f(_x: B) { } + | ^ expected identifier + | +help: place the generic parameter name after the fn name + | +LL | fn f<'a, B: 'a + std::ops::Add>(_x: B) { } + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + 
+error: aborting due to previous error + diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.rs b/src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.rs new file mode 100644 index 0000000000000..7fcb6a82ce451 --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.rs @@ -0,0 +1,8 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// The generics fail to parse here, so don't make any suggestions/help + +#[allow(unused)] +fn<~>()> id(x: T) -> T { x } +//~^ ERROR expected identifier, found `<` + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.stderr b/src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.stderr new file mode 100644 index 0000000000000..47e12016938d8 --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.stderr @@ -0,0 +1,8 @@ +error: expected identifier, found `<` + --> $DIR/fn-invalid-generics.rs:5:3 + | +LL | fn<~>()> id(x: T) -> T { x } + | ^ expected identifier + +error: aborting due to previous error + diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-simple.fixed b/src/test/ui/parser/suggest_misplaced_generics/fn-simple.fixed new file mode 100644 index 0000000000000..31c5429b16b05 --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-simple.fixed @@ -0,0 +1,10 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +fn id(x: T) -> T { x } +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the fn name +//~| SUGGESTION id + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-simple.rs b/src/test/ui/parser/suggest_misplaced_generics/fn-simple.rs new file mode 100644 index 0000000000000..0a466184e996f --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-simple.rs @@ -0,0 +1,10 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +fn id(x: T) -> T { x } +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the fn name +//~| SUGGESTION id + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr b/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr new file mode 100644 index 0000000000000..40c4581e513ad --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr @@ -0,0 +1,13 @@ +error: expected identifier, found `<` + --> $DIR/fn-simple.rs:5:3 + | +LL | fn id(x: T) -> T { x } + | ^ expected identifier + | +help: place the generic parameter name after the fn name + | +LL | fn id(x: T) -> T { x } + | ~~~~~ + +error: aborting due to previous error + diff --git a/src/test/ui/parser/suggest_misplaced_generics/struct.fixed b/src/test/ui/parser/suggest_misplaced_generics/struct.fixed new file mode 100644 index 0000000000000..8627699a83084 --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/struct.fixed @@ -0,0 +1,10 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +struct Foo { x: T } +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the struct name +//~| SUGGESTION Foo + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/struct.rs b/src/test/ui/parser/suggest_misplaced_generics/struct.rs new file mode 100644 index 0000000000000..15646b06cfc62 --- /dev/null +++ 
b/src/test/ui/parser/suggest_misplaced_generics/struct.rs @@ -0,0 +1,10 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +struct Foo { x: T } +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the struct name +//~| SUGGESTION Foo + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/struct.stderr b/src/test/ui/parser/suggest_misplaced_generics/struct.stderr new file mode 100644 index 0000000000000..ab17ee57e0bcd --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/struct.stderr @@ -0,0 +1,13 @@ +error: expected identifier, found `<` + --> $DIR/struct.rs:5:7 + | +LL | struct Foo { x: T } + | ^ expected identifier + | +help: place the generic parameter name after the struct name + | +LL | struct Foo { x: T } + | ~~~~~~ + +error: aborting due to previous error + diff --git a/src/test/ui/parser/suggest_misplaced_generics/trait.fixed b/src/test/ui/parser/suggest_misplaced_generics/trait.fixed new file mode 100644 index 0000000000000..31ebf1f088fc7 --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/trait.fixed @@ -0,0 +1,12 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +trait Foo { + //~^ ERROR expected identifier, found `<` + //~| HELP place the generic parameter name after the trait name + //~| SUGGESTION Foo +} + + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/trait.rs b/src/test/ui/parser/suggest_misplaced_generics/trait.rs new file mode 100644 index 0000000000000..81b6abbd66163 --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/trait.rs @@ -0,0 +1,12 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +trait Foo { + //~^ ERROR expected identifier, found `<` + //~| HELP place the generic parameter name after the trait name + //~| SUGGESTION Foo +} + + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/trait.stderr b/src/test/ui/parser/suggest_misplaced_generics/trait.stderr new file mode 100644 index 0000000000000..069683bda1be3 --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/trait.stderr @@ -0,0 +1,13 @@ +error: expected identifier, found `<` + --> $DIR/trait.rs:5:6 + | +LL | trait Foo { + | ^ expected identifier + | +help: place the generic parameter name after the trait name + | +LL | trait Foo { + | ~~~~~~ + +error: aborting due to previous error + diff --git a/src/test/ui/parser/suggest_misplaced_generics/type.fixed b/src/test/ui/parser/suggest_misplaced_generics/type.fixed new file mode 100644 index 0000000000000..b04003b803d1c --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/type.fixed @@ -0,0 +1,10 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +type Foo = T; +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the type name +//~| SUGGESTION Foo + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/type.rs b/src/test/ui/parser/suggest_misplaced_generics/type.rs new file mode 100644 index 0000000000000..2d759a8b1ab61 --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/type.rs @@ -0,0 +1,10 @@ +// Issue: 103366 , Suggest fix for misplaced generic params +// run-rustfix + +#[allow(unused)] +type Foo = T; +//~^ ERROR expected identifier, found `<` +//~| HELP place the generic parameter name after the type name +//~| 
SUGGESTION Foo + +fn main() {} diff --git a/src/test/ui/parser/suggest_misplaced_generics/type.stderr b/src/test/ui/parser/suggest_misplaced_generics/type.stderr new file mode 100644 index 0000000000000..a2832965c6d0e --- /dev/null +++ b/src/test/ui/parser/suggest_misplaced_generics/type.stderr @@ -0,0 +1,13 @@ +error: expected identifier, found `<` + --> $DIR/type.rs:5:5 + | +LL | type Foo = T; + | ^ expected identifier + | +help: place the generic parameter name after the type name + | +LL | type Foo = T; + | ~~~~~~ + +error: aborting due to previous error + From 5287004aa4f9b0685197cc0c009237812fed7047 Mon Sep 17 00:00:00 2001 From: Shrey Sudhir Date: Fri, 2 Dec 2022 00:40:33 +1100 Subject: [PATCH 118/501] Apply automatic suggestions from code review Co-authored-by: Takayuki Maeda --- compiler/rustc_parse/src/parser/diagnostics.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 6df9cfd3ff4e3..1e1e804c0d30a 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -345,7 +345,7 @@ impl<'a> Parser<'a> { match self.parse_generics() { Ok(generic) => { if let TokenKind::Ident(symbol, _) = maybe_keyword.kind { - let ident_name = symbol.to_string(); + let ident_name = symbol; // at this point, we've found something like // `fn id` // and current token should be Ident with the item name (i.e. the function name) @@ -355,9 +355,9 @@ impl<'a> Parser<'a> { let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) && let Ok(ident) = self.sess.source_map().span_to_snippet(self.token.span) { err.span_suggestion_verbose( - generic.span.to(self.token.span), + self.token.span.shrink_to_hi(), format!("place the generic parameter name after the {ident_name} name"), - format!(" {ident}{snippet}"), + snippet, Applicability::MachineApplicable, ); } else { From 655beb4ece8a116c664ae63f26811ba75aa9e0e7 Mon Sep 17 00:00:00 2001 From: Shrey Sudhir Date: Thu, 1 Dec 2022 14:12:33 +0000 Subject: [PATCH 119/501] Attempt to address review comments via github web... 
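One of the review points addressed here is the applicability of the suggestion: because the recovery is heuristic, it is now emitted as `Applicability::MaybeIncorrect` rather than `MachineApplicable`, so the fix stays visible in diagnostics but is not applied automatically by rustfix. For reference, a small self-contained illustration of the user-facing behaviour being tuned (modelled on the fn-simple test in this series, not taken from the compiler change itself; the broken input is kept in a comment so the snippet compiles):

    // A user who writes the generic parameters before the function name, e.g.
    //
    //     fn<T> id(x: T) -> T { x }
    //
    // gets "expected identifier, found `<`" together with a suggestion to place
    // the parameters after the name, i.e. the compiling version below.
    fn id<T>(x: T) -> T {
        x
    }

    fn main() {
        assert_eq!(id(3), 3);
    }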
--- compiler/rustc_parse/src/parser/diagnostics.rs | 5 ++--- src/test/ui/parser/suggest_misplaced_generics/enum.stderr | 2 +- .../suggest_misplaced_generics/fn-complex-generics.stderr | 2 +- .../ui/parser/suggest_misplaced_generics/fn-simple.stderr | 2 +- src/test/ui/parser/suggest_misplaced_generics/struct.stderr | 2 +- src/test/ui/parser/suggest_misplaced_generics/trait.stderr | 2 +- src/test/ui/parser/suggest_misplaced_generics/type.stderr | 2 +- 7 files changed, 8 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 1e1e804c0d30a..94bedc07ba155 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -352,13 +352,12 @@ impl<'a> Parser<'a> { // if there is a `<` after the fn name, then don't show a suggestion, show help if !self.look_ahead(1, |t| *t == token::Lt) && - let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) && - let Ok(ident) = self.sess.source_map().span_to_snippet(self.token.span) { + let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) { err.span_suggestion_verbose( self.token.span.shrink_to_hi(), format!("place the generic parameter name after the {ident_name} name"), snippet, - Applicability::MachineApplicable, + Applicability::MaybeIncorrect, ); } else { err.help(format!( diff --git a/src/test/ui/parser/suggest_misplaced_generics/enum.stderr b/src/test/ui/parser/suggest_misplaced_generics/enum.stderr index 521cee4f72898..8af94856a4a48 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/enum.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/enum.stderr @@ -7,7 +7,7 @@ LL | enum Foo { Variant(T) } help: place the generic parameter name after the enum name | LL | enum Foo { Variant(T) } - | ~~~~~~ + | ~~~ error: aborting due to previous error diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr b/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr index 7d1b44c44944c..196769cb2b569 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr @@ -7,7 +7,7 @@ LL | fn<'a, B: 'a + std::ops::Add> f(_x: B) { } help: place the generic parameter name after the fn name | LL | fn f<'a, B: 'a + std::ops::Add>(_x: B) { } - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ error: aborting due to previous error diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr b/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr index 40c4581e513ad..0d09d8967b849 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr @@ -7,7 +7,7 @@ LL | fn id(x: T) -> T { x } help: place the generic parameter name after the fn name | LL | fn id(x: T) -> T { x } - | ~~~~~ + | ~~~ error: aborting due to previous error diff --git a/src/test/ui/parser/suggest_misplaced_generics/struct.stderr b/src/test/ui/parser/suggest_misplaced_generics/struct.stderr index ab17ee57e0bcd..32ffdb5e9c308 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/struct.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/struct.stderr @@ -7,7 +7,7 @@ LL | struct Foo { x: T } help: place the generic parameter name after the struct name | LL | struct Foo { x: T } - | ~~~~~~ + | ~~~ error: aborting due to previous 
error diff --git a/src/test/ui/parser/suggest_misplaced_generics/trait.stderr b/src/test/ui/parser/suggest_misplaced_generics/trait.stderr index 069683bda1be3..01a31b7a85a9d 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/trait.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/trait.stderr @@ -7,7 +7,7 @@ LL | trait Foo { help: place the generic parameter name after the trait name | LL | trait Foo { - | ~~~~~~ + | ~~~ error: aborting due to previous error diff --git a/src/test/ui/parser/suggest_misplaced_generics/type.stderr b/src/test/ui/parser/suggest_misplaced_generics/type.stderr index a2832965c6d0e..1ae73fae7f926 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/type.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/type.stderr @@ -7,7 +7,7 @@ LL | type Foo = T; help: place the generic parameter name after the type name | LL | type Foo = T; - | ~~~~~~ + | ~~~ error: aborting due to previous error From 4447949e400822a02cc9945fc39f06842e6b9439 Mon Sep 17 00:00:00 2001 From: SpanishPear Date: Sun, 22 Jan 2023 16:45:56 +1100 Subject: [PATCH 120/501] revert to previous span --- compiler/rustc_parse/src/parser/diagnostics.rs | 7 ++++--- src/test/ui/parser/suggest_misplaced_generics/enum.stderr | 2 +- .../suggest_misplaced_generics/fn-complex-generics.stderr | 2 +- .../ui/parser/suggest_misplaced_generics/fn-simple.stderr | 2 +- .../ui/parser/suggest_misplaced_generics/struct.stderr | 2 +- src/test/ui/parser/suggest_misplaced_generics/trait.stderr | 2 +- src/test/ui/parser/suggest_misplaced_generics/type.stderr | 2 +- 7 files changed, 10 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 94bedc07ba155..9ac3bb946dc42 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -352,11 +352,12 @@ impl<'a> Parser<'a> { // if there is a `<` after the fn name, then don't show a suggestion, show help if !self.look_ahead(1, |t| *t == token::Lt) && - let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) { + let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) && + let Ok(ident) = self.sess.source_map().span_to_snippet(self.token.span) { err.span_suggestion_verbose( - self.token.span.shrink_to_hi(), + generic.span.to(self.token.span), format!("place the generic parameter name after the {ident_name} name"), - snippet, + format!(" {ident}{snippet}"), Applicability::MaybeIncorrect, ); } else { diff --git a/src/test/ui/parser/suggest_misplaced_generics/enum.stderr b/src/test/ui/parser/suggest_misplaced_generics/enum.stderr index 8af94856a4a48..521cee4f72898 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/enum.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/enum.stderr @@ -7,7 +7,7 @@ LL | enum Foo { Variant(T) } help: place the generic parameter name after the enum name | LL | enum Foo { Variant(T) } - | ~~~ + | ~~~~~~ error: aborting due to previous error diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr b/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr index 196769cb2b569..7d1b44c44944c 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr @@ -7,7 +7,7 @@ LL | fn<'a, B: 'a + std::ops::Add> f(_x: B) { } help: place the generic parameter name after the fn name | LL | fn f<'a, B: 'a + 
std::ops::Add>(_x: B) { } - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ error: aborting due to previous error diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr b/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr index 0d09d8967b849..40c4581e513ad 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr @@ -7,7 +7,7 @@ LL | fn id(x: T) -> T { x } help: place the generic parameter name after the fn name | LL | fn id(x: T) -> T { x } - | ~~~ + | ~~~~~ error: aborting due to previous error diff --git a/src/test/ui/parser/suggest_misplaced_generics/struct.stderr b/src/test/ui/parser/suggest_misplaced_generics/struct.stderr index 32ffdb5e9c308..ab17ee57e0bcd 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/struct.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/struct.stderr @@ -7,7 +7,7 @@ LL | struct Foo { x: T } help: place the generic parameter name after the struct name | LL | struct Foo { x: T } - | ~~~ + | ~~~~~~ error: aborting due to previous error diff --git a/src/test/ui/parser/suggest_misplaced_generics/trait.stderr b/src/test/ui/parser/suggest_misplaced_generics/trait.stderr index 01a31b7a85a9d..069683bda1be3 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/trait.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/trait.stderr @@ -7,7 +7,7 @@ LL | trait Foo { help: place the generic parameter name after the trait name | LL | trait Foo { - | ~~~ + | ~~~~~~ error: aborting due to previous error diff --git a/src/test/ui/parser/suggest_misplaced_generics/type.stderr b/src/test/ui/parser/suggest_misplaced_generics/type.stderr index 1ae73fae7f926..a2832965c6d0e 100644 --- a/src/test/ui/parser/suggest_misplaced_generics/type.stderr +++ b/src/test/ui/parser/suggest_misplaced_generics/type.stderr @@ -7,7 +7,7 @@ LL | type Foo = T; help: place the generic parameter name after the type name | LL | type Foo = T; - | ~~~ + | ~~~~~~ error: aborting due to previous error From 8292d07cc4c9a070a9de808620bb79bab6935f70 Mon Sep 17 00:00:00 2001 From: SpanishPear Date: Sun, 22 Jan 2023 17:16:39 +1100 Subject: [PATCH 121/501] move tests to new rust-lang location --- .../ui/parser/suggest_misplaced_generics/enum.fixed | 0 {src/test => tests}/ui/parser/suggest_misplaced_generics/enum.rs | 0 .../ui/parser/suggest_misplaced_generics/enum.stderr | 0 .../ui/parser/suggest_misplaced_generics/existing_generics.rs | 0 .../ui/parser/suggest_misplaced_generics/existing_generics.stderr | 0 .../parser/suggest_misplaced_generics/fn-complex-generics.fixed | 0 .../ui/parser/suggest_misplaced_generics/fn-complex-generics.rs | 0 .../parser/suggest_misplaced_generics/fn-complex-generics.stderr | 0 .../ui/parser/suggest_misplaced_generics/fn-invalid-generics.rs | 0 .../parser/suggest_misplaced_generics/fn-invalid-generics.stderr | 0 .../ui/parser/suggest_misplaced_generics/fn-simple.fixed | 0 .../ui/parser/suggest_misplaced_generics/fn-simple.rs | 0 .../ui/parser/suggest_misplaced_generics/fn-simple.stderr | 0 .../ui/parser/suggest_misplaced_generics/struct.fixed | 0 .../test => tests}/ui/parser/suggest_misplaced_generics/struct.rs | 0 .../ui/parser/suggest_misplaced_generics/struct.stderr | 0 .../ui/parser/suggest_misplaced_generics/trait.fixed | 0 {src/test => tests}/ui/parser/suggest_misplaced_generics/trait.rs | 0 .../ui/parser/suggest_misplaced_generics/trait.stderr | 0 
.../ui/parser/suggest_misplaced_generics/type.fixed | 0 {src/test => tests}/ui/parser/suggest_misplaced_generics/type.rs | 0 .../ui/parser/suggest_misplaced_generics/type.stderr | 0 22 files changed, 0 insertions(+), 0 deletions(-) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/enum.fixed (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/enum.rs (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/enum.stderr (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/existing_generics.rs (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/existing_generics.stderr (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/fn-invalid-generics.rs (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/fn-invalid-generics.stderr (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/fn-simple.fixed (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/fn-simple.rs (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/fn-simple.stderr (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/struct.fixed (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/struct.rs (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/struct.stderr (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/trait.fixed (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/trait.rs (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/trait.stderr (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/type.fixed (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/type.rs (100%) rename {src/test => tests}/ui/parser/suggest_misplaced_generics/type.stderr (100%) diff --git a/src/test/ui/parser/suggest_misplaced_generics/enum.fixed b/tests/ui/parser/suggest_misplaced_generics/enum.fixed similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/enum.fixed rename to tests/ui/parser/suggest_misplaced_generics/enum.fixed diff --git a/src/test/ui/parser/suggest_misplaced_generics/enum.rs b/tests/ui/parser/suggest_misplaced_generics/enum.rs similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/enum.rs rename to tests/ui/parser/suggest_misplaced_generics/enum.rs diff --git a/src/test/ui/parser/suggest_misplaced_generics/enum.stderr b/tests/ui/parser/suggest_misplaced_generics/enum.stderr similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/enum.stderr rename to tests/ui/parser/suggest_misplaced_generics/enum.stderr diff --git a/src/test/ui/parser/suggest_misplaced_generics/existing_generics.rs b/tests/ui/parser/suggest_misplaced_generics/existing_generics.rs similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/existing_generics.rs rename to tests/ui/parser/suggest_misplaced_generics/existing_generics.rs diff --git a/src/test/ui/parser/suggest_misplaced_generics/existing_generics.stderr b/tests/ui/parser/suggest_misplaced_generics/existing_generics.stderr similarity index 100% rename from 
src/test/ui/parser/suggest_misplaced_generics/existing_generics.stderr rename to tests/ui/parser/suggest_misplaced_generics/existing_generics.stderr diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed b/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed rename to tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs b/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs rename to tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr b/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr rename to tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.rs b/tests/ui/parser/suggest_misplaced_generics/fn-invalid-generics.rs similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.rs rename to tests/ui/parser/suggest_misplaced_generics/fn-invalid-generics.rs diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.stderr b/tests/ui/parser/suggest_misplaced_generics/fn-invalid-generics.stderr similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/fn-invalid-generics.stderr rename to tests/ui/parser/suggest_misplaced_generics/fn-invalid-generics.stderr diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-simple.fixed b/tests/ui/parser/suggest_misplaced_generics/fn-simple.fixed similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/fn-simple.fixed rename to tests/ui/parser/suggest_misplaced_generics/fn-simple.fixed diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-simple.rs b/tests/ui/parser/suggest_misplaced_generics/fn-simple.rs similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/fn-simple.rs rename to tests/ui/parser/suggest_misplaced_generics/fn-simple.rs diff --git a/src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr b/tests/ui/parser/suggest_misplaced_generics/fn-simple.stderr similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/fn-simple.stderr rename to tests/ui/parser/suggest_misplaced_generics/fn-simple.stderr diff --git a/src/test/ui/parser/suggest_misplaced_generics/struct.fixed b/tests/ui/parser/suggest_misplaced_generics/struct.fixed similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/struct.fixed rename to tests/ui/parser/suggest_misplaced_generics/struct.fixed diff --git a/src/test/ui/parser/suggest_misplaced_generics/struct.rs b/tests/ui/parser/suggest_misplaced_generics/struct.rs similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/struct.rs rename to tests/ui/parser/suggest_misplaced_generics/struct.rs diff --git a/src/test/ui/parser/suggest_misplaced_generics/struct.stderr b/tests/ui/parser/suggest_misplaced_generics/struct.stderr similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/struct.stderr rename to 
tests/ui/parser/suggest_misplaced_generics/struct.stderr diff --git a/src/test/ui/parser/suggest_misplaced_generics/trait.fixed b/tests/ui/parser/suggest_misplaced_generics/trait.fixed similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/trait.fixed rename to tests/ui/parser/suggest_misplaced_generics/trait.fixed diff --git a/src/test/ui/parser/suggest_misplaced_generics/trait.rs b/tests/ui/parser/suggest_misplaced_generics/trait.rs similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/trait.rs rename to tests/ui/parser/suggest_misplaced_generics/trait.rs diff --git a/src/test/ui/parser/suggest_misplaced_generics/trait.stderr b/tests/ui/parser/suggest_misplaced_generics/trait.stderr similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/trait.stderr rename to tests/ui/parser/suggest_misplaced_generics/trait.stderr diff --git a/src/test/ui/parser/suggest_misplaced_generics/type.fixed b/tests/ui/parser/suggest_misplaced_generics/type.fixed similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/type.fixed rename to tests/ui/parser/suggest_misplaced_generics/type.fixed diff --git a/src/test/ui/parser/suggest_misplaced_generics/type.rs b/tests/ui/parser/suggest_misplaced_generics/type.rs similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/type.rs rename to tests/ui/parser/suggest_misplaced_generics/type.rs diff --git a/src/test/ui/parser/suggest_misplaced_generics/type.stderr b/tests/ui/parser/suggest_misplaced_generics/type.stderr similarity index 100% rename from src/test/ui/parser/suggest_misplaced_generics/type.stderr rename to tests/ui/parser/suggest_misplaced_generics/type.stderr From a3065a1a34fe1c0b85bdf3ff1f3d0bd470235e6b Mon Sep 17 00:00:00 2001 From: Lukas Bergdoll Date: Sun, 22 Jan 2023 11:55:35 +0100 Subject: [PATCH 122/501] Unify insertion sort implementations Avoid duplicate insertion sort implementations. Optimize implementations. --- library/core/src/slice/sort.rs | 359 +++++++++++++++++---------------- 1 file changed, 188 insertions(+), 171 deletions(-) diff --git a/library/core/src/slice/sort.rs b/library/core/src/slice/sort.rs index 7f8895b150fe7..6bb53b16e6100 100644 --- a/library/core/src/slice/sort.rs +++ b/library/core/src/slice/sort.rs @@ -13,115 +13,178 @@ use crate::cmp; use crate::mem::{self, MaybeUninit, SizedTypeProperties}; use crate::ptr; -/// When dropped, copies from `src` into `dest`. -struct CopyOnDrop { +// When dropped, copies from `src` into `dest`. +struct InsertionHole { src: *const T, dest: *mut T, } -impl Drop for CopyOnDrop { +impl Drop for InsertionHole { fn drop(&mut self) { - // SAFETY: This is a helper class. - // Please refer to its usage for correctness. - // Namely, one must be sure that `src` and `dst` does not overlap as required by `ptr::copy_nonoverlapping`. unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1); } } } -/// Shifts the first element to the right until it encounters a greater or equal element. -fn shift_head(v: &mut [T], is_less: &mut F) +/// Inserts `v[v.len() - 1]` into pre-sorted sequence `v[..v.len() - 1]` so that whole `v[..]` +/// becomes sorted. +unsafe fn insert_tail(v: &mut [T], is_less: &mut F) where F: FnMut(&T, &T) -> bool, { - let len = v.len(); - // SAFETY: The unsafe operations below involves indexing without a bounds check (by offsetting a - // pointer) and copying memory (`ptr::copy_nonoverlapping`). - // - // a. Indexing: - // 1. We checked the size of the array to >=2. 
- // 2. All the indexing that we will do is always between {0 <= index < len} at most. - // - // b. Memory copying - // 1. We are obtaining pointers to references which are guaranteed to be valid. - // 2. They cannot overlap because we obtain pointers to difference indices of the slice. - // Namely, `i` and `i-1`. - // 3. If the slice is properly aligned, the elements are properly aligned. - // It is the caller's responsibility to make sure the slice is properly aligned. - // - // See comments below for further detail. + debug_assert!(v.len() >= 2); + + let arr_ptr = v.as_mut_ptr(); + let i = v.len() - 1; + + // SAFETY: caller must ensure v is at least len 2. unsafe { - // If the first two elements are out-of-order... - if len >= 2 && is_less(v.get_unchecked(1), v.get_unchecked(0)) { - // Read the first element into a stack-allocated variable. If a following comparison - // operation panics, `hole` will get dropped and automatically write the element back - // into the slice. - let tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(0))); - let v = v.as_mut_ptr(); - let mut hole = CopyOnDrop { src: &*tmp, dest: v.add(1) }; - ptr::copy_nonoverlapping(v.add(1), v.add(0), 1); - - for i in 2..len { - if !is_less(&*v.add(i), &*tmp) { + // See insert_head which talks about why this approach is beneficial. + let i_ptr = arr_ptr.add(i); + + // It's important that we use i_ptr here. If this check is positive and we continue, + // We want to make sure that no other copy of the value was seen by is_less. + // Otherwise we would have to copy it back. + if is_less(&*i_ptr, &*i_ptr.sub(1)) { + // It's important, that we use tmp for comparison from now on. As it is the value that + // will be copied back. And notionally we could have created a divergence if we copy + // back the wrong value. + let tmp = mem::ManuallyDrop::new(ptr::read(i_ptr)); + // Intermediate state of the insertion process is always tracked by `hole`, which + // serves two purposes: + // 1. Protects integrity of `v` from panics in `is_less`. + // 2. Fills the remaining hole in `v` in the end. + // + // Panic safety: + // + // If `is_less` panics at any point during the process, `hole` will get dropped and + // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it + // initially held exactly once. + let mut hole = InsertionHole { src: &*tmp, dest: i_ptr.sub(1) }; + ptr::copy_nonoverlapping(hole.dest, i_ptr, 1); + + // SAFETY: We know i is at least 1. + for j in (0..(i - 1)).rev() { + let j_ptr = arr_ptr.add(j); + if !is_less(&*tmp, &*j_ptr) { break; } - // Move `i`-th element one place to the left, thus shifting the hole to the right. - ptr::copy_nonoverlapping(v.add(i), v.add(i - 1), 1); - hole.dest = v.add(i); + ptr::copy_nonoverlapping(j_ptr, hole.dest, 1); + hole.dest = j_ptr; } // `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`. } } } -/// Shifts the last element to the left until it encounters a smaller or equal element. -fn shift_tail(v: &mut [T], is_less: &mut F) +/// Inserts `v[0]` into pre-sorted sequence `v[1..]` so that whole `v[..]` becomes sorted. +/// +/// This is the integral subroutine of insertion sort. +unsafe fn insert_head(v: &mut [T], is_less: &mut F) where F: FnMut(&T, &T) -> bool, { - let len = v.len(); - // SAFETY: The unsafe operations below involves indexing without a bound check (by offsetting a - // pointer) and copying memory (`ptr::copy_nonoverlapping`). - // - // a. Indexing: - // 1. We checked the size of the array to >= 2. - // 2. 
All the indexing that we will do is always between `0 <= index < len-1` at most. - // - // b. Memory copying - // 1. We are obtaining pointers to references which are guaranteed to be valid. - // 2. They cannot overlap because we obtain pointers to difference indices of the slice. - // Namely, `i` and `i+1`. - // 3. If the slice is properly aligned, the elements are properly aligned. - // It is the caller's responsibility to make sure the slice is properly aligned. - // - // See comments below for further detail. + debug_assert!(v.len() >= 2); + unsafe { - // If the last two elements are out-of-order... - if len >= 2 && is_less(v.get_unchecked(len - 1), v.get_unchecked(len - 2)) { - // Read the last element into a stack-allocated variable. If a following comparison - // operation panics, `hole` will get dropped and automatically write the element back - // into the slice. - let tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(len - 1))); - let v = v.as_mut_ptr(); - let mut hole = CopyOnDrop { src: &*tmp, dest: v.add(len - 2) }; - ptr::copy_nonoverlapping(v.add(len - 2), v.add(len - 1), 1); - - for i in (0..len - 2).rev() { - if !is_less(&*tmp, &*v.add(i)) { + if is_less(v.get_unchecked(1), v.get_unchecked(0)) { + let arr_ptr = v.as_mut_ptr(); + + // There are three ways to implement insertion here: + // + // 1. Swap adjacent elements until the first one gets to its final destination. + // However, this way we copy data around more than is necessary. If elements are big + // structures (costly to copy), this method will be slow. + // + // 2. Iterate until the right place for the first element is found. Then shift the + // elements succeeding it to make room for it and finally place it into the + // remaining hole. This is a good method. + // + // 3. Copy the first element into a temporary variable. Iterate until the right place + // for it is found. As we go along, copy every traversed element into the slot + // preceding it. Finally, copy data from the temporary variable into the remaining + // hole. This method is very good. Benchmarks demonstrated slightly better + // performance than with the 2nd method. + // + // All methods were benchmarked, and the 3rd showed best results. So we chose that one. + let tmp = mem::ManuallyDrop::new(ptr::read(arr_ptr)); + + // Intermediate state of the insertion process is always tracked by `hole`, which + // serves two purposes: + // 1. Protects integrity of `v` from panics in `is_less`. + // 2. Fills the remaining hole in `v` in the end. + // + // Panic safety: + // + // If `is_less` panics at any point during the process, `hole` will get dropped and + // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it + // initially held exactly once. + let mut hole = InsertionHole { src: &*tmp, dest: arr_ptr.add(1) }; + ptr::copy_nonoverlapping(arr_ptr.add(1), arr_ptr.add(0), 1); + + for i in 2..v.len() { + if !is_less(&v.get_unchecked(i), &*tmp) { break; } - - // Move `i`-th element one place to the right, thus shifting the hole to the left. - ptr::copy_nonoverlapping(v.add(i), v.add(i + 1), 1); - hole.dest = v.add(i); + ptr::copy_nonoverlapping(arr_ptr.add(i), arr_ptr.add(i - 1), 1); + hole.dest = arr_ptr.add(i); } // `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`. } } } +/// Sort `v` assuming `v[..offset]` is already sorted. +/// +/// Never inline this function to avoid code bloat. It still optimizes nicely and has practically no +/// performance impact. Even improving performance in some cases. 
+#[inline(never)] +fn insertion_sort_shift_left(v: &mut [T], offset: usize, is_less: &mut F) +where + F: FnMut(&T, &T) -> bool, +{ + let len = v.len(); + + // Using assert here improves performance. + assert!(offset != 0 && offset <= len); + + // Shift each element of the unsorted region v[i..] as far left as is needed to make v sorted. + for i in offset..len { + // SAFETY: we tested that `offset` must be at least 1, so this loop is only entered if len + // >= 2. + unsafe { + insert_tail(&mut v[..=i], is_less); + } + } +} + +/// Sort `v` assuming `v[offset..]` is already sorted. +/// +/// Never inline this function to avoid code bloat. It still optimizes nicely and has practically no +/// performance impact. Even improving performance in some cases. +#[inline(never)] +fn insertion_sort_shift_right(v: &mut [T], offset: usize, is_less: &mut F) +where + F: FnMut(&T, &T) -> bool, +{ + let len = v.len(); + + // Using assert here improves performance. + assert!(offset != 0 && offset <= len && len >= 2); + + // Shift each element of the unsorted region v[..i] as far left as is needed to make v sorted. + for i in (0..offset).rev() { + // We ensured that the slice length is always at least 2 long. + // We know that start_found will be at least one less than end, + // and the range is exclusive. Which gives us i always <= (end - 2). + unsafe { + insert_head(&mut v[i..len], is_less); + } + } +} + /// Partially sorts a slice by shifting several out-of-order elements around. /// /// Returns `true` if the slice is sorted at the end. This function is *O*(*n*) worst-case. @@ -161,26 +224,19 @@ where // Swap the found pair of elements. This puts them in correct order. v.swap(i - 1, i); - // Shift the smaller element to the left. - shift_tail(&mut v[..i], is_less); - // Shift the greater element to the right. - shift_head(&mut v[i..], is_less); + if i >= 2 { + // Shift the smaller element to the left. + insertion_sort_shift_left(&mut v[..i], i - 1, is_less); + + // Shift the greater element to the right. + insertion_sort_shift_right(&mut v[..i], 1, is_less); + } } // Didn't manage to sort the slice in the limited number of steps. false } -/// Sorts a slice using insertion sort, which is *O*(*n*^2) worst-case. -fn insertion_sort(v: &mut [T], is_less: &mut F) -where - F: FnMut(&T, &T) -> bool, -{ - for i in 1..v.len() { - shift_tail(&mut v[..i + 1], is_less); - } -} - /// Sorts `v` using heapsort, which guarantees *O*(*n* \* log(*n*)) worst-case. #[cold] #[unstable(feature = "sort_internals", reason = "internal to sort module", issue = "none")] @@ -507,7 +563,7 @@ where // SAFETY: `pivot` is a reference to the first element of `v`, so `ptr::read` is safe. let tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) }); - let _pivot_guard = CopyOnDrop { src: &*tmp, dest: pivot }; + let _pivot_guard = InsertionHole { src: &*tmp, dest: pivot }; let pivot = &*tmp; // Find the first pair of out-of-order elements. @@ -560,7 +616,7 @@ where // operation panics, the pivot will be automatically written back into the slice. // SAFETY: The pointer here is valid because it is obtained from a reference to a slice. let tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) }); - let _pivot_guard = CopyOnDrop { src: &*tmp, dest: pivot }; + let _pivot_guard = InsertionHole { src: &*tmp, dest: pivot }; let pivot = &*tmp; // Now partition the slice. @@ -742,7 +798,9 @@ where // Very short slices get sorted using insertion sort. 
if len <= MAX_INSERTION { - insertion_sort(v, is_less); + if len >= 2 { + insertion_sort_shift_left(v, 1, is_less); + } return; } @@ -844,10 +902,14 @@ fn partition_at_index_loop<'a, T, F>( let mut was_balanced = true; loop { + let len = v.len(); + // For slices of up to this length it's probably faster to simply sort them. const MAX_INSERTION: usize = 10; - if v.len() <= MAX_INSERTION { - insertion_sort(v, is_less); + if len <= MAX_INSERTION { + if len >= 2 { + insertion_sort_shift_left(v, 1, is_less); + } return; } @@ -887,7 +949,7 @@ fn partition_at_index_loop<'a, T, F>( } let (mid, _) = partition(v, pivot, is_less); - was_balanced = cmp::min(mid, v.len() - mid) >= v.len() / 8; + was_balanced = cmp::min(mid, len - mid) >= len / 8; // Split the slice into `left`, `pivot`, and `right`. let (left, right) = v.split_at_mut(mid); @@ -954,75 +1016,6 @@ where (left, pivot, right) } -/// Inserts `v[0]` into pre-sorted sequence `v[1..]` so that whole `v[..]` becomes sorted. -/// -/// This is the integral subroutine of insertion sort. -fn insert_head(v: &mut [T], is_less: &mut F) -where - F: FnMut(&T, &T) -> bool, -{ - if v.len() >= 2 && is_less(&v[1], &v[0]) { - // SAFETY: Copy tmp back even if panic, and ensure unique observation. - unsafe { - // There are three ways to implement insertion here: - // - // 1. Swap adjacent elements until the first one gets to its final destination. - // However, this way we copy data around more than is necessary. If elements are big - // structures (costly to copy), this method will be slow. - // - // 2. Iterate until the right place for the first element is found. Then shift the - // elements succeeding it to make room for it and finally place it into the - // remaining hole. This is a good method. - // - // 3. Copy the first element into a temporary variable. Iterate until the right place - // for it is found. As we go along, copy every traversed element into the slot - // preceding it. Finally, copy data from the temporary variable into the remaining - // hole. This method is very good. Benchmarks demonstrated slightly better - // performance than with the 2nd method. - // - // All methods were benchmarked, and the 3rd showed best results. So we chose that one. - let tmp = mem::ManuallyDrop::new(ptr::read(&v[0])); - - // Intermediate state of the insertion process is always tracked by `hole`, which - // serves two purposes: - // 1. Protects integrity of `v` from panics in `is_less`. - // 2. Fills the remaining hole in `v` in the end. - // - // Panic safety: - // - // If `is_less` panics at any point during the process, `hole` will get dropped and - // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it - // initially held exactly once. - let mut hole = InsertionHole { src: &*tmp, dest: &mut v[1] }; - ptr::copy_nonoverlapping(&v[1], &mut v[0], 1); - - for i in 2..v.len() { - if !is_less(&v[i], &*tmp) { - break; - } - ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1); - hole.dest = &mut v[i]; - } - // `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`. - } - } - - // When dropped, copies from `src` into `dest`. - struct InsertionHole { - src: *const T, - dest: *mut T, - } - - impl Drop for InsertionHole { - fn drop(&mut self) { - // SAFETY: The caller must ensure that src and dest are correctly set. - unsafe { - ptr::copy_nonoverlapping(self.src, self.dest, 1); - } - } - } -} - /// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and /// stores the result into `v[..]`. 
/// @@ -1180,8 +1173,6 @@ pub fn merge_sort( { // Slices of up to this length get sorted using insertion sort. const MAX_INSERTION: usize = 20; - // Very short runs are extended using insertion sort to span at least this many elements. - const MIN_RUN: usize = 10; // The caller should have already checked that. debug_assert!(!T::IS_ZST); @@ -1191,9 +1182,7 @@ pub fn merge_sort( // Short arrays get sorted in-place via insertion sort to avoid allocations. if len <= MAX_INSERTION { if len >= 2 { - for i in (0..len - 1).rev() { - insert_head(&mut v[i..], is_less); - } + insertion_sort_shift_left(v, 1, is_less); } return; } @@ -1236,10 +1225,7 @@ pub fn merge_sort( // Insert some more elements into the run if it's too short. Insertion sort is faster than // merge sort on short sequences, so this significantly improves performance. - while start > 0 && end - start < MIN_RUN { - start -= 1; - insert_head(&mut v[start..end], is_less); - } + start = provide_sorted_batch(v, start, end, is_less); // Push this run onto the stack. runs.push(TimSortRun { start, len: end - start }); @@ -1467,3 +1453,34 @@ pub struct TimSortRun { len: usize, start: usize, } + +/// Takes a range as denoted by start and end, that is already sorted and extends it to the left if +/// necessary with sorts optimized for smaller ranges such as insertion sort. +#[cfg(not(no_global_oom_handling))] +fn provide_sorted_batch(v: &mut [T], mut start: usize, end: usize, is_less: &mut F) -> usize +where + F: FnMut(&T, &T) -> bool, +{ + debug_assert!(end > start); + + // This value is a balance between least comparisons and best performance, as + // influenced by for example cache locality. + const MIN_INSERTION_RUN: usize = 10; + + // Insert some more elements into the run if it's too short. Insertion sort is faster than + // merge sort on short sequences, so this significantly improves performance. + let start_found = start; + let start_end_diff = end - start; + + if start_end_diff < MIN_INSERTION_RUN && start != 0 { + // v[start_found..end] are elements that are already sorted in the input. We want to extend + // the sorted region to the left, so we push up MIN_INSERTION_RUN - 1 to the right. Which is + // more efficient that trying to push those already sorted elements to the left. + + start = if end >= MIN_INSERTION_RUN { end - MIN_INSERTION_RUN } else { 0 }; + + insertion_sort_shift_right(&mut v[start..end], start_found - start, is_less); + } + + start } From f297afa0c91243b17283be17864f2c48f91127d9 Mon Sep 17 00:00:00 2001 From: Lukas Bergdoll Date: Sun, 22 Jan 2023 12:01:06 +0100 Subject: [PATCH 123/501] Flip scanning direction of stable sort Memory pre-fetching prefers forward scanning vs backwards scanning, and the code-gen is usually better. For the most sensitive types such as integers, these are planned to be merged bidirectionally at once. So there is no benefit in scanning backwards. The largest perf gains are seen for full ascending and descending inputs, which see 1.5x speedups. Random inputs benefit too, and some patterns can lose out, but these losses are minimal.
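To make the new run detection concrete, here is a minimal safe sketch of the idea (an illustration only, assuming `Ord` elements and plain indexing; the real `find_streak` takes the caller-supplied `is_less` closure and uses unchecked indexing for performance):

    // Scan forward from the start of the slice and report how long the initial
    // non-decreasing or strictly decreasing streak is, plus whether it was
    // decreasing and therefore has to be reversed to become a sorted run.
    fn find_streak_sketch<T: Ord>(v: &[T]) -> (usize, bool) {
        if v.len() < 2 {
            return (v.len(), false);
        }
        // Decreasing streaks are strict (no equal neighbours), so reversing
        // them keeps the sort stable.
        let descending = v[1] < v[0];
        let mut end = 2;
        while end < v.len() && (v[end] < v[end - 1]) == descending {
            end += 1;
        }
        (end, descending)
    }

    fn main() {
        let mut v = vec![5, 4, 3, 7, 1];
        let (end, descending) = find_streak_sketch(&v);
        assert_eq!((end, descending), (3, true));
        if descending {
            v[..end].reverse();
        }
        // The detected streak [5, 4, 3] is now a sorted run ready for merging.
        assert_eq!(v, [3, 4, 5, 7, 1]);
    }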
--- library/core/src/slice/sort.rs | 112 ++++++++++++++++++++------------- 1 file changed, 67 insertions(+), 45 deletions(-) diff --git a/library/core/src/slice/sort.rs b/library/core/src/slice/sort.rs index 6bb53b16e6100..227db51a0b403 100644 --- a/library/core/src/slice/sort.rs +++ b/library/core/src/slice/sort.rs @@ -1196,52 +1196,37 @@ pub fn merge_sort( let mut runs = RunVec::new(run_alloc_fn, run_dealloc_fn); - // In order to identify natural runs in `v`, we traverse it backwards. That might seem like a - // strange decision, but consider the fact that merges more often go in the opposite direction - // (forwards). According to benchmarks, merging forwards is slightly faster than merging - // backwards. To conclude, identifying runs by traversing backwards improves performance. - let mut end = len; - while end > 0 { - // Find the next natural run, and reverse it if it's strictly descending. - let mut start = end - 1; - if start > 0 { - start -= 1; - - // SAFETY: The v.get_unchecked must be fed with correct inbound indicies. - unsafe { - if is_less(v.get_unchecked(start + 1), v.get_unchecked(start)) { - while start > 0 && is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) { - start -= 1; - } - v[start..end].reverse(); - } else { - while start > 0 && !is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) - { - start -= 1; - } - } - } + let mut end = 0; + let mut start = 0; + + // Scan forward. Memory pre-fetching prefers forward scanning vs backwards scanning, and the + // code-gen is usually better. For the most sensitive types such as integers, these are merged + // bidirectionally at once. So there is no benefit in scanning backwards. + while end < len { + let (streak_end, was_reversed) = find_streak(&v[start..], is_less); + end += streak_end; + if was_reversed { + v[start..end].reverse(); } // Insert some more elements into the run if it's too short. Insertion sort is faster than // merge sort on short sequences, so this significantly improves performance. - start = provide_sorted_batch(v, start, end, is_less); + end = provide_sorted_batch(v, start, end, is_less); // Push this run onto the stack. runs.push(TimSortRun { start, len: end - start }); - end = start; + start = end; // Merge some pairs of adjacent runs to satisfy the invariants. - while let Some(r) = collapse(runs.as_slice()) { - let left = runs[r + 1]; - let right = runs[r]; - // SAFETY: `buf_ptr` must hold enough capacity for the shorter of the two sides, and - // neither side may be on length 0. + while let Some(r) = collapse(runs.as_slice(), len) { + let left = runs[r]; + let right = runs[r + 1]; + let merge_slice = &mut v[left.start..right.start + right.len]; unsafe { - merge(&mut v[left.start..right.start + right.len], left.len, buf_ptr, is_less); + merge(merge_slice, left.len, buf_ptr, is_less); } - runs[r] = TimSortRun { start: left.start, len: left.len + right.len }; - runs.remove(r + 1); + runs[r + 1] = TimSortRun { start: left.start, len: left.len + right.len }; + runs.remove(r); } } @@ -1263,10 +1248,10 @@ pub fn merge_sort( // run starts at index 0, it will always demand a merge operation until the stack is fully // collapsed, in order to complete the sort. 
#[inline] - fn collapse(runs: &[TimSortRun]) -> Option { + fn collapse(runs: &[TimSortRun], stop: usize) -> Option { let n = runs.len(); if n >= 2 - && (runs[n - 1].start == 0 + && (runs[n - 1].start + runs[n - 1].len == stop || runs[n - 2].len <= runs[n - 1].len || (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len) || (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len)) @@ -1454,14 +1439,15 @@ pub struct TimSortRun { start: usize, } -/// Takes a range as denoted by start and end, that is already sorted and extends it to the left if +/// Takes a range as denoted by start and end, that is already sorted and extends it to the right if /// necessary with sorts optimized for smaller ranges such as insertion sort. #[cfg(not(no_global_oom_handling))] -fn provide_sorted_batch(v: &mut [T], mut start: usize, end: usize, is_less: &mut F) -> usize +fn provide_sorted_batch(v: &mut [T], start: usize, mut end: usize, is_less: &mut F) -> usize where F: FnMut(&T, &T) -> bool, { - debug_assert!(end > start); + let len = v.len(); + assert!(end >= start && end <= len); // This value is a balance between least comparisons and best performance, as // influenced by for example cache locality. @@ -1469,18 +1455,54 @@ where // Insert some more elements into the run if it's too short. Insertion sort is faster than // merge sort on short sequences, so this significantly improves performance. - let start_found = start; let start_end_diff = end - start; - if start_end_diff < MIN_INSERTION_RUN && start != 0 { + if start_end_diff < MIN_INSERTION_RUN && end < len { // v[start_found..end] are elements that are already sorted in the input. We want to extend // the sorted region to the left, so we push up MIN_INSERTION_RUN - 1 to the right. Which is // more efficient that trying to push those already sorted elements to the left. + end = cmp::min(start + MIN_INSERTION_RUN, len); + let presorted_start = cmp::max(start_end_diff, 1); - start = if end >= MIN_INSERTION_RUN { end - MIN_INSERTION_RUN } else { 0 }; + insertion_sort_shift_left(&mut v[start..end], presorted_start, is_less); + } - insertion_sort_shift_right(&mut v[start..end], start_found - start, is_less); + end +} + +/// Finds a streak of presorted elements starting at the beginning of the slice. Returns the first +/// value that is not part of said streak, and a bool denoting wether the streak was reversed. +/// Streaks can be increasing or decreasing. +fn find_streak(v: &[T], is_less: &mut F) -> (usize, bool) +where + F: FnMut(&T, &T) -> bool, +{ + let len = v.len(); + + if len < 2 { + return (len, false); } - start + let mut end = 2; + + // SAFETY: See below specific. + unsafe { + // SAFETY: We checked that len >= 2, so 0 and 1 are valid indices. + let assume_reverse = is_less(v.get_unchecked(1), v.get_unchecked(0)); + + // SAFETY: We know end >= 2 and check end < len. + // From that follows that accessing v at end and end - 1 is safe. 
+ if assume_reverse { + while end < len && is_less(v.get_unchecked(end), v.get_unchecked(end - 1)) { + end += 1; + } + + (end, true) + } else { + while end < len && !is_less(v.get_unchecked(end), v.get_unchecked(end - 1)) { + end += 1; + } + (end, false) + } + } } From 01d8b8947c54999eb15d88b60fa17bdde18b4875 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Mon, 23 Jan 2023 00:11:16 +0900 Subject: [PATCH 124/501] Don't escape non-snippets in assist --- .../src/handlers/move_const_to_impl.rs | 44 +------------------ crates/ide-assists/src/utils.rs | 17 ------- 2 files changed, 2 insertions(+), 59 deletions(-) diff --git a/crates/ide-assists/src/handlers/move_const_to_impl.rs b/crates/ide-assists/src/handlers/move_const_to_impl.rs index 0e3a1e652b0dc..d848fce4be821 100644 --- a/crates/ide-assists/src/handlers/move_const_to_impl.rs +++ b/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -5,10 +5,7 @@ use syntax::{ SyntaxKind, }; -use crate::{ - assist_context::{AssistContext, Assists}, - utils, -}; +use crate::assist_context::{AssistContext, Assists}; // NOTE: Code may break if the self type implements a trait that has associated const with the same // name, but it's pretty expensive to check that (`hir::Impl::all_for_type()`) and we assume that's @@ -130,9 +127,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> let const_ = const_.clone_for_update(); const_.reindent_to(indent); - let mut const_text = format!("\n{indent}{const_}{fixup}"); - utils::escape_non_snippet(&mut const_text); - builder.insert(insert_offset, const_text); + builder.insert(insert_offset, format!("\n{indent}{const_}{fixup}")); }, ) } @@ -443,39 +438,4 @@ impl S { "#, ); } - - #[test] - fn moved_const_body_is_escaped() { - // Note that the last argument is what *lsp clients would see* rather than - // what users would see. Unescaping happens thereafter. - check_assist( - move_const_to_impl, - r#" -struct S; -impl S { - fn f() -> usize { - /// doc comment - /// \\ - /// ${snippet} - const C$0: &str = "\ and $1"; - - C.len() - } -} -"#, - r#" -struct S; -impl S { - /// doc comment - /// \\\\ - /// \${snippet} - const C: &str = "\\ and \$1"; - - fn f() -> usize { - Self::C.len() - } -} -"#, - ) - } } diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index 63f467bd30860..f323ebcf7a3bd 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -208,23 +208,6 @@ pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor } } -/// Escapes text that should be rendered as-is, typically those that we're copy-pasting what the -/// users wrote. -/// -/// This function should only be used when the text doesn't contain snippet **AND** the text -/// wouldn't be included in a snippet. -pub(crate) fn escape_non_snippet(text: &mut String) { - // While we *can* escape `}`, we don't really have to in this specific case. We only need to - // escape it inside `${}` to disambiguate it from the ending token of the syntax, but after we - // escape every occurrence of `$`, we wouldn't have `${}` in the first place. - // - // This will break if the text contains snippet or it will be included in a snippet (hence doc - // comment). Compare `fn escape(buf)` in `render_snippet()` above, where the escaped text is - // included in a snippet. 
- stdx::replace(text, '\\', r"\\"); - stdx::replace(text, '$', r"\$"); -} - pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize { node.children_with_tokens() .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) From 6fcf1758feb63c7c70a40241f0ac8931a3ba46a9 Mon Sep 17 00:00:00 2001 From: The 8472 Date: Sat, 21 Jan 2023 19:28:56 +0100 Subject: [PATCH 125/501] simplify layout calculations in rawvec --- library/alloc/src/raw_vec.rs | 17 ++++++++++++----- tests/ui/hygiene/panic-location.run.stderr | 2 +- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index 5a10121bbbe4b..3751f2a245456 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -241,10 +241,15 @@ impl RawVec { if T::IS_ZST || self.cap == 0 { None } else { - // We have an allocated chunk of memory, so we can bypass runtime - // checks to get our current layout. + // We could use Layout::array here which ensures the absence of isize and usize overflows + // and could hypothetically handle differences between stride and size, but this memory + // has already been allocated so we know it can't overflow and currently rust does not + // support such types. So we can do better by skipping some checks and avoid an unwrap. + let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; unsafe { - let layout = Layout::array::(self.cap).unwrap_unchecked(); + let align = mem::align_of::(); + let size = mem::size_of::().unchecked_mul(self.cap); + let layout = Layout::from_size_align_unchecked(size, align); Some((self.ptr.cast().into(), layout)) } } @@ -426,11 +431,13 @@ impl RawVec { assert!(cap <= self.capacity(), "Tried to shrink to a larger capacity"); let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; - + // See current_memory() why this assert is here + let _: () = const { assert!(mem::size_of::() % mem::align_of::() == 0) }; let ptr = unsafe { // `Layout::array` cannot overflow here because it would have // overflowed earlier when capacity was larger. - let new_layout = Layout::array::(cap).unwrap_unchecked(); + let new_size = mem::size_of::().unchecked_mul(cap); + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); self.alloc .shrink(ptr, layout, new_layout) .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })? diff --git a/tests/ui/hygiene/panic-location.run.stderr b/tests/ui/hygiene/panic-location.run.stderr index 0b23b1cc2f451..1c6a7b02f8e77 100644 --- a/tests/ui/hygiene/panic-location.run.stderr +++ b/tests/ui/hygiene/panic-location.run.stderr @@ -1,2 +1,2 @@ -thread 'main' panicked at 'capacity overflow', library/alloc/src/raw_vec.rs:518:5 +thread 'main' panicked at 'capacity overflow', library/alloc/src/raw_vec.rs:525:5 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace From ef5ec771bb4828a8464a4b5ed9ec09b7b48826db Mon Sep 17 00:00:00 2001 From: Erik Desjardins Date: Sun, 22 Jan 2023 23:03:58 -0500 Subject: [PATCH 126/501] abi: add `AddressSpace` field to `Primitive::Pointer` ...and remove it from `PointeeInfo`, which isn't meant for this. There are still various places (marked with FIXMEs) that assume all pointers have the same size and alignment. Fixing this requires parsing non-default address spaces in the data layout string, which will be done in a followup. 
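As a rough sketch of the idea (hypothetical, simplified types for illustration
only, not rustc's actual definitions), a layout primitive that carries the
address space of the pointer it describes might look like this:

    // Hypothetical, simplified illustration -- not rustc's real types.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct AddressSpace(u32);

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Primitive {
        Int { bits: u32, signed: bool },
        F32,
        F64,
        // The address space now travels with the pointer primitive itself.
        Pointer(AddressSpace),
    }

    fn pointer_bits(prim: Primitive, default_pointer_bits: u32) -> u32 {
        match prim {
            // A real implementation would look up the pointer size for
            // `_space` in the target data layout; this sketch assumes the
            // default address space everywhere, mirroring the FIXMEs above.
            Primitive::Pointer(AddressSpace(_space)) => default_pointer_bits,
            Primitive::Int { bits, .. } => bits,
            Primitive::F32 => 32,
            Primitive::F64 => 64,
        }
    }

The sketch only shows the shape of the change; picking the right size and
alignment per address space still depends on the data layout parsing mentioned
above.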
--- src/common.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/common.rs b/src/common.rs index 2dcd42fbd8f43..63ed10cdfcc59 100644 --- a/src/common.rs +++ b/src/common.rs @@ -35,7 +35,8 @@ pub(crate) fn scalar_to_clif_type(tcx: TyCtxt<'_>, scalar: Scalar) -> Type { }, Primitive::F32 => types::F32, Primitive::F64 => types::F64, - Primitive::Pointer => pointer_ty(tcx), + // FIXME(erikdesjardins): handle non-default addrspace ptr sizes + Primitive::Pointer(_) => pointer_ty(tcx), } } From 5eff2645335e86f714a92a592f81936fead1f6a4 Mon Sep 17 00:00:00 2001 From: Lukas Bergdoll Date: Mon, 23 Jan 2023 09:12:25 +0100 Subject: [PATCH 127/501] Document missing unsafe blocks --- library/core/src/slice/sort.rs | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/library/core/src/slice/sort.rs b/library/core/src/slice/sort.rs index 227db51a0b403..fc35c46d58300 100644 --- a/library/core/src/slice/sort.rs +++ b/library/core/src/slice/sort.rs @@ -21,6 +21,9 @@ struct InsertionHole { impl Drop for InsertionHole { fn drop(&mut self) { + // SAFETY: This is a helper class. Please refer to its usage for correctness. Namely, one + // must be sure that `src` and `dst` does not overlap as required by + // `ptr::copy_nonoverlapping` and are both valid for writes. unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1); } @@ -88,6 +91,7 @@ where { debug_assert!(v.len() >= 2); + // SAFETY: caller must ensure v is at least len 2. unsafe { if is_less(v.get_unchecked(1), v.get_unchecked(0)) { let arr_ptr = v.as_mut_ptr(); @@ -153,7 +157,8 @@ where // Shift each element of the unsorted region v[i..] as far left as is needed to make v sorted. for i in offset..len { // SAFETY: we tested that `offset` must be at least 1, so this loop is only entered if len - // >= 2. + // >= 2. The range is exclusive and we know `i` must be at least 1 so this slice has at + // >least len 2. unsafe { insert_tail(&mut v[..=i], is_less); } @@ -176,9 +181,10 @@ where // Shift each element of the unsorted region v[..i] as far left as is needed to make v sorted. for i in (0..offset).rev() { - // We ensured that the slice length is always at least 2 long. - // We know that start_found will be at least one less than end, - // and the range is exclusive. Which gives us i always <= (end - 2). + // SAFETY: we tested that `offset` must be at least 1, so this loop is only entered if len + // >= 2.We ensured that the slice length is always at least 2 long. We know that start_found + // will be at least one less than end, and the range is exclusive. Which gives us i always + // <= (end - 2). unsafe { insert_head(&mut v[i..len], is_less); } @@ -1222,6 +1228,8 @@ pub fn merge_sort( let left = runs[r]; let right = runs[r + 1]; let merge_slice = &mut v[left.start..right.start + right.len]; + // SAFETY: `buf_ptr` must hold enough capacity for the shorter of the two sides, and + // neither side may be on length 0. 
unsafe { merge(merge_slice, left.len, buf_ptr, is_less); } From 84239a14543b5a22a9662bc926c9ae033067c977 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 23 Jan 2023 12:19:41 +0100 Subject: [PATCH 128/501] REplace soft breaks in markdown with spaces --- crates/ide/src/markdown_remove.rs | 27 ++++++++------------------- 1 file changed, 8 insertions(+), 19 deletions(-) diff --git a/crates/ide/src/markdown_remove.rs b/crates/ide/src/markdown_remove.rs index 07a3fe3f02b10..718868c8747b1 100644 --- a/crates/ide/src/markdown_remove.rs +++ b/crates/ide/src/markdown_remove.rs @@ -11,9 +11,8 @@ pub(crate) fn remove_markdown(markdown: &str) -> String { for event in parser { match event { Event::Text(text) | Event::Code(text) => out.push_str(&text), - Event::SoftBreak | Event::HardBreak | Event::Rule | Event::End(Tag::CodeBlock(_)) => { - out.push('\n') - } + Event::SoftBreak => out.push(' '), + Event::HardBreak | Event::Rule | Event::End(Tag::CodeBlock(_)) => out.push('\n'), Event::End(Tag::Paragraph) => { out.push('\n'); out.push('\n'); @@ -111,13 +110,9 @@ book] or the [Reference]. expect![[r#" A function or function pointer. - Functions are the primary way code is executed within Rust. Function blocks, usually just - called functions, can be defined in a variety of different places and be assigned many - different attributes and modifiers. + Functions are the primary way code is executed within Rust. Function blocks, usually just called functions, can be defined in a variety of different places and be assigned many different attributes and modifiers. - Standalone functions that just sit within a module not attached to anything else are common, - but most functions will end up being inside impl blocks, either on another type itself, or - as a trait impl for that type. + Standalone functions that just sit within a module not attached to anything else are common, but most functions will end up being inside impl blocks, either on another type itself, or as a trait impl for that type. fn standalone_function() { // code @@ -140,9 +135,7 @@ book] or the [Reference]. } } - In addition to presenting fixed types in the form of fn name(arg: type, ..) -> return_type, - functions can also declare a list of type parameters along with trait bounds that they fall - into. + In addition to presenting fixed types in the form of fn name(arg: type, ..) -> return_type, functions can also declare a list of type parameters along with trait bounds that they fall into. fn generic_function(x: T) -> (T, T, T) { (x.clone(), x.clone(), x.clone()) @@ -154,14 +147,10 @@ book] or the [Reference]. x + x + x } - Declaring trait bounds in the angle brackets is functionally identical to using a where - clause. It's up to the programmer to decide which works better in each situation, but where - tends to be better when things get longer than one line. + Declaring trait bounds in the angle brackets is functionally identical to using a where clause. It's up to the programmer to decide which works better in each situation, but where tends to be better when things get longer than one line. - Along with being made public via pub, fn can also have an extern added for use in - FFI. + Along with being made public via pub, fn can also have an extern added for use in FFI. 
- For more information on the various types of functions and how they're used, consult the Rust - book or the Reference."#]].assert_eq(&res); + For more information on the various types of functions and how they're used, consult the Rust book or the Reference."#]].assert_eq(&res); } } From b9fe5afb30d00e14aaf5d02ff12a74efb774d4f1 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 23 Jan 2023 13:10:25 +0100 Subject: [PATCH 129/501] Add a 'open server logs' button to the error notification --- crates/rust-analyzer/src/config.rs | 4 +++ crates/rust-analyzer/src/lsp_ext.rs | 7 ++++ crates/rust-analyzer/src/lsp_utils.rs | 50 +++++++++++++++++++++------ docs/dev/lsp-extensions.md | 2 +- editors/code/src/client.ts | 1 + editors/code/src/ctx.ts | 5 +++ editors/code/src/lsp_ext.ts | 1 + 7 files changed, 58 insertions(+), 12 deletions(-) diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index c4d9ad7dff573..8ea161dbdc4f3 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -989,6 +989,10 @@ impl Config { self.experimental("codeActionGroup") } + pub fn open_server_logs(&self) -> bool { + self.experimental("openServerLogs") + } + pub fn server_status_notification(&self) -> bool { self.experimental("serverStatusNotification") } diff --git a/crates/rust-analyzer/src/lsp_ext.rs b/crates/rust-analyzer/src/lsp_ext.rs index b117acd1b0f7c..08b2c837de370 100644 --- a/crates/rust-analyzer/src/lsp_ext.rs +++ b/crates/rust-analyzer/src/lsp_ext.rs @@ -151,6 +151,13 @@ impl Notification for ClearFlycheck { const METHOD: &'static str = "rust-analyzer/clearFlycheck"; } +pub enum OpenServerLogs {} + +impl Notification for OpenServerLogs { + type Params = (); + const METHOD: &'static str = "rust-analyzer/openServerLogs"; +} + #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct RunFlycheckParams { diff --git a/crates/rust-analyzer/src/lsp_utils.rs b/crates/rust-analyzer/src/lsp_utils.rs index dcaee92857abd..baa77a005e226 100644 --- a/crates/rust-analyzer/src/lsp_utils.rs +++ b/crates/rust-analyzer/src/lsp_utils.rs @@ -2,12 +2,13 @@ use std::{mem, ops::Range, sync::Arc}; use lsp_server::Notification; +use lsp_types::request::Request; use crate::{ from_proto, global_state::GlobalState, line_index::{LineEndings, LineIndex, PositionEncoding}, - LspError, + lsp_ext, LspError, }; pub(crate) fn invalid_params_error(message: String) -> LspError { @@ -46,20 +47,47 @@ impl GlobalState { /// If `additional_info` is [`Some`], appends a note to the notification telling to check the logs. /// This will always log `message` + `additional_info` to the server's error log. pub(crate) fn show_and_log_error(&mut self, message: String, additional_info: Option) { - let mut message = message; match additional_info { Some(additional_info) => { - tracing::error!("{}\n\n{}", &message, &additional_info); - if tracing::enabled!(tracing::Level::ERROR) { - message.push_str("\n\nCheck the server logs for additional info."); + tracing::error!("{}:\n{}", &message, &additional_info); + match self.config.open_server_logs() && tracing::enabled!(tracing::Level::ERROR) { + true => self.send_request::( + lsp_types::ShowMessageRequestParams { + typ: lsp_types::MessageType::ERROR, + message, + actions: Some(vec![lsp_types::MessageActionItem { + title: "Open server logs".to_owned(), + properties: Default::default(), + }]), + }, + |this, resp| { + let lsp_server::Response { error: None, result: Some(result), .. 
} = resp + else { return }; + if let Ok(Some(_item)) = crate::from_json::< + ::Result, + >( + lsp_types::request::ShowMessageRequest::METHOD, &result + ) { + this.send_notification::(()); + } + }, + ), + false => self.send_notification::( + lsp_types::ShowMessageParams { + typ: lsp_types::MessageType::ERROR, + message, + }, + ), } } - None => tracing::error!("{}", &message), - } + None => { + tracing::error!("{}", &message); - self.send_notification::( - lsp_types::ShowMessageParams { typ: lsp_types::MessageType::ERROR, message }, - ) + self.send_notification::( + lsp_types::ShowMessageParams { typ: lsp_types::MessageType::ERROR, message }, + ); + } + } } /// rust-analyzer is resilient -- if it fails, this doesn't usually affect @@ -77,7 +105,7 @@ impl GlobalState { let from_source_build = option_env!("POKE_RA_DEVS").is_some(); let profiling_enabled = std::env::var("RA_PROFILE").is_ok(); if from_source_build || profiling_enabled { - self.show_message(lsp_types::MessageType::ERROR, message) + self.show_and_log_error(message, None); } } diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md index 0f24ddbbc00e2..a794e866181df 100644 --- a/docs/dev/lsp-extensions.md +++ b/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ $DIR/unused_io_amount.rs:67:5 | +LL | s.write(b"ok").is_ok(); + | ^^^^^^^^^^^^^^^^^^^^^^ + | + = help: use `Write::write_all` instead, or handle partial writes + +error: written amount is not handled + --> $DIR/unused_io_amount.rs:68:5 + | +LL | s.write(b"err").is_err(); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: use `Write::write_all` instead, or handle partial writes + +error: read amount is not handled + --> $DIR/unused_io_amount.rs:70:5 + | +LL | s.read(&mut buf).is_ok(); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: use `Read::read_exact` instead, or handle partial reads + +error: read amount is not handled + --> $DIR/unused_io_amount.rs:71:5 + | +LL | s.read(&mut buf).is_err(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: use `Read::read_exact` instead, or handle partial reads + +error: written amount is not handled + --> $DIR/unused_io_amount.rs:75:5 + | LL | w.write(b"hello world").await.unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: use `AsyncWriteExt::write_all` instead, or handle partial writes error: read amount is not handled - --> $DIR/unused_io_amount.rs:72:5 + --> $DIR/unused_io_amount.rs:80:5 | LL | r.read(&mut buf[..]).await.unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -96,7 +128,7 @@ LL | r.read(&mut buf[..]).await.unwrap(); = help: use `AsyncReadExt::read_exact` instead, or handle partial reads error: written amount is not handled - --> $DIR/unused_io_amount.rs:85:9 + --> $DIR/unused_io_amount.rs:93:9 | LL | w.write(b"hello world").await?; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -104,7 +136,7 @@ LL | w.write(b"hello world").await?; = help: use `AsyncWriteExt::write_all` instead, or handle partial writes error: read amount is not handled - --> $DIR/unused_io_amount.rs:93:9 + --> $DIR/unused_io_amount.rs:101:9 | LL | r.read(&mut buf[..]).await.or(Err(Error::Kind))?; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -112,7 +144,7 @@ LL | r.read(&mut buf[..]).await.or(Err(Error::Kind))?; = help: use `AsyncReadExt::read_exact` instead, or handle partial reads error: written amount is not handled - --> $DIR/unused_io_amount.rs:101:5 + --> $DIR/unused_io_amount.rs:109:5 | LL | w.write(b"hello world").await.unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -120,12 +152,12 @@ LL | w.write(b"hello world").await.unwrap(); = help: use 
`AsyncWriteExt::write_all` instead, or handle partial writes error: read amount is not handled - --> $DIR/unused_io_amount.rs:106:5 + --> $DIR/unused_io_amount.rs:114:5 | LL | r.read(&mut buf[..]).await.unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: use `AsyncReadExt::read_exact` instead, or handle partial reads -error: aborting due to 16 previous errors +error: aborting due to 20 previous errors From 90b1222b0bf58df2cdf87c48a3da09dc06d7e5d4 Mon Sep 17 00:00:00 2001 From: bvanjoi Date: Mon, 23 Jan 2023 23:53:30 +0800 Subject: [PATCH 146/501] fix(ide-assists): unwrap block when it parent is let stmt --- .../ide-assists/src/handlers/unwrap_block.rs | 162 +++++++++++++----- 1 file changed, 116 insertions(+), 46 deletions(-) diff --git a/crates/ide-assists/src/handlers/unwrap_block.rs b/crates/ide-assists/src/handlers/unwrap_block.rs index 53cdac03a3358..33b19a354b9aa 100644 --- a/crates/ide-assists/src/handlers/unwrap_block.rs +++ b/crates/ide-assists/src/handlers/unwrap_block.rs @@ -2,6 +2,7 @@ use syntax::{ ast::{ self, edit::{AstNodeEdit, IndentLevel}, + make, }, AstNode, SyntaxKind, TextRange, T, }; @@ -37,61 +38,89 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))? } - if matches!(parent.kind(), SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT | SyntaxKind::LET_STMT) - { - return acc.add(assist_id, assist_label, target, |builder| { + let kind = parent.kind(); + if matches!(kind, SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT) { + acc.add(assist_id, assist_label, target, |builder| { builder.replace(block.syntax().text_range(), update_expr_string(block.to_string())); - }); - } - - let parent = ast::Expr::cast(parent)?; - - match parent.clone() { - ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (), - ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)), - ast::Expr::IfExpr(if_expr) => { - let then_branch = if_expr.then_branch()?; - if then_branch == block { - if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) { - // For `else if` blocks - let ancestor_then_branch = ancestor.then_branch()?; - + }) + } else if matches!(kind, SyntaxKind::LET_STMT) { + let parent = ast::LetStmt::cast(parent)?; + let pattern = ast::Pat::cast(parent.syntax().first_child()?)?; + let ty = parent.ty(); + let list = block.stmt_list()?; + let replaced = match list.syntax().last_child() { + Some(last) => { + let stmts: Vec = list.statements().collect(); + let initializer = ast::Expr::cast(last.clone())?; + let let_stmt = make::let_stmt(pattern, ty, Some(initializer)); + if stmts.len() > 0 { + let block = make::block_expr(stmts, None); + format!( + "{}\n {}", + update_expr_string(block.to_string()), + let_stmt.to_string() + ) + } else { + let_stmt.to_string() + } + } + None => { + let empty_tuple = make::expr_tuple([]); + make::let_stmt(pattern, ty, Some(empty_tuple)).to_string() + } + }; + acc.add(assist_id, assist_label, target, |builder| { + builder.replace(parent.syntax().text_range(), replaced); + }) + } else { + let parent = ast::Expr::cast(parent)?; + match parent.clone() { + ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (), + ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)), + ast::Expr::IfExpr(if_expr) => { + let then_branch = if_expr.then_branch()?; + if then_branch == block { + if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) { + // For `else if` blocks + 
let ancestor_then_branch = ancestor.then_branch()?; + + return acc.add(assist_id, assist_label, target, |edit| { + let range_to_del_else_if = TextRange::new( + ancestor_then_branch.syntax().text_range().end(), + l_curly_token.text_range().start(), + ); + let range_to_del_rest = TextRange::new( + then_branch.syntax().text_range().end(), + if_expr.syntax().text_range().end(), + ); + + edit.delete(range_to_del_rest); + edit.delete(range_to_del_else_if); + edit.replace( + target, + update_expr_string_without_newline(then_branch.to_string()), + ); + }); + } + } else { return acc.add(assist_id, assist_label, target, |edit| { - let range_to_del_else_if = TextRange::new( - ancestor_then_branch.syntax().text_range().end(), - l_curly_token.text_range().start(), - ); - let range_to_del_rest = TextRange::new( + let range_to_del = TextRange::new( then_branch.syntax().text_range().end(), - if_expr.syntax().text_range().end(), + l_curly_token.text_range().start(), ); - edit.delete(range_to_del_rest); - edit.delete(range_to_del_else_if); - edit.replace( - target, - update_expr_string_without_newline(then_branch.to_string()), - ); + edit.delete(range_to_del); + edit.replace(target, update_expr_string_without_newline(block.to_string())); }); } - } else { - return acc.add(assist_id, assist_label, target, |edit| { - let range_to_del = TextRange::new( - then_branch.syntax().text_range().end(), - l_curly_token.text_range().start(), - ); - - edit.delete(range_to_del); - edit.replace(target, update_expr_string_without_newline(block.to_string())); - }); } - } - _ => return None, - }; + _ => return None, + }; - acc.add(assist_id, assist_label, target, |builder| { - builder.replace(parent.syntax().text_range(), update_expr_string(block.to_string())); - }) + acc.add(assist_id, assist_label, target, |builder| { + builder.replace(parent.syntax().text_range(), update_expr_string(block.to_string())); + }) + } } fn update_expr_string(expr_string: String) -> String { @@ -724,6 +753,19 @@ fn main() -> i32 { check_assist( unwrap_block, r#" +fn main() { + let x = {$0}; +} +"#, + r#" +fn main() { + let x = (); +} +"#, + ); + check_assist( + unwrap_block, + r#" fn main() { let x = {$0 bar @@ -734,6 +776,34 @@ fn main() { fn main() { let x = bar; } +"#, + ); + check_assist( + unwrap_block, + r#" +fn main() -> i32 { + let _ = {$01; 2}; +} +"#, + r#" +fn main() -> i32 { + 1; + let _ = 2; +} +"#, + ); + check_assist( + unwrap_block, + r#" +fn main() -> i32 { + let mut a = {$01; 2}; +} +"#, + r#" +fn main() -> i32 { + 1; + let mut a = 2; +} "#, ); } From e6ad8a2edcbe5e9614f71308d43f6fb0cbf7ceef Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 25 Jan 2023 09:17:49 +0100 Subject: [PATCH 147/501] fix: config substitution failing extension activation --- editors/code/src/config.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index ce1142df3a812..114abf062b91c 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -268,8 +268,6 @@ export function substituteVSCodeVariables(resp: T): T { res[key] = substituteVSCodeVariables(val); } return res as T; - } else if (Is.func(resp)) { - throw new Error("Unexpected function type in substitution"); } return resp; } From 18366145b0150be81e39e9c6e93f2cdfe2b18582 Mon Sep 17 00:00:00 2001 From: Michael Eliachevitch Date: Wed, 25 Jan 2023 12:50:58 +0100 Subject: [PATCH 148/501] Update documentation for emacs and split for LSP and Eglot modes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit Emacs has now two LSP clients, the more minimalistic and lightweight Eglot and the extensive though a bit bloated LSP-Mode. Eglot will soon be shipped with Emacs29. Both have rust-analyzer enabled by default and require no further setup then just being installed and enabled. `lsp-rust.el` is not required anymore. The base-installation for each of those modes is so easy now that I don't think an enumerated list is necessary, both package can be installed via the standard `M-x package-install` and the installation is a one-liner that I provide. Configuration mostly comes into play for support the rust-analyzer extensions to the LSP protocol, which are built into LSP mode and require an extension-package for Eglot. But for the configuration beyond the base configuration I link against official documentation, quickstart guides and documentation for the lsp extensions, to avoid showing outdated information here. This commit is mostly a duplicate of a PR [1] that I made against the rust-analyzer github project. [1]: https://github.com/rust-analyzer/rust-analyzer.github.io/pull/197, https://github.com/rust-analyzer/rust-analyzer.github.io/pull/197/commits/7ff0113006d71a2114cf233422366bee6a5457c2 Spelling: Space before version number in Emacs 29 in manual Co-authored-by: Laurențiu Nicola --- docs/user/manual.adoc | 41 ++++++++++++++++++++++++++++++++++------- 1 file changed, 34 insertions(+), 7 deletions(-) diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index b0c3f3e63c584..2cd1188c9df33 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -234,16 +234,43 @@ $ brew install rust-analyzer === Emacs -Note this excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm]. - Prerequisites: You have installed the <>. -Emacs support is maintained as part of the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP] package in https://github.com/emacs-lsp/lsp-mode/blob/master/lsp-rust.el[lsp-rust.el]. +To use `rust-analyzer`, you need to install and enable one of the two popular two popular LSP client implementations for Emacs, https://github.com/joaotavora/eglot[Eglot] or https://github.com/emacs-lsp/lsp-mode[LSP Mode]. Both enable `rust-analyzer` by default in rust buffers if it is available. + +==== Eglot + +Eglot is the more minimalistic and lightweight LSP client for Emacs, integrates well with existing Emacs functionality and will be built into Emacs starting from release 29. + +After installing Eglot, e.g. via `M-x package-install` (not needed from Emacs 29), you can enable it via the `M-x eglot` command or load it automatically in `rust-mode` via + +[source,emacs-lisp] +---- +(add-hook 'rust-mode-hook 'eglot-ensure) +---- + +For more detailed instructions and options see the https://joaotavora.github.io/eglot[Eglot manual] (also available from Emacs via `M-x info`) and the +https://github.com/joaotavora/eglot/blob/master/README.md[Eglot readme]. + +Eglot does not support the rust-analyzer extensions to the language-server protocol and does not aim to do so in the future. The https://github.com/nemethf/eglot-x#rust-analyzer-extensions[eglot-x] package adds experimental support for those LSP extensions. + +==== LSP Mode + +LSP-mode is the original LSP-client for emacs. Compared to Eglot it has a larger codebase and supports more features, like LSP protocol extensions. +With extension packages like https://github.com/emacs-lsp/lsp-mode[LSP UI] it offers a lot of visual eyecandy. 
+Further it integrates well with https://github.com/emacs-lsp/dap-mode[DAP mode] for support of the Debug Adapter Protocol. + +You can install LSP-mode via `M-x package-install` and then run it via the `M-x lsp` command or load it automatically in rust buffers with + +[source,emacs-lisp] +---- +(add-hook 'rust-mode-hook 'lsp-deferred) +---- + +For more information on how to set up LSP mode and its extension package see the instructions in the https://emacs-lsp.github.io/lsp-mode/page/installation[LSP mode manual]. +Also see the https://emacs-lsp.github.io/lsp-mode/page/lsp-rust-analyzer/[rust-analyzer section] for `rust-analyzer` specific options and commands, which you can optionally bind to keys. -1. Install the most recent version of `emacs-lsp` package by following the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP instructions]. -2. Set `lsp-rust-server` to `'rust-analyzer`. -3. Run `lsp` in a Rust buffer. -4. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys. +Note the excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm] on how to set-up Emacs for Rust development with LSP mode and several other packages. === Vim/NeoVim From d712e529405b0ef5719c81ae620c88a97db78d93 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 25 Jan 2023 14:46:06 +0100 Subject: [PATCH 149/501] fix: Fix process-changes not deduplicating changes correctly --- crates/proc-macro-api/src/lib.rs | 2 +- crates/rust-analyzer/src/global_state.rs | 36 ++++++++++++++++-------- crates/rust-analyzer/src/reload.rs | 4 +-- crates/vfs/src/lib.rs | 3 +- 4 files changed, 30 insertions(+), 15 deletions(-) diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 7921fda331eed..52f976e4576af 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -121,7 +121,7 @@ impl ProcMacroServer { } pub fn load_dylib(&self, dylib: MacroDylib) -> Result, ServerError> { - let _p = profile::span("ProcMacroClient::by_dylib_path"); + let _p = profile::span("ProcMacroClient::load_dylib"); let macros = self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?; diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index c6f4e9ce07f07..de11abdcf829f 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -3,7 +3,7 @@ //! //! Each tick provides an immutable snapshot of the state as `WorldSnapshot`. 
-use std::{sync::Arc, time::Instant}; +use std::{mem, sync::Arc, time::Instant}; use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; @@ -197,29 +197,41 @@ impl GlobalState { // We need to fix up the changed events a bit, if we have a create or modify for a file // id that is followed by a delete we actually no longer observe the file text from the // create or modify which may cause problems later on + let mut collapsed_create_delete = false; changed_files.dedup_by(|a, b| { use vfs::ChangeKind::*; + let has_collapsed_create_delete = mem::replace(&mut collapsed_create_delete, false); + if a.file_id != b.file_id { return false; } - match (a.change_kind, b.change_kind) { + // true => delete the second element (a), we swap them here as they are inverted by dedup_by + match (b.change_kind, a.change_kind) { // duplicate can be merged (Create, Create) | (Modify, Modify) | (Delete, Delete) => true, // just leave the create, modify is irrelevant - (Create, Modify) => { - std::mem::swap(a, b); + (Create, Modify) => true, + // modify becomes irrelevant if the file is deleted + (Modify, Delete) => { + mem::swap(a, b); + true + } + // Remove the create message, and in the following loop, also remove the delete + (Create, Delete) => { + collapsed_create_delete = true; + b.change_kind = Delete; + true + } + // trailing delete from earlier + (Delete, Create | Modify) if has_collapsed_create_delete => { + b.change_kind = Create; true } - // modify becomes irrelevant if the file is deleted - (Modify, Delete) => true, - // we should fully remove this occurrence, - // but leaving just a delete works as well - (Create, Delete) => true, // this is equivalent to a modify (Delete, Create) => { - a.change_kind = Modify; + b.change_kind = Modify; true } // can't really occur @@ -227,7 +239,9 @@ impl GlobalState { (Delete, Modify) => false, } }); - + if collapsed_create_delete { + changed_files.pop(); + } for file in &changed_files { if let Some(path) = vfs.file_path(file.file_id).as_path() { let path = path.to_path_buf(); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 9bbce70ec0a8f..3d7342d191378 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -362,7 +362,7 @@ impl GlobalState { let loader = &mut self.loader; let mem_docs = &self.mem_docs; let mut load = move |path: &AbsPath| { - let _p = profile::span("GlobalState::load"); + let _p = profile::span("switch_workspaces::load"); let vfs_path = vfs::VfsPath::from(path.to_path_buf()); if !mem_docs.contains(&vfs_path) { let contents = loader.handle.load_sync(path); @@ -584,10 +584,10 @@ pub(crate) fn load_proc_macro( path: &AbsPath, dummy_replace: &[Box], ) -> ProcMacroLoadResult { + let server = server.map_err(ToOwned::to_owned)?; let res: Result, String> = (|| { let dylib = MacroDylib::new(path.to_path_buf()) .map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?; - let server = server.map_err(ToOwned::to_owned)?; let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?; if vec.is_empty() { return Err("proc macro library returned no proc macros".to_string()); diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs index c61f30387b70c..14972d2907416 100644 --- a/crates/vfs/src/lib.rs +++ b/crates/vfs/src/lib.rs @@ -75,6 +75,7 @@ pub struct Vfs { } /// Changed file in the [`Vfs`]. 
+#[derive(Debug)] pub struct ChangedFile { /// Id of the changed file pub file_id: FileId, @@ -161,9 +162,9 @@ impl Vfs { let file_id = self.alloc_file_id(path); let change_kind = match (&self.get(file_id), &contents) { (None, None) => return false, + (Some(old), Some(new)) if old == new => return false, (None, Some(_)) => ChangeKind::Create, (Some(_), None) => ChangeKind::Delete, - (Some(old), Some(new)) if old == new => return false, (Some(_), Some(_)) => ChangeKind::Modify, }; From 7cb0cfa3aa08546e78aeff5d5f447d63d81c5378 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Wed, 25 Jan 2023 15:02:52 +0100 Subject: [PATCH 150/501] Try to fix auto-publishing of lib crates --- .../{publish.yml => autopublish.yaml} | 4 +-- .github/workflows/publish-libs.yaml | 35 +++++++++++++++++++ 2 files changed, 37 insertions(+), 2 deletions(-) rename .github/workflows/{publish.yml => autopublish.yaml} (97%) create mode 100644 .github/workflows/publish-libs.yaml diff --git a/.github/workflows/publish.yml b/.github/workflows/autopublish.yaml similarity index 97% rename from .github/workflows/publish.yml rename to .github/workflows/autopublish.yaml index 73e62ab32c6c5..279f86b458dff 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/autopublish.yaml @@ -1,4 +1,4 @@ -name: publish +name: autopublish on: workflow_dispatch: # We can add version input when 1.0 is released and scheduled releases are removed @@ -25,7 +25,7 @@ jobs: - name: Install cargo-workspaces run: cargo install cargo-workspaces - - name: Release + - name: Publish Crates env: CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} PATCH: ${{ github.run_number }} diff --git a/.github/workflows/publish-libs.yaml b/.github/workflows/publish-libs.yaml new file mode 100644 index 0000000000000..1b843fff1a4a1 --- /dev/null +++ b/.github/workflows/publish-libs.yaml @@ -0,0 +1,35 @@ +name: publish-libs +on: + workflow_dispatch: + push: + branches: + - main + paths: + - 'lib/**' + +jobs: + publish-libs: + name: publish + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Install Rust toolchain + run: rustup update --no-self-update stable + + - name: Install cargo-workspaces + run: cargo install cargo-workspaces + + - name: Publish Crates + env: + CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} + shell: bash + run: | + git config --global user.email "runner@gha.local" + git config --global user.name "Github Action" + # Remove r-a crates from the workspaces so we don't auto-publish them as well + sed -i 's/ "crates\/\*"//' ./Cargo.toml + cargo workspaces publish --yes --exact --from-git --no-git-commit --allow-dirty From 89e90ede57280a7b79d746b08baf5bb32f91de24 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Wed, 25 Jan 2023 15:19:40 +0100 Subject: [PATCH 151/501] Vendor newer version of cranelift-native It fixes a bug that caused compilation on 32bit x86 to fail --- Cargo.lock | 1 - Cargo.toml | 4 +- src/cranelift_native.rs | 268 ++++++++++++++++++++++++++++++++++++++++ src/lib.rs | 2 + 4 files changed, 273 insertions(+), 2 deletions(-) create mode 100644 src/cranelift_native.rs diff --git a/Cargo.lock b/Cargo.lock index 48800725d2c13..50249ea1bdb49 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -333,7 +333,6 @@ dependencies = [ "cranelift-frontend", "cranelift-jit", "cranelift-module", - "cranelift-native", "cranelift-object", "gimli", "indexmap", diff --git a/Cargo.toml b/Cargo.toml index 
eadb4438bc992..34117c2886feb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,7 +18,9 @@ crate-type = ["dylib"] cranelift-codegen = { version = "0.92", features = ["unwind", "all-arch"] } cranelift-frontend = { version = "0.92" } cranelift-module = { version = "0.92" } -cranelift-native = { version = "0.92" } +# NOTE vendored as src/cranelift_native.rs +# FIXME revert back to the external crate with Cranelift 0.93 +#cranelift-native = { version = "0.92" } cranelift-jit = { version = "0.92", optional = true } cranelift-object = { version = "0.92" } target-lexicon = "0.12.0" diff --git a/src/cranelift_native.rs b/src/cranelift_native.rs new file mode 100644 index 0000000000000..7c0ca1adc2d48 --- /dev/null +++ b/src/cranelift_native.rs @@ -0,0 +1,268 @@ +// Vendored from https://github.com/bytecodealliance/wasmtime/blob/b58a197d33f044193c3d608010f5e6ec394ac07e/cranelift/native/src/lib.rs +// which is licensed as +// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception +// unlike rustc_codegen_cranelift itself. +// FIXME revert back to the external crate with Cranelift 0.93 +#![allow(warnings)] + +//! Performs autodetection of the host for the purposes of running +//! Cranelift to generate code to run on the same machine. + +#![deny( + missing_docs, + trivial_numeric_casts, + unused_extern_crates, + unstable_features +)] +#![warn(unused_import_braces)] +#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::new_without_default))] +#![cfg_attr( + feature = "cargo-clippy", + warn( + clippy::float_arithmetic, + clippy::mut_mut, + clippy::nonminimal_bool, + clippy::map_unwrap_or, + clippy::clippy::print_stdout, + clippy::unicode_not_nfc, + clippy::use_self + ) +)] + +use cranelift_codegen::isa; +use target_lexicon::Triple; + +/// Return an `isa` builder configured for the current host +/// machine, or `Err(())` if the host machine is not supported +/// in the current configuration. +pub fn builder() -> Result { + builder_with_options(true) +} + +/// Return an `isa` builder configured for the current host +/// machine, or `Err(())` if the host machine is not supported +/// in the current configuration. +/// +/// Selects the given backend variant specifically; this is +/// useful when more than oen backend exists for a given target +/// (e.g., on x86-64). +pub fn builder_with_options(infer_native_flags: bool) -> Result { + let mut isa_builder = isa::lookup(Triple::host()).map_err(|err| match err { + isa::LookupError::SupportDisabled => "support for architecture disabled at compile time", + isa::LookupError::Unsupported => "unsupported architecture", + })?; + + #[cfg(target_arch = "x86_64")] + { + use cranelift_codegen::settings::Configurable; + + if !std::is_x86_feature_detected!("sse2") { + return Err("x86 support requires SSE2"); + } + + if !infer_native_flags { + return Ok(isa_builder); + } + + // These are temporarily enabled by default (see #3810 for + // more) so that a default-constructed `Flags` can work with + // default Wasmtime features. Otherwise, the user must + // explicitly use native flags or turn these on when on x86-64 + // platforms to avoid a configuration panic. In order for the + // "enable if detected" logic below to work, we must turn them + // *off* (differing from the default) and then re-enable below + // if present. 
+ isa_builder.set("has_sse3", "false").unwrap(); + isa_builder.set("has_ssse3", "false").unwrap(); + isa_builder.set("has_sse41", "false").unwrap(); + isa_builder.set("has_sse42", "false").unwrap(); + + if std::is_x86_feature_detected!("sse3") { + isa_builder.enable("has_sse3").unwrap(); + } + if std::is_x86_feature_detected!("ssse3") { + isa_builder.enable("has_ssse3").unwrap(); + } + if std::is_x86_feature_detected!("sse4.1") { + isa_builder.enable("has_sse41").unwrap(); + } + if std::is_x86_feature_detected!("sse4.2") { + isa_builder.enable("has_sse42").unwrap(); + } + if std::is_x86_feature_detected!("popcnt") { + isa_builder.enable("has_popcnt").unwrap(); + } + if std::is_x86_feature_detected!("avx") { + isa_builder.enable("has_avx").unwrap(); + } + if std::is_x86_feature_detected!("avx2") { + isa_builder.enable("has_avx2").unwrap(); + } + if std::is_x86_feature_detected!("fma") { + isa_builder.enable("has_fma").unwrap(); + } + if std::is_x86_feature_detected!("bmi1") { + isa_builder.enable("has_bmi1").unwrap(); + } + if std::is_x86_feature_detected!("bmi2") { + isa_builder.enable("has_bmi2").unwrap(); + } + if std::is_x86_feature_detected!("avx512bitalg") { + isa_builder.enable("has_avx512bitalg").unwrap(); + } + if std::is_x86_feature_detected!("avx512dq") { + isa_builder.enable("has_avx512dq").unwrap(); + } + if std::is_x86_feature_detected!("avx512f") { + isa_builder.enable("has_avx512f").unwrap(); + } + if std::is_x86_feature_detected!("avx512vl") { + isa_builder.enable("has_avx512vl").unwrap(); + } + if std::is_x86_feature_detected!("avx512vbmi") { + isa_builder.enable("has_avx512vbmi").unwrap(); + } + if std::is_x86_feature_detected!("lzcnt") { + isa_builder.enable("has_lzcnt").unwrap(); + } + } + + #[cfg(target_arch = "aarch64")] + { + use cranelift_codegen::settings::Configurable; + + if !infer_native_flags { + return Ok(isa_builder); + } + + if std::arch::is_aarch64_feature_detected!("lse") { + isa_builder.enable("has_lse").unwrap(); + } + + if std::arch::is_aarch64_feature_detected!("paca") { + isa_builder.enable("has_pauth").unwrap(); + } + + if cfg!(target_os = "macos") { + // Pointer authentication is always available on Apple Silicon. + isa_builder.enable("sign_return_address").unwrap(); + // macOS enforces the use of the B key for return addresses. + isa_builder.enable("sign_return_address_with_bkey").unwrap(); + } + } + + // There is no is_s390x_feature_detected macro yet, so for now + // we use getauxval from the libc crate directly. + #[cfg(all(target_arch = "s390x", target_os = "linux"))] + { + use cranelift_codegen::settings::Configurable; + + if !infer_native_flags { + return Ok(isa_builder); + } + + let v = unsafe { libc::getauxval(libc::AT_HWCAP) }; + const HWCAP_S390X_VXRS_EXT2: libc::c_ulong = 32768; + if (v & HWCAP_S390X_VXRS_EXT2) != 0 { + isa_builder.enable("has_vxrs_ext2").unwrap(); + // There is no separate HWCAP bit for mie2, so assume + // that any machine with vxrs_ext2 also has mie2. + isa_builder.enable("has_mie2").unwrap(); + } + } + + // `is_riscv_feature_detected` is nightly only for now, use + // getauxval from the libc crate directly as a temporary measure. 
+ #[cfg(all(target_arch = "riscv64", target_os = "linux"))] + { + use cranelift_codegen::settings::Configurable; + + if !infer_native_flags { + return Ok(isa_builder); + } + + let v = unsafe { libc::getauxval(libc::AT_HWCAP) }; + + const HWCAP_RISCV_EXT_A: libc::c_ulong = 1 << (b'a' - b'a'); + const HWCAP_RISCV_EXT_C: libc::c_ulong = 1 << (b'c' - b'a'); + const HWCAP_RISCV_EXT_D: libc::c_ulong = 1 << (b'd' - b'a'); + const HWCAP_RISCV_EXT_F: libc::c_ulong = 1 << (b'f' - b'a'); + const HWCAP_RISCV_EXT_M: libc::c_ulong = 1 << (b'm' - b'a'); + const HWCAP_RISCV_EXT_V: libc::c_ulong = 1 << (b'v' - b'a'); + + if (v & HWCAP_RISCV_EXT_A) != 0 { + isa_builder.enable("has_a").unwrap(); + } + + if (v & HWCAP_RISCV_EXT_C) != 0 { + isa_builder.enable("has_c").unwrap(); + } + + if (v & HWCAP_RISCV_EXT_D) != 0 { + isa_builder.enable("has_d").unwrap(); + } + + if (v & HWCAP_RISCV_EXT_F) != 0 { + isa_builder.enable("has_f").unwrap(); + + // TODO: There doesn't seem to be a bit associated with this extension + // rust enables it with the `f` extension: + // https://github.com/rust-lang/stdarch/blob/790411f93c4b5eada3c23abb4c9a063fb0b24d99/crates/std_detect/src/detect/os/linux/riscv.rs#L43 + isa_builder.enable("has_zicsr").unwrap(); + } + + if (v & HWCAP_RISCV_EXT_M) != 0 { + isa_builder.enable("has_m").unwrap(); + } + + if (v & HWCAP_RISCV_EXT_V) != 0 { + isa_builder.enable("has_v").unwrap(); + } + + // TODO: ZiFencei does not have a bit associated with it + // TODO: Zbkb does not have a bit associated with it + } + + // squelch warnings about unused mut/variables on some platforms. + drop(&mut isa_builder); + drop(infer_native_flags); + + Ok(isa_builder) +} + +#[cfg(test)] +mod tests { + use super::builder; + use cranelift_codegen::isa::CallConv; + use cranelift_codegen::settings; + + #[test] + fn test() { + if let Ok(isa_builder) = builder() { + let flag_builder = settings::builder(); + let isa = isa_builder + .finish(settings::Flags::new(flag_builder)) + .unwrap(); + + if cfg!(all(target_os = "macos", target_arch = "aarch64")) { + assert_eq!(isa.default_call_conv(), CallConv::AppleAarch64); + } else if cfg!(any(unix, target_os = "nebulet")) { + assert_eq!(isa.default_call_conv(), CallConv::SystemV); + } else if cfg!(windows) { + assert_eq!(isa.default_call_conv(), CallConv::WindowsFastcall); + } + + if cfg!(target_pointer_width = "64") { + assert_eq!(isa.pointer_bits(), 64); + } else if cfg!(target_pointer_width = "32") { + assert_eq!(isa.pointer_bits(), 32); + } else if cfg!(target_pointer_width = "16") { + assert_eq!(isa.pointer_bits(), 16); + } + } + } +} + +/// Version number of this crate. 
+pub const VERSION: &str = env!("CARGO_PKG_VERSION"); diff --git a/src/lib.rs b/src/lib.rs index 70d0cc339a80c..d3868730557b7 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -57,6 +57,8 @@ mod compiler_builtins; mod concurrency_limiter; mod config; mod constant; +// FIXME revert back to the external crate with Cranelift 0.93 +mod cranelift_native; mod debuginfo; mod discriminant; mod driver; From ff56716d034ba6c3c47e177c721bc8dfbc09a057 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Wed, 25 Jan 2023 15:56:22 +0100 Subject: [PATCH 152/501] Fix CI --- src/cranelift_native.rs | 28 ++++------------------------ 1 file changed, 4 insertions(+), 24 deletions(-) diff --git a/src/cranelift_native.rs b/src/cranelift_native.rs index 7c0ca1adc2d48..6c4efca442448 100644 --- a/src/cranelift_native.rs +++ b/src/cranelift_native.rs @@ -1,34 +1,16 @@ // Vendored from https://github.com/bytecodealliance/wasmtime/blob/b58a197d33f044193c3d608010f5e6ec394ac07e/cranelift/native/src/lib.rs // which is licensed as // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception -// unlike rustc_codegen_cranelift itself. +// unlike rustc_codegen_cranelift itself. Also applies a small change to remove #![cfg_attr] that +// rust's CI complains about and to fix formatting to match rustc. // FIXME revert back to the external crate with Cranelift 0.93 #![allow(warnings)] //! Performs autodetection of the host for the purposes of running //! Cranelift to generate code to run on the same machine. -#![deny( - missing_docs, - trivial_numeric_casts, - unused_extern_crates, - unstable_features -)] +#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates, unstable_features)] #![warn(unused_import_braces)] -#![cfg_attr(feature = "clippy", plugin(clippy(conf_file = "../../clippy.toml")))] -#![cfg_attr(feature = "cargo-clippy", allow(clippy::new_without_default))] -#![cfg_attr( - feature = "cargo-clippy", - warn( - clippy::float_arithmetic, - clippy::mut_mut, - clippy::nonminimal_bool, - clippy::map_unwrap_or, - clippy::clippy::print_stdout, - clippy::unicode_not_nfc, - clippy::use_self - ) -)] use cranelift_codegen::isa; use target_lexicon::Triple; @@ -241,9 +223,7 @@ mod tests { fn test() { if let Ok(isa_builder) = builder() { let flag_builder = settings::builder(); - let isa = isa_builder - .finish(settings::Flags::new(flag_builder)) - .unwrap(); + let isa = isa_builder.finish(settings::Flags::new(flag_builder)).unwrap(); if cfg!(all(target_os = "macos", target_arch = "aarch64")) { assert_eq!(isa.default_call_conv(), CallConv::AppleAarch64); From 4a677e91aa70a4f0a06c2b26a5d21afe886fb63e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 25 Jan 2023 21:52:28 +0100 Subject: [PATCH 153/501] minor: Remove unnecessary arg in LowerCtx::ast_id --- crates/hir-def/src/body/lower.rs | 4 ++-- crates/hir-def/src/type_ref.rs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 4c29e16b2b813..a78fa91f53bd0 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -67,9 +67,9 @@ impl<'a> LowerCtx<'a> { Path::from_src(ast, self) } - pub(crate) fn ast_id(&self, db: &dyn DefDatabase, item: &N) -> Option> { + pub(crate) fn ast_id(&self, item: &N) -> Option> { let &(file_id, ref ast_id_map) = self.ast_id_map.as_ref()?; - let ast_id_map = ast_id_map.get_or_init(|| db.ast_id_map(file_id)); + let ast_id_map = ast_id_map.get_or_init(|| self.db.ast_id_map(file_id)); 
Some(InFile::new(file_id, ast_id_map.ast_id(item))) } } diff --git a/crates/hir-def/src/type_ref.rs b/crates/hir-def/src/type_ref.rs index 0149fdaa43cf3..8fa12c7aafda7 100644 --- a/crates/hir-def/src/type_ref.rs +++ b/crates/hir-def/src/type_ref.rs @@ -240,7 +240,7 @@ impl TypeRef { TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) } ast::Type::MacroType(mt) => match mt.macro_call() { - Some(mc) => ctx.ast_id(ctx.db, &mc).map(TypeRef::Macro).unwrap_or(TypeRef::Error), + Some(mc) => ctx.ast_id(&mc).map(TypeRef::Macro).unwrap_or(TypeRef::Error), None => TypeRef::Error, }, } From 986f40fab0828257016477174f9a6036a4a7f4d3 Mon Sep 17 00:00:00 2001 From: Alex Macleod Date: Thu, 26 Jan 2023 15:30:44 +0000 Subject: [PATCH 154/501] `invalid_regex`: Show full error when string value doesn't match source --- clippy_lints/src/regex.rs | 89 +++++++++++++++++++-------------------- tests/ui/regex.rs | 4 ++ tests/ui/regex.stderr | 66 +++++++++++++++++++++-------- 3 files changed, 97 insertions(+), 62 deletions(-) diff --git a/clippy_lints/src/regex.rs b/clippy_lints/src/regex.rs index 1fda58fa54de1..9e6c6c73d4fe7 100644 --- a/clippy_lints/src/regex.rs +++ b/clippy_lints/src/regex.rs @@ -1,5 +1,8 @@ +use std::fmt::Display; + use clippy_utils::consts::{constant, Constant}; use clippy_utils::diagnostics::{span_lint, span_lint_and_help}; +use clippy_utils::source::snippet_opt; use clippy_utils::{match_def_path, paths}; use if_chain::if_chain; use rustc_ast::ast::{LitKind, StrStyle}; @@ -77,13 +80,45 @@ impl<'tcx> LateLintPass<'tcx> for Regex { } } -#[must_use] -fn str_span(base: Span, c: regex_syntax::ast::Span, offset: u8) -> Span { - let offset = u32::from(offset); - let end = base.lo() + BytePos(u32::try_from(c.end.offset).expect("offset too large") + offset); - let start = base.lo() + BytePos(u32::try_from(c.start.offset).expect("offset too large") + offset); - assert!(start <= end); - Span::new(start, end, base.ctxt(), base.parent()) +fn lint_syntax_error(cx: &LateContext<'_>, error: ®ex_syntax::Error, unescaped: &str, base: Span, offset: u8) { + let parts: Option<(_, _, &dyn Display)> = match &error { + regex_syntax::Error::Parse(e) => Some((e.span(), e.auxiliary_span(), e.kind())), + regex_syntax::Error::Translate(e) => Some((e.span(), None, e.kind())), + _ => None, + }; + + let convert_span = |regex_span: ®ex_syntax::ast::Span| { + let offset = u32::from(offset); + let start = base.lo() + BytePos(u32::try_from(regex_span.start.offset).expect("offset too large") + offset); + let end = base.lo() + BytePos(u32::try_from(regex_span.end.offset).expect("offset too large") + offset); + + Span::new(start, end, base.ctxt(), base.parent()) + }; + + if let Some((primary, auxiliary, kind)) = parts + && let Some(literal_snippet) = snippet_opt(cx, base) + && let Some(inner) = literal_snippet.get(offset as usize..) 
+ // Only convert to native rustc spans if the parsed regex matches the + // source snippet exactly, to ensure the span offsets are correct + && inner.get(..unescaped.len()) == Some(unescaped) + { + let spans = if let Some(auxiliary) = auxiliary { + vec![convert_span(primary), convert_span(auxiliary)] + } else { + vec![convert_span(primary)] + }; + + span_lint(cx, INVALID_REGEX, spans, &format!("regex syntax error: {kind}")); + } else { + span_lint_and_help( + cx, + INVALID_REGEX, + base, + &error.to_string(), + None, + "consider using a raw string literal: `r\"..\"`", + ); + } } fn const_str<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> Option { @@ -155,25 +190,7 @@ fn check_regex<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, utf8: bool) { span_lint_and_help(cx, TRIVIAL_REGEX, expr.span, "trivial regex", None, repl); } }, - Err(regex_syntax::Error::Parse(e)) => { - span_lint( - cx, - INVALID_REGEX, - str_span(expr.span, *e.span(), offset), - &format!("regex syntax error: {}", e.kind()), - ); - }, - Err(regex_syntax::Error::Translate(e)) => { - span_lint( - cx, - INVALID_REGEX, - str_span(expr.span, *e.span(), offset), - &format!("regex syntax error: {}", e.kind()), - ); - }, - Err(e) => { - span_lint(cx, INVALID_REGEX, expr.span, &format!("regex syntax error: {e}")); - }, + Err(e) => lint_syntax_error(cx, &e, r, expr.span, offset), } } } else if let Some(r) = const_str(cx, expr) { @@ -183,25 +200,7 @@ fn check_regex<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, utf8: bool) { span_lint_and_help(cx, TRIVIAL_REGEX, expr.span, "trivial regex", None, repl); } }, - Err(regex_syntax::Error::Parse(e)) => { - span_lint( - cx, - INVALID_REGEX, - expr.span, - &format!("regex syntax error on position {}: {}", e.span().start.offset, e.kind()), - ); - }, - Err(regex_syntax::Error::Translate(e)) => { - span_lint( - cx, - INVALID_REGEX, - expr.span, - &format!("regex syntax error on position {}: {}", e.span().start.offset, e.kind()), - ); - }, - Err(e) => { - span_lint(cx, INVALID_REGEX, expr.span, &format!("regex syntax error: {e}")); - }, + Err(e) => span_lint(cx, INVALID_REGEX, expr.span, &e.to_string()), } } } diff --git a/tests/ui/regex.rs b/tests/ui/regex.rs index f0e1a8128d7c3..ab8ac97a0e707 100644 --- a/tests/ui/regex.rs +++ b/tests/ui/regex.rs @@ -36,6 +36,10 @@ fn syntax_error() { let raw_string_error = Regex::new(r"[...\/...]"); let raw_string_error = Regex::new(r#"[...\/...]"#); + + let escaped_string_span = Regex::new("\\b\\c"); + + let aux_span = Regex::new("(?ixi)"); } fn trivial_regex() { diff --git a/tests/ui/regex.stderr b/tests/ui/regex.stderr index 2424644c6f6b1..c2440f39e0a03 100644 --- a/tests/ui/regex.stderr +++ b/tests/ui/regex.stderr @@ -29,7 +29,10 @@ error: regex syntax error: invalid character class range, the start must be <= t LL | let some_unicode = Regex::new("[é-è]"); | ^^^ -error: regex syntax error on position 0: unclosed group +error: regex parse error: + ( + ^ + error: unclosed group --> $DIR/regex.rs:18:33 | LL | let some_regex = Regex::new(OPENING_PAREN); @@ -43,25 +46,37 @@ LL | let binary_pipe_in_wrong_position = BRegex::new("|"); | = help: the regex is unlikely to be useful as it is -error: regex syntax error on position 0: unclosed group +error: regex parse error: + ( + ^ + error: unclosed group --> $DIR/regex.rs:21:41 | LL | let some_binary_regex = BRegex::new(OPENING_PAREN); | ^^^^^^^^^^^^^ -error: regex syntax error on position 0: unclosed group +error: regex parse error: + ( + ^ + error: unclosed group --> $DIR/regex.rs:22:56 | LL | let 
some_binary_regex_builder = BRegexBuilder::new(OPENING_PAREN); | ^^^^^^^^^^^^^ -error: regex syntax error on position 0: unclosed group +error: regex parse error: + ( + ^ + error: unclosed group --> $DIR/regex.rs:34:37 | LL | let set_error = RegexSet::new(&[OPENING_PAREN, r"[a-z]+/.(com|org|net)"]); | ^^^^^^^^^^^^^ -error: regex syntax error on position 0: unclosed group +error: regex parse error: + ( + ^ + error: unclosed group --> $DIR/regex.rs:35:39 | LL | let bset_error = BRegexSet::new(&[OPENING_PAREN, r"[a-z]+/.(com|org|net)"]); @@ -79,8 +94,25 @@ error: regex syntax error: unrecognized escape sequence LL | let raw_string_error = Regex::new(r#"[...//...]"#); | ^^ +error: regex parse error: + /b/c + ^^ + error: unrecognized escape sequence + --> $DIR/regex.rs:40:42 + | +LL | let escaped_string_span = Regex::new("/b/c"); + | ^^^^^^^^ + | + = help: consider using a raw string literal: `r".."` + +error: regex syntax error: duplicate flag + --> $DIR/regex.rs:42:34 + | +LL | let aux_span = Regex::new("(?ixi)"); + | ^ ^ + error: trivial regex - --> $DIR/regex.rs:42:33 + --> $DIR/regex.rs:46:33 | LL | let trivial_eq = Regex::new("^foobar$"); | ^^^^^^^^^^ @@ -88,7 +120,7 @@ LL | let trivial_eq = Regex::new("^foobar$"); = help: consider using `==` on `str`s error: trivial regex - --> $DIR/regex.rs:44:48 + --> $DIR/regex.rs:48:48 | LL | let trivial_eq_builder = RegexBuilder::new("^foobar$"); | ^^^^^^^^^^ @@ -96,7 +128,7 @@ LL | let trivial_eq_builder = RegexBuilder::new("^foobar$"); = help: consider using `==` on `str`s error: trivial regex - --> $DIR/regex.rs:46:42 + --> $DIR/regex.rs:50:42 | LL | let trivial_starts_with = Regex::new("^foobar"); | ^^^^^^^^^ @@ -104,7 +136,7 @@ LL | let trivial_starts_with = Regex::new("^foobar"); = help: consider using `str::starts_with` error: trivial regex - --> $DIR/regex.rs:48:40 + --> $DIR/regex.rs:52:40 | LL | let trivial_ends_with = Regex::new("foobar$"); | ^^^^^^^^^ @@ -112,7 +144,7 @@ LL | let trivial_ends_with = Regex::new("foobar$"); = help: consider using `str::ends_with` error: trivial regex - --> $DIR/regex.rs:50:39 + --> $DIR/regex.rs:54:39 | LL | let trivial_contains = Regex::new("foobar"); | ^^^^^^^^ @@ -120,7 +152,7 @@ LL | let trivial_contains = Regex::new("foobar"); = help: consider using `str::contains` error: trivial regex - --> $DIR/regex.rs:52:39 + --> $DIR/regex.rs:56:39 | LL | let trivial_contains = Regex::new(NOT_A_REAL_REGEX); | ^^^^^^^^^^^^^^^^ @@ -128,7 +160,7 @@ LL | let trivial_contains = Regex::new(NOT_A_REAL_REGEX); = help: consider using `str::contains` error: trivial regex - --> $DIR/regex.rs:54:40 + --> $DIR/regex.rs:58:40 | LL | let trivial_backslash = Regex::new("a/.b"); | ^^^^^^^ @@ -136,7 +168,7 @@ LL | let trivial_backslash = Regex::new("a/.b"); = help: consider using `str::contains` error: trivial regex - --> $DIR/regex.rs:57:36 + --> $DIR/regex.rs:61:36 | LL | let trivial_empty = Regex::new(""); | ^^ @@ -144,7 +176,7 @@ LL | let trivial_empty = Regex::new(""); = help: the regex is unlikely to be useful as it is error: trivial regex - --> $DIR/regex.rs:59:36 + --> $DIR/regex.rs:63:36 | LL | let trivial_empty = Regex::new("^"); | ^^^ @@ -152,7 +184,7 @@ LL | let trivial_empty = Regex::new("^"); = help: the regex is unlikely to be useful as it is error: trivial regex - --> $DIR/regex.rs:61:36 + --> $DIR/regex.rs:65:36 | LL | let trivial_empty = Regex::new("^$"); | ^^^^ @@ -160,12 +192,12 @@ LL | let trivial_empty = Regex::new("^$"); = help: consider using `str::is_empty` error: trivial regex - --> $DIR/regex.rs:63:44 + 
--> $DIR/regex.rs:67:44 | LL | let binary_trivial_empty = BRegex::new("^$"); | ^^^^ | = help: consider using `str::is_empty` -error: aborting due to 23 previous errors +error: aborting due to 25 previous errors From a88ec47d4879fcee7ee71fd41b924bdb0f707d26 Mon Sep 17 00:00:00 2001 From: Kyle Matsuda Date: Wed, 18 Jan 2023 15:43:20 -0700 Subject: [PATCH 155/501] replace usages of fn_sig query with bound_fn_sig --- src/main_shim.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main_shim.rs b/src/main_shim.rs index c10054e7f0d2c..f46e6b6528c88 100644 --- a/src/main_shim.rs +++ b/src/main_shim.rs @@ -46,7 +46,7 @@ pub(crate) fn maybe_create_entry_wrapper( is_main_fn: bool, sigpipe: u8, ) { - let main_ret_ty = tcx.fn_sig(rust_main_def_id).output(); + let main_ret_ty = tcx.bound_fn_sig(rust_main_def_id).subst_identity().output(); // Given that `main()` has no arguments, // then its return type cannot have // late-bound regions, since late-bound From 38899d0c29135f9f89abafb3e0a32bd43bf7817c Mon Sep 17 00:00:00 2001 From: Kyle Matsuda Date: Wed, 18 Jan 2023 15:43:20 -0700 Subject: [PATCH 156/501] replace usages of fn_sig query with bound_fn_sig --- clippy_lints/src/casts/as_ptr_cast_mut.rs | 2 +- clippy_lints/src/default_numeric_fallback.rs | 4 ++-- clippy_lints/src/dereference.rs | 14 +++++++------- .../src/functions/not_unsafe_ptr_arg_deref.rs | 2 +- clippy_lints/src/functions/result.rs | 2 +- clippy_lints/src/inherent_to_string.rs | 2 +- clippy_lints/src/iter_not_returning_iterator.rs | 2 +- clippy_lints/src/len_zero.rs | 10 +++++----- clippy_lints/src/loops/needless_range_loop.rs | 2 +- clippy_lints/src/map_unit_fn.rs | 2 +- clippy_lints/src/methods/expect_fun_call.rs | 4 ++-- clippy_lints/src/methods/mod.rs | 2 +- clippy_lints/src/methods/needless_collect.rs | 4 ++-- clippy_lints/src/methods/unnecessary_to_owned.rs | 4 ++-- clippy_lints/src/mut_key.rs | 2 +- clippy_lints/src/needless_pass_by_value.rs | 2 +- clippy_lints/src/pass_by_ref_or_value.rs | 2 +- clippy_lints/src/ptr.rs | 6 +++--- clippy_lints/src/returns.rs | 3 ++- clippy_lints/src/unit_return_expecting_ord.rs | 2 +- clippy_lints/src/unit_types/let_unit_value.rs | 2 +- clippy_lints/src/use_self.rs | 2 +- clippy_utils/src/eager_or_lazy.rs | 2 +- clippy_utils/src/lib.rs | 6 +++--- clippy_utils/src/qualify_min_const_fn.rs | 2 +- clippy_utils/src/sugg.rs | 2 +- clippy_utils/src/ty.rs | 2 +- clippy_utils/src/visitors.rs | 4 ++-- 28 files changed, 48 insertions(+), 47 deletions(-) diff --git a/clippy_lints/src/casts/as_ptr_cast_mut.rs b/clippy_lints/src/casts/as_ptr_cast_mut.rs index 9409f4844f54b..2a0e0857c5610 100644 --- a/clippy_lints/src/casts/as_ptr_cast_mut.rs +++ b/clippy_lints/src/casts/as_ptr_cast_mut.rs @@ -17,7 +17,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, && let ExprKind::MethodCall(method_name, receiver, [], _) = cast_expr.peel_blocks().kind && method_name.ident.name == rustc_span::sym::as_ptr && let Some(as_ptr_did) = cx.typeck_results().type_dependent_def_id(cast_expr.peel_blocks().hir_id) - && let as_ptr_sig = cx.tcx.fn_sig(as_ptr_did) + && let as_ptr_sig = cx.tcx.bound_fn_sig(as_ptr_did).subst_identity() && let Some(first_param_ty) = as_ptr_sig.skip_binder().inputs().iter().next() && let ty::Ref(_, _, Mutability::Not) = first_param_ty.kind() && let Some(recv) = snippet_opt(cx, receiver.span) diff --git a/clippy_lints/src/default_numeric_fallback.rs b/clippy_lints/src/default_numeric_fallback.rs index 03460689e19ad..9c5a9f583743f 100644 --- 
a/clippy_lints/src/default_numeric_fallback.rs +++ b/clippy_lints/src/default_numeric_fallback.rs @@ -141,7 +141,7 @@ impl<'a, 'tcx> Visitor<'tcx> for NumericFallbackVisitor<'a, 'tcx> { ExprKind::MethodCall(_, receiver, args, _) => { if let Some(def_id) = self.cx.typeck_results().type_dependent_def_id(expr.hir_id) { - let fn_sig = self.cx.tcx.fn_sig(def_id).skip_binder(); + let fn_sig = self.cx.tcx.bound_fn_sig(def_id).subst_identity().skip_binder(); for (expr, bound) in iter::zip(std::iter::once(*receiver).chain(args.iter()), fn_sig.inputs()) { self.ty_bounds.push((*bound).into()); self.visit_expr(expr); @@ -215,7 +215,7 @@ fn fn_sig_opt<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option Some(cx.tcx.fn_sig(*def_id)), + ty::FnDef(def_id, _) => Some(cx.tcx.bound_fn_sig(*def_id).subst_identity()), ty::FnPtr(fn_sig) => Some(*fn_sig), _ => None, } diff --git a/clippy_lints/src/dereference.rs b/clippy_lints/src/dereference.rs index 05f2b92c03709..25620f45b8a98 100644 --- a/clippy_lints/src/dereference.rs +++ b/clippy_lints/src/dereference.rs @@ -759,7 +759,7 @@ fn walk_parents<'tcx>( }) if span.ctxt() == ctxt => { let output = cx .tcx - .erase_late_bound_regions(cx.tcx.fn_sig(owner_id.to_def_id()).output()); + .erase_late_bound_regions(cx.tcx.bound_fn_sig(owner_id.to_def_id()).subst_identity().output()); Some(ty_auto_deref_stability(cx, output, precedence).position_for_result(cx)) }, @@ -791,7 +791,7 @@ fn walk_parents<'tcx>( } else { let output = cx .tcx - .erase_late_bound_regions(cx.tcx.fn_sig(cx.tcx.hir().local_def_id(owner_id)).output()); + .erase_late_bound_regions(cx.tcx.bound_fn_sig(cx.tcx.hir().local_def_id(owner_id).into()).subst_identity().output()); ty_auto_deref_stability(cx, output, precedence).position_for_result(cx) }, ) @@ -858,7 +858,7 @@ fn walk_parents<'tcx>( && let subs = cx .typeck_results() .node_substs_opt(parent.hir_id).map(|subs| &subs[1..]).unwrap_or_default() - && let impl_ty = if cx.tcx.fn_sig(id).skip_binder().inputs()[0].is_ref() { + && let impl_ty = if cx.tcx.bound_fn_sig(id).subst_identity().skip_binder().inputs()[0].is_ref() { // Trait methods taking `&self` sub_ty } else { @@ -879,7 +879,7 @@ fn walk_parents<'tcx>( return Some(Position::MethodReceiver); } args.iter().position(|arg| arg.hir_id == child_id).map(|i| { - let ty = cx.tcx.fn_sig(id).skip_binder().inputs()[i + 1]; + let ty = cx.tcx.bound_fn_sig(id).subst_identity().skip_binder().inputs()[i + 1]; // `e.hir_id == child_id` for https://github.com/rust-lang/rust-clippy/issues/9739 // `method.args.is_none()` for https://github.com/rust-lang/rust-clippy/issues/9782 if e.hir_id == child_id && method.args.is_none() && let ty::Param(param_ty) = ty.kind() { @@ -896,7 +896,7 @@ fn walk_parents<'tcx>( } else { ty_auto_deref_stability( cx, - cx.tcx.erase_late_bound_regions(cx.tcx.fn_sig(id).input(i + 1)), + cx.tcx.erase_late_bound_regions(cx.tcx.bound_fn_sig(id).subst_identity().input(i + 1)), precedence, ) .position_for_arg() @@ -1093,7 +1093,7 @@ fn needless_borrow_impl_arg_position<'tcx>( let sized_trait_def_id = cx.tcx.lang_items().sized_trait(); let Some(callee_def_id) = fn_def_id(cx, parent) else { return Position::Other(precedence) }; - let fn_sig = cx.tcx.fn_sig(callee_def_id).skip_binder(); + let fn_sig = cx.tcx.bound_fn_sig(callee_def_id).subst_identity().skip_binder(); let substs_with_expr_ty = cx .typeck_results() .node_substs(if let ExprKind::Call(callee, _) = parent.kind { @@ -1221,7 +1221,7 @@ fn has_ref_mut_self_method(cx: &LateContext<'_>, trait_def_id: DefId) -> bool { .in_definition_order() 
.any(|assoc_item| { if assoc_item.fn_has_self_parameter { - let self_ty = cx.tcx.fn_sig(assoc_item.def_id).skip_binder().inputs()[0]; + let self_ty = cx.tcx.bound_fn_sig(assoc_item.def_id).subst_identity().skip_binder().inputs()[0]; matches!(self_ty.kind(), ty::Ref(_, _, Mutability::Mut)) } else { false diff --git a/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs b/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs index 2c0bf551fd7e2..4f0371c027c25 100644 --- a/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs +++ b/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs @@ -58,7 +58,7 @@ fn check_raw_ptr<'tcx>( }, hir::ExprKind::MethodCall(_, recv, args, _) => { let def_id = typeck.type_dependent_def_id(e.hir_id).unwrap(); - if cx.tcx.fn_sig(def_id).skip_binder().unsafety == hir::Unsafety::Unsafe { + if cx.tcx.bound_fn_sig(def_id).skip_binder().skip_binder().unsafety == hir::Unsafety::Unsafe { check_arg(cx, &raw_ptrs, recv); for arg in args { check_arg(cx, &raw_ptrs, arg); diff --git a/clippy_lints/src/functions/result.rs b/clippy_lints/src/functions/result.rs index 23da145d03825..21de62581f1c3 100644 --- a/clippy_lints/src/functions/result.rs +++ b/clippy_lints/src/functions/result.rs @@ -21,7 +21,7 @@ fn result_err_ty<'tcx>( ) -> Option<(&'tcx hir::Ty<'tcx>, Ty<'tcx>)> { if !in_external_macro(cx.sess(), item_span) && let hir::FnRetTy::Return(hir_ty) = decl.output - && let ty = cx.tcx.erase_late_bound_regions(cx.tcx.fn_sig(id).output()) + && let ty = cx.tcx.erase_late_bound_regions(cx.tcx.bound_fn_sig(id.into()).subst_identity().output()) && is_type_diagnostic_item(cx, ty, sym::Result) && let ty::Adt(_, substs) = ty.kind() { diff --git a/clippy_lints/src/inherent_to_string.rs b/clippy_lints/src/inherent_to_string.rs index aaecc4fa8f256..d971684a3aa9c 100644 --- a/clippy_lints/src/inherent_to_string.rs +++ b/clippy_lints/src/inherent_to_string.rs @@ -124,7 +124,7 @@ fn show_lint(cx: &LateContext<'_>, item: &ImplItem<'_>) { .expect("Failed to get trait ID of `Display`!"); // Get the real type of 'self' - let self_type = cx.tcx.fn_sig(item.owner_id).input(0); + let self_type = cx.tcx.bound_fn_sig(item.owner_id.to_def_id()).skip_binder().input(0); let self_type = self_type.skip_binder().peel_refs(); // Emit either a warning or an error diff --git a/clippy_lints/src/iter_not_returning_iterator.rs b/clippy_lints/src/iter_not_returning_iterator.rs index e76de77f195d7..131af2fd9c381 100644 --- a/clippy_lints/src/iter_not_returning_iterator.rs +++ b/clippy_lints/src/iter_not_returning_iterator.rs @@ -66,7 +66,7 @@ impl<'tcx> LateLintPass<'tcx> for IterNotReturningIterator { fn check_sig(cx: &LateContext<'_>, name: &str, sig: &FnSig<'_>, fn_id: LocalDefId) { if sig.decl.implicit_self.has_implicit_self() { - let ret_ty = cx.tcx.erase_late_bound_regions(cx.tcx.fn_sig(fn_id).output()); + let ret_ty = cx.tcx.erase_late_bound_regions(cx.tcx.bound_fn_sig(fn_id.into()).subst_identity().output()); let ret_ty = cx .tcx .try_normalize_erasing_regions(cx.param_env, ret_ty) diff --git a/clippy_lints/src/len_zero.rs b/clippy_lints/src/len_zero.rs index 3c70c9cf19a51..121d6b9f0fe7e 100644 --- a/clippy_lints/src/len_zero.rs +++ b/clippy_lints/src/len_zero.rs @@ -144,7 +144,7 @@ impl<'tcx> LateLintPass<'tcx> for LenZero { if let Some(local_id) = ty_id.as_local(); let ty_hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_id); if !is_lint_allowed(cx, LEN_WITHOUT_IS_EMPTY, ty_hir_id); - if let Some(output) = parse_len_output(cx, cx.tcx.fn_sig(item.owner_id).skip_binder()); + if let Some(output) = 
parse_len_output(cx, cx.tcx.bound_fn_sig(item.owner_id.to_def_id()).subst_identity().skip_binder()); then { let (name, kind) = match cx.tcx.hir().find(ty_hir_id) { Some(Node::ForeignItem(x)) => (x.ident.name, "extern type"), @@ -196,7 +196,7 @@ fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, trait_items fn is_named_self(cx: &LateContext<'_>, item: &TraitItemRef, name: Symbol) -> bool { item.ident.name == name && if let AssocItemKind::Fn { has_self } = item.kind { - has_self && { cx.tcx.fn_sig(item.id.owner_id).inputs().skip_binder().len() == 1 } + has_self && { cx.tcx.bound_fn_sig(item.id.owner_id.to_def_id()).skip_binder().inputs().skip_binder().len() == 1 } } else { false } @@ -224,7 +224,7 @@ fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, trait_items .any(|i| { i.kind == ty::AssocKind::Fn && i.fn_has_self_parameter - && cx.tcx.fn_sig(i.def_id).inputs().skip_binder().len() == 1 + && cx.tcx.bound_fn_sig(i.def_id).skip_binder().inputs().skip_binder().len() == 1 }); if !is_empty_method_found { @@ -342,7 +342,7 @@ fn check_for_is_empty<'tcx>( ), Some(is_empty) if !(is_empty.fn_has_self_parameter - && check_is_empty_sig(cx.tcx.fn_sig(is_empty.def_id).skip_binder(), self_kind, output)) => + && check_is_empty_sig(cx.tcx.bound_fn_sig(is_empty.def_id).subst_identity().skip_binder(), self_kind, output)) => { ( format!( @@ -473,7 +473,7 @@ fn has_is_empty(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { /// Gets an `AssocItem` and return true if it matches `is_empty(self)`. fn is_is_empty(cx: &LateContext<'_>, item: &ty::AssocItem) -> bool { if item.kind == ty::AssocKind::Fn { - let sig = cx.tcx.fn_sig(item.def_id); + let sig = cx.tcx.bound_fn_sig(item.def_id).skip_binder(); let ty = sig.skip_binder(); ty.inputs().len() == 1 } else { diff --git a/clippy_lints/src/loops/needless_range_loop.rs b/clippy_lints/src/loops/needless_range_loop.rs index 3bca93d80aa7f..3e025bc0e7160 100644 --- a/clippy_lints/src/loops/needless_range_loop.rs +++ b/clippy_lints/src/loops/needless_range_loop.rs @@ -370,7 +370,7 @@ impl<'a, 'tcx> Visitor<'tcx> for VarVisitor<'a, 'tcx> { ExprKind::MethodCall(_, receiver, args, _) => { let def_id = self.cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap(); for (ty, expr) in iter::zip( - self.cx.tcx.fn_sig(def_id).inputs().skip_binder(), + self.cx.tcx.bound_fn_sig(def_id).subst_identity().inputs().skip_binder(), std::iter::once(receiver).chain(args.iter()), ) { self.prefer_mutable = false; diff --git a/clippy_lints/src/map_unit_fn.rs b/clippy_lints/src/map_unit_fn.rs index 59195d1ae4e0a..a179dd091e421 100644 --- a/clippy_lints/src/map_unit_fn.rs +++ b/clippy_lints/src/map_unit_fn.rs @@ -104,7 +104,7 @@ fn is_unit_function(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool { let ty = cx.typeck_results().expr_ty(expr); if let ty::FnDef(id, _) = *ty.kind() { - if let Some(fn_type) = cx.tcx.fn_sig(id).no_bound_vars() { + if let Some(fn_type) = cx.tcx.bound_fn_sig(id).subst_identity().no_bound_vars() { return is_unit_type(fn_type.output()); } } diff --git a/clippy_lints/src/methods/expect_fun_call.rs b/clippy_lints/src/methods/expect_fun_call.rs index a9189b31c5710..3f670ebc9178b 100644 --- a/clippy_lints/src/methods/expect_fun_call.rs +++ b/clippy_lints/src/methods/expect_fun_call.rs @@ -70,7 +70,7 @@ pub(super) fn check<'tcx>( if let hir::ExprKind::Path(ref p) = fun.kind { match cx.qpath_res(p, fun.hir_id) { hir::def::Res::Def(hir::def::DefKind::Fn | hir::def::DefKind::AssocFn, def_id) => matches!( - 
cx.tcx.fn_sig(def_id).output().skip_binder().kind(), + cx.tcx.bound_fn_sig(def_id).subst_identity().output().skip_binder().kind(), ty::Ref(re, ..) if re.is_static(), ), _ => false, @@ -84,7 +84,7 @@ pub(super) fn check<'tcx>( .type_dependent_def_id(arg.hir_id) .map_or(false, |method_id| { matches!( - cx.tcx.fn_sig(method_id).output().skip_binder().kind(), + cx.tcx.bound_fn_sig(method_id).subst_identity().output().skip_binder().kind(), ty::Ref(re, ..) if re.is_static() ) }) diff --git a/clippy_lints/src/methods/mod.rs b/clippy_lints/src/methods/mod.rs index 77be61b479340..6002ef1340bea 100644 --- a/clippy_lints/src/methods/mod.rs +++ b/clippy_lints/src/methods/mod.rs @@ -3352,7 +3352,7 @@ impl<'tcx> LateLintPass<'tcx> for Methods { let implements_trait = matches!(item.kind, hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. })); if let hir::ImplItemKind::Fn(ref sig, id) = impl_item.kind { - let method_sig = cx.tcx.fn_sig(impl_item.owner_id); + let method_sig = cx.tcx.bound_fn_sig(impl_item.owner_id.to_def_id()).subst_identity(); let method_sig = cx.tcx.erase_late_bound_regions(method_sig); let first_arg_ty_opt = method_sig.inputs().iter().next().copied(); // if this impl block implements a trait, lint in trait definition instead diff --git a/clippy_lints/src/methods/needless_collect.rs b/clippy_lints/src/methods/needless_collect.rs index f4d3ef3b74250..1a1715d03a7cb 100644 --- a/clippy_lints/src/methods/needless_collect.rs +++ b/clippy_lints/src/methods/needless_collect.rs @@ -137,7 +137,7 @@ pub(super) fn check<'tcx>( /// Checks if the given method call matches the expected signature of `([&[mut]] self) -> bool` fn is_is_empty_sig(cx: &LateContext<'_>, call_id: HirId) -> bool { cx.typeck_results().type_dependent_def_id(call_id).map_or(false, |id| { - let sig = cx.tcx.fn_sig(id).skip_binder(); + let sig = cx.tcx.bound_fn_sig(id).subst_identity().skip_binder(); sig.inputs().len() == 1 && sig.output().is_bool() }) } @@ -165,7 +165,7 @@ fn iterates_same_ty<'tcx>(cx: &LateContext<'tcx>, iter_ty: Ty<'tcx>, collect_ty: fn is_contains_sig(cx: &LateContext<'_>, call_id: HirId, iter_expr: &Expr<'_>) -> bool { let typeck = cx.typeck_results(); if let Some(id) = typeck.type_dependent_def_id(call_id) - && let sig = cx.tcx.fn_sig(id) + && let sig = cx.tcx.bound_fn_sig(id).subst_identity() && sig.skip_binder().output().is_bool() && let [_, search_ty] = *sig.skip_binder().inputs() && let ty::Ref(_, search_ty, Mutability::Not) = *cx.tcx.erase_late_bound_regions(sig.rebind(search_ty)).kind() diff --git a/clippy_lints/src/methods/unnecessary_to_owned.rs b/clippy_lints/src/methods/unnecessary_to_owned.rs index b812e81cb107b..8036e787aaecc 100644 --- a/clippy_lints/src/methods/unnecessary_to_owned.rs +++ b/clippy_lints/src/methods/unnecessary_to_owned.rs @@ -246,7 +246,7 @@ fn check_other_call_arg<'tcx>( if_chain! 
{ if let Some((maybe_call, maybe_arg)) = skip_addr_of_ancestors(cx, expr); if let Some((callee_def_id, _, recv, call_args)) = get_callee_substs_and_args(cx, maybe_call); - let fn_sig = cx.tcx.fn_sig(callee_def_id).skip_binder(); + let fn_sig = cx.tcx.bound_fn_sig(callee_def_id).subst_identity().skip_binder(); if let Some(i) = recv.into_iter().chain(call_args).position(|arg| arg.hir_id == maybe_arg.hir_id); if let Some(input) = fn_sig.inputs().get(i); let (input, n_refs) = peel_mid_ty_refs(*input); @@ -386,7 +386,7 @@ fn can_change_type<'a>(cx: &LateContext<'a>, mut expr: &'a Expr<'a>, mut ty: Ty< Node::Expr(parent_expr) => { if let Some((callee_def_id, call_substs, recv, call_args)) = get_callee_substs_and_args(cx, parent_expr) { - let fn_sig = cx.tcx.fn_sig(callee_def_id).skip_binder(); + let fn_sig = cx.tcx.bound_fn_sig(callee_def_id).subst_identity().skip_binder(); if let Some(arg_index) = recv.into_iter().chain(call_args).position(|arg| arg.hir_id == expr.hir_id) && let Some(param_ty) = fn_sig.inputs().get(arg_index) && let ty::Param(ParamTy { index: param_index , ..}) = param_ty.kind() diff --git a/clippy_lints/src/mut_key.rs b/clippy_lints/src/mut_key.rs index a651020ca6566..a2868883673f5 100644 --- a/clippy_lints/src/mut_key.rs +++ b/clippy_lints/src/mut_key.rs @@ -138,7 +138,7 @@ impl MutableKeyType { fn check_sig(&self, cx: &LateContext<'_>, item_hir_id: hir::HirId, decl: &hir::FnDecl<'_>) { let fn_def_id = cx.tcx.hir().local_def_id(item_hir_id); - let fn_sig = cx.tcx.fn_sig(fn_def_id); + let fn_sig = cx.tcx.bound_fn_sig(fn_def_id.into()).subst_identity(); for (hir_ty, ty) in iter::zip(decl.inputs, fn_sig.inputs().skip_binder()) { self.check_ty_(cx, hir_ty.span, *ty); } diff --git a/clippy_lints/src/needless_pass_by_value.rs b/clippy_lints/src/needless_pass_by_value.rs index 8c9d4c5cfe66f..e3d25603a7157 100644 --- a/clippy_lints/src/needless_pass_by_value.rs +++ b/clippy_lints/src/needless_pass_by_value.rs @@ -147,7 +147,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { ctx }; - let fn_sig = cx.tcx.fn_sig(fn_def_id); + let fn_sig = cx.tcx.bound_fn_sig(fn_def_id.into()).subst_identity(); let fn_sig = cx.tcx.erase_late_bound_regions(fn_sig); for (idx, ((input, &ty), arg)) in decl.inputs.iter().zip(fn_sig.inputs()).zip(body.params).enumerate() { diff --git a/clippy_lints/src/pass_by_ref_or_value.rs b/clippy_lints/src/pass_by_ref_or_value.rs index 2d21aaa4f7fdb..5512109c6cbb1 100644 --- a/clippy_lints/src/pass_by_ref_or_value.rs +++ b/clippy_lints/src/pass_by_ref_or_value.rs @@ -143,7 +143,7 @@ impl<'tcx> PassByRefOrValue { return; } - let fn_sig = cx.tcx.fn_sig(def_id); + let fn_sig = cx.tcx.bound_fn_sig(def_id.into()).subst_identity(); let fn_body = cx.enclosing_body.map(|id| cx.tcx.hir().body(id)); // Gather all the lifetimes found in the output type which may affect whether diff --git a/clippy_lints/src/ptr.rs b/clippy_lints/src/ptr.rs index 262953042581a..0a2d35015f573 100644 --- a/clippy_lints/src/ptr.rs +++ b/clippy_lints/src/ptr.rs @@ -164,7 +164,7 @@ impl<'tcx> LateLintPass<'tcx> for Ptr { check_mut_from_ref(cx, sig, None); for arg in check_fn_args( cx, - cx.tcx.fn_sig(item.owner_id).skip_binder().inputs(), + cx.tcx.bound_fn_sig(item.owner_id.to_def_id()).subst_identity().skip_binder().inputs(), sig.decl.inputs, &[], ) @@ -217,7 +217,7 @@ impl<'tcx> LateLintPass<'tcx> for Ptr { check_mut_from_ref(cx, sig, Some(body)); let decl = sig.decl; - let sig = cx.tcx.fn_sig(item_id).skip_binder(); + let sig = 
cx.tcx.bound_fn_sig(item_id.to_def_id()).subst_identity().skip_binder(); let lint_args: Vec<_> = check_fn_args(cx, sig.inputs(), decl.inputs, body.params) .filter(|arg| !is_trait_item || arg.mutability() == Mutability::Not) .collect(); @@ -624,7 +624,7 @@ fn check_ptr_arg_usage<'tcx>(cx: &LateContext<'tcx>, body: &'tcx Body<'_>, args: return; }; - match *self.cx.tcx.fn_sig(id).skip_binder().inputs()[i].peel_refs().kind() { + match *self.cx.tcx.bound_fn_sig(id).subst_identity().skip_binder().inputs()[i].peel_refs().kind() { ty::Dynamic(preds, _, _) if !matches_preds(self.cx, args.deref_ty.ty(self.cx), preds) => { set_skip_flag(); }, diff --git a/clippy_lints/src/returns.rs b/clippy_lints/src/returns.rs index bbbd9e4989e97..f3c5033060433 100644 --- a/clippy_lints/src/returns.rs +++ b/clippy_lints/src/returns.rs @@ -287,7 +287,8 @@ fn last_statement_borrows<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) if let Some(def_id) = fn_def_id(cx, e) && cx .tcx - .fn_sig(def_id) + .bound_fn_sig(def_id) + .subst_identity() .skip_binder() .output() .walk() diff --git a/clippy_lints/src/unit_return_expecting_ord.rs b/clippy_lints/src/unit_return_expecting_ord.rs index a138a4baa9b31..5df26d8b0a38c 100644 --- a/clippy_lints/src/unit_return_expecting_ord.rs +++ b/clippy_lints/src/unit_return_expecting_ord.rs @@ -76,7 +76,7 @@ fn get_projection_pred<'tcx>( fn get_args_to_check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Vec<(usize, String)> { let mut args_to_check = Vec::new(); if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) { - let fn_sig = cx.tcx.fn_sig(def_id); + let fn_sig = cx.tcx.bound_fn_sig(def_id).subst_identity(); let generics = cx.tcx.predicates_of(def_id); let fn_mut_preds = get_trait_predicates_for_trait_id(cx, generics, cx.tcx.lang_items().fn_mut_trait()); let ord_preds = get_trait_predicates_for_trait_id(cx, generics, cx.tcx.get_diagnostic_item(sym::Ord)); diff --git a/clippy_lints/src/unit_types/let_unit_value.rs b/clippy_lints/src/unit_types/let_unit_value.rs index ce9ebad8c89a8..681e59a1575d4 100644 --- a/clippy_lints/src/unit_types/let_unit_value.rs +++ b/clippy_lints/src/unit_types/let_unit_value.rs @@ -156,7 +156,7 @@ fn needs_inferred_result_ty( }, _ => return false, }; - let sig = cx.tcx.fn_sig(id).skip_binder(); + let sig = cx.tcx.bound_fn_sig(id).subst_identity().skip_binder(); if let ty::Param(output_ty) = *sig.output().kind() { let args: Vec<&Expr<'_>> = if let Some(receiver) = receiver { std::iter::once(receiver).chain(args.iter()).collect() diff --git a/clippy_lints/src/use_self.rs b/clippy_lints/src/use_self.rs index 6ae9d9d635380..09324fd92943c 100644 --- a/clippy_lints/src/use_self.rs +++ b/clippy_lints/src/use_self.rs @@ -146,7 +146,7 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf { .associated_item(impl_item.owner_id) .trait_item_def_id .expect("impl method matches a trait method"); - let trait_method_sig = cx.tcx.fn_sig(trait_method); + let trait_method_sig = cx.tcx.bound_fn_sig(trait_method).subst_identity(); let trait_method_sig = cx.tcx.erase_late_bound_regions(trait_method_sig); // `impl_inputs_outputs` is an iterator over the types (`hir::Ty`) declared in the diff --git a/clippy_utils/src/eager_or_lazy.rs b/clippy_utils/src/eager_or_lazy.rs index 96711936968b5..38588e32dbfe9 100644 --- a/clippy_utils/src/eager_or_lazy.rs +++ b/clippy_utils/src/eager_or_lazy.rs @@ -79,7 +79,7 @@ fn fn_eagerness(cx: &LateContext<'_>, fn_id: DefId, name: Symbol, have_one_arg: && subs.types().all(|x| matches!(x.peel_refs().kind(), 
ty::Param(_))) { // Limit the function to either `(self) -> bool` or `(&self) -> bool` - match &**cx.tcx.fn_sig(fn_id).skip_binder().inputs_and_output { + match &**cx.tcx.bound_fn_sig(fn_id).subst_identity().skip_binder().inputs_and_output { [arg, res] if !arg.is_mutable_ptr() && arg.peel_refs() == ty && res.is_bool() => NoChange, _ => Lazy, } diff --git a/clippy_utils/src/lib.rs b/clippy_utils/src/lib.rs index 7a4a9036dd363..ccc5f3503d2f5 100644 --- a/clippy_utils/src/lib.rs +++ b/clippy_utils/src/lib.rs @@ -1379,7 +1379,7 @@ pub fn get_enclosing_loop_or_multi_call_closure<'tcx>( .chain(args.iter()) .position(|arg| arg.hir_id == id)?; let id = cx.typeck_results().type_dependent_def_id(e.hir_id)?; - let ty = cx.tcx.fn_sig(id).skip_binder().inputs()[i]; + let ty = cx.tcx.bound_fn_sig(id).subst_identity().skip_binder().inputs()[i]; ty_is_fn_once_param(cx.tcx, ty, cx.tcx.param_env(id).caller_bounds()).then_some(()) }, _ => None, @@ -1580,14 +1580,14 @@ pub fn is_direct_expn_of(span: Span, name: &str) -> Option { /// Convenience function to get the return type of a function. pub fn return_ty<'tcx>(cx: &LateContext<'tcx>, fn_item: hir::HirId) -> Ty<'tcx> { let fn_def_id = cx.tcx.hir().local_def_id(fn_item); - let ret_ty = cx.tcx.fn_sig(fn_def_id).output(); + let ret_ty = cx.tcx.bound_fn_sig(fn_def_id.into()).subst_identity().output(); cx.tcx.erase_late_bound_regions(ret_ty) } /// Convenience function to get the nth argument type of a function. pub fn nth_arg<'tcx>(cx: &LateContext<'tcx>, fn_item: hir::HirId, nth: usize) -> Ty<'tcx> { let fn_def_id = cx.tcx.hir().local_def_id(fn_item); - let arg = cx.tcx.fn_sig(fn_def_id).input(nth); + let arg = cx.tcx.bound_fn_sig(fn_def_id.into()).subst_identity().input(nth); cx.tcx.erase_late_bound_regions(arg) } diff --git a/clippy_utils/src/qualify_min_const_fn.rs b/clippy_utils/src/qualify_min_const_fn.rs index e5d7da682813c..1552e343582ee 100644 --- a/clippy_utils/src/qualify_min_const_fn.rs +++ b/clippy_utils/src/qualify_min_const_fn.rs @@ -55,7 +55,7 @@ pub fn is_min_const_fn<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, msrv: &Msrv) // impl trait is gone in MIR, so check the return type manually check_ty( tcx, - tcx.fn_sig(def_id).output().skip_binder(), + tcx.bound_fn_sig(def_id).subst_identity().output().skip_binder(), body.local_decls.iter().next().unwrap().source_info.span, )?; diff --git a/clippy_utils/src/sugg.rs b/clippy_utils/src/sugg.rs index 2d1044af17e8c..d6a698bbeaa8a 100644 --- a/clippy_utils/src/sugg.rs +++ b/clippy_utils/src/sugg.rs @@ -885,7 +885,7 @@ impl<'tcx> DerefDelegate<'_, 'tcx> { .cx .typeck_results() .type_dependent_def_id(parent_expr.hir_id) - .map(|did| self.cx.tcx.fn_sig(did).skip_binder()) + .map(|did| self.cx.tcx.bound_fn_sig(did).subst_identity().skip_binder()) { std::iter::once(receiver) .chain(call_args.iter()) diff --git a/clippy_utils/src/ty.rs b/clippy_utils/src/ty.rs index 99fba4fe741a1..14fc2c1001704 100644 --- a/clippy_utils/src/ty.rs +++ b/clippy_utils/src/ty.rs @@ -628,7 +628,7 @@ impl<'tcx> ExprFnSig<'tcx> { /// If the expression is function like, get the signature for it. 
pub fn expr_sig<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> Option> { if let Res::Def(DefKind::Fn | DefKind::Ctor(_, CtorKind::Fn) | DefKind::AssocFn, id) = path_res(cx, expr) { - Some(ExprFnSig::Sig(cx.tcx.fn_sig(id), Some(id))) + Some(ExprFnSig::Sig(cx.tcx.bound_fn_sig(id).subst_identity(), Some(id))) } else { ty_sig(cx, cx.typeck_results().expr_ty_adjusted(expr).peel_refs()) } diff --git a/clippy_utils/src/visitors.rs b/clippy_utils/src/visitors.rs index 14c01a60b4c32..1680a40206a3a 100644 --- a/clippy_utils/src/visitors.rs +++ b/clippy_utils/src/visitors.rs @@ -392,12 +392,12 @@ pub fn is_expr_unsafe<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> bool { .cx .typeck_results() .type_dependent_def_id(e.hir_id) - .map_or(false, |id| self.cx.tcx.fn_sig(id).unsafety() == Unsafety::Unsafe) => + .map_or(false, |id| self.cx.tcx.bound_fn_sig(id).skip_binder().unsafety() == Unsafety::Unsafe) => { self.is_unsafe = true; }, ExprKind::Call(func, _) => match *self.cx.typeck_results().expr_ty(func).peel_refs().kind() { - ty::FnDef(id, _) if self.cx.tcx.fn_sig(id).unsafety() == Unsafety::Unsafe => self.is_unsafe = true, + ty::FnDef(id, _) if self.cx.tcx.bound_fn_sig(id).skip_binder().unsafety() == Unsafety::Unsafe => self.is_unsafe = true, ty::FnPtr(sig) if sig.unsafety() == Unsafety::Unsafe => self.is_unsafe = true, _ => walk_expr(self, e), }, From 7bb0bfea0417c0e165bb3084b9c471245e8c8ebf Mon Sep 17 00:00:00 2001 From: Kyle Matsuda Date: Wed, 18 Jan 2023 16:52:47 -0700 Subject: [PATCH 157/501] change fn_sig query to use EarlyBinder; remove bound_fn_sig query; add EarlyBinder to fn_sig in metadata --- src/main_shim.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main_shim.rs b/src/main_shim.rs index f46e6b6528c88..bc1b1ec7cd597 100644 --- a/src/main_shim.rs +++ b/src/main_shim.rs @@ -46,7 +46,7 @@ pub(crate) fn maybe_create_entry_wrapper( is_main_fn: bool, sigpipe: u8, ) { - let main_ret_ty = tcx.bound_fn_sig(rust_main_def_id).subst_identity().output(); + let main_ret_ty = tcx.fn_sig(rust_main_def_id).subst_identity().output(); // Given that `main()` has no arguments, // then its return type cannot have // late-bound regions, since late-bound From afb586fa1f20a9e444cf861370bf0bc60fd49c78 Mon Sep 17 00:00:00 2001 From: Kyle Matsuda Date: Wed, 18 Jan 2023 16:52:47 -0700 Subject: [PATCH 158/501] change fn_sig query to use EarlyBinder; remove bound_fn_sig query; add EarlyBinder to fn_sig in metadata --- clippy_lints/src/casts/as_ptr_cast_mut.rs | 2 +- clippy_lints/src/default_numeric_fallback.rs | 4 ++-- clippy_lints/src/dereference.rs | 14 +++++++------- .../src/functions/not_unsafe_ptr_arg_deref.rs | 2 +- clippy_lints/src/functions/result.rs | 2 +- clippy_lints/src/inherent_to_string.rs | 2 +- clippy_lints/src/iter_not_returning_iterator.rs | 2 +- clippy_lints/src/len_zero.rs | 10 +++++----- clippy_lints/src/loops/needless_range_loop.rs | 2 +- clippy_lints/src/map_unit_fn.rs | 2 +- clippy_lints/src/methods/expect_fun_call.rs | 4 ++-- clippy_lints/src/methods/mod.rs | 2 +- clippy_lints/src/methods/needless_collect.rs | 4 ++-- clippy_lints/src/methods/unnecessary_to_owned.rs | 4 ++-- clippy_lints/src/mut_key.rs | 2 +- clippy_lints/src/needless_pass_by_value.rs | 2 +- clippy_lints/src/pass_by_ref_or_value.rs | 2 +- clippy_lints/src/ptr.rs | 6 +++--- clippy_lints/src/returns.rs | 2 +- clippy_lints/src/unit_return_expecting_ord.rs | 2 +- clippy_lints/src/unit_types/let_unit_value.rs | 2 +- clippy_lints/src/use_self.rs | 2 +- clippy_utils/src/eager_or_lazy.rs | 2 +- 
clippy_utils/src/lib.rs | 6 +++--- clippy_utils/src/qualify_min_const_fn.rs | 2 +- clippy_utils/src/sugg.rs | 2 +- clippy_utils/src/ty.rs | 4 ++-- clippy_utils/src/visitors.rs | 4 ++-- 28 files changed, 48 insertions(+), 48 deletions(-) diff --git a/clippy_lints/src/casts/as_ptr_cast_mut.rs b/clippy_lints/src/casts/as_ptr_cast_mut.rs index 2a0e0857c5610..1633ffd589c38 100644 --- a/clippy_lints/src/casts/as_ptr_cast_mut.rs +++ b/clippy_lints/src/casts/as_ptr_cast_mut.rs @@ -17,7 +17,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, && let ExprKind::MethodCall(method_name, receiver, [], _) = cast_expr.peel_blocks().kind && method_name.ident.name == rustc_span::sym::as_ptr && let Some(as_ptr_did) = cx.typeck_results().type_dependent_def_id(cast_expr.peel_blocks().hir_id) - && let as_ptr_sig = cx.tcx.bound_fn_sig(as_ptr_did).subst_identity() + && let as_ptr_sig = cx.tcx.fn_sig(as_ptr_did).subst_identity() && let Some(first_param_ty) = as_ptr_sig.skip_binder().inputs().iter().next() && let ty::Ref(_, _, Mutability::Not) = first_param_ty.kind() && let Some(recv) = snippet_opt(cx, receiver.span) diff --git a/clippy_lints/src/default_numeric_fallback.rs b/clippy_lints/src/default_numeric_fallback.rs index 9c5a9f583743f..f806ba238c7c6 100644 --- a/clippy_lints/src/default_numeric_fallback.rs +++ b/clippy_lints/src/default_numeric_fallback.rs @@ -141,7 +141,7 @@ impl<'a, 'tcx> Visitor<'tcx> for NumericFallbackVisitor<'a, 'tcx> { ExprKind::MethodCall(_, receiver, args, _) => { if let Some(def_id) = self.cx.typeck_results().type_dependent_def_id(expr.hir_id) { - let fn_sig = self.cx.tcx.bound_fn_sig(def_id).subst_identity().skip_binder(); + let fn_sig = self.cx.tcx.fn_sig(def_id).subst_identity().skip_binder(); for (expr, bound) in iter::zip(std::iter::once(*receiver).chain(args.iter()), fn_sig.inputs()) { self.ty_bounds.push((*bound).into()); self.visit_expr(expr); @@ -215,7 +215,7 @@ fn fn_sig_opt<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId) -> Option Some(cx.tcx.bound_fn_sig(*def_id).subst_identity()), + ty::FnDef(def_id, _) => Some(cx.tcx.fn_sig(*def_id).subst_identity()), ty::FnPtr(fn_sig) => Some(*fn_sig), _ => None, } diff --git a/clippy_lints/src/dereference.rs b/clippy_lints/src/dereference.rs index 25620f45b8a98..fa3e5aa6b7213 100644 --- a/clippy_lints/src/dereference.rs +++ b/clippy_lints/src/dereference.rs @@ -759,7 +759,7 @@ fn walk_parents<'tcx>( }) if span.ctxt() == ctxt => { let output = cx .tcx - .erase_late_bound_regions(cx.tcx.bound_fn_sig(owner_id.to_def_id()).subst_identity().output()); + .erase_late_bound_regions(cx.tcx.fn_sig(owner_id).subst_identity().output()); Some(ty_auto_deref_stability(cx, output, precedence).position_for_result(cx)) }, @@ -791,7 +791,7 @@ fn walk_parents<'tcx>( } else { let output = cx .tcx - .erase_late_bound_regions(cx.tcx.bound_fn_sig(cx.tcx.hir().local_def_id(owner_id).into()).subst_identity().output()); + .erase_late_bound_regions(cx.tcx.fn_sig(cx.tcx.hir().local_def_id(owner_id)).subst_identity().output()); ty_auto_deref_stability(cx, output, precedence).position_for_result(cx) }, ) @@ -858,7 +858,7 @@ fn walk_parents<'tcx>( && let subs = cx .typeck_results() .node_substs_opt(parent.hir_id).map(|subs| &subs[1..]).unwrap_or_default() - && let impl_ty = if cx.tcx.bound_fn_sig(id).subst_identity().skip_binder().inputs()[0].is_ref() { + && let impl_ty = if cx.tcx.fn_sig(id).subst_identity().skip_binder().inputs()[0].is_ref() { // Trait methods taking `&self` sub_ty } else { @@ -879,7 +879,7 @@ fn walk_parents<'tcx>( 
return Some(Position::MethodReceiver); } args.iter().position(|arg| arg.hir_id == child_id).map(|i| { - let ty = cx.tcx.bound_fn_sig(id).subst_identity().skip_binder().inputs()[i + 1]; + let ty = cx.tcx.fn_sig(id).subst_identity().skip_binder().inputs()[i + 1]; // `e.hir_id == child_id` for https://github.com/rust-lang/rust-clippy/issues/9739 // `method.args.is_none()` for https://github.com/rust-lang/rust-clippy/issues/9782 if e.hir_id == child_id && method.args.is_none() && let ty::Param(param_ty) = ty.kind() { @@ -896,7 +896,7 @@ fn walk_parents<'tcx>( } else { ty_auto_deref_stability( cx, - cx.tcx.erase_late_bound_regions(cx.tcx.bound_fn_sig(id).subst_identity().input(i + 1)), + cx.tcx.erase_late_bound_regions(cx.tcx.fn_sig(id).subst_identity().input(i + 1)), precedence, ) .position_for_arg() @@ -1093,7 +1093,7 @@ fn needless_borrow_impl_arg_position<'tcx>( let sized_trait_def_id = cx.tcx.lang_items().sized_trait(); let Some(callee_def_id) = fn_def_id(cx, parent) else { return Position::Other(precedence) }; - let fn_sig = cx.tcx.bound_fn_sig(callee_def_id).subst_identity().skip_binder(); + let fn_sig = cx.tcx.fn_sig(callee_def_id).subst_identity().skip_binder(); let substs_with_expr_ty = cx .typeck_results() .node_substs(if let ExprKind::Call(callee, _) = parent.kind { @@ -1221,7 +1221,7 @@ fn has_ref_mut_self_method(cx: &LateContext<'_>, trait_def_id: DefId) -> bool { .in_definition_order() .any(|assoc_item| { if assoc_item.fn_has_self_parameter { - let self_ty = cx.tcx.bound_fn_sig(assoc_item.def_id).subst_identity().skip_binder().inputs()[0]; + let self_ty = cx.tcx.fn_sig(assoc_item.def_id).subst_identity().skip_binder().inputs()[0]; matches!(self_ty.kind(), ty::Ref(_, _, Mutability::Mut)) } else { false diff --git a/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs b/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs index 4f0371c027c25..cdb5e22e75982 100644 --- a/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs +++ b/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs @@ -58,7 +58,7 @@ fn check_raw_ptr<'tcx>( }, hir::ExprKind::MethodCall(_, recv, args, _) => { let def_id = typeck.type_dependent_def_id(e.hir_id).unwrap(); - if cx.tcx.bound_fn_sig(def_id).skip_binder().skip_binder().unsafety == hir::Unsafety::Unsafe { + if cx.tcx.fn_sig(def_id).skip_binder().skip_binder().unsafety == hir::Unsafety::Unsafe { check_arg(cx, &raw_ptrs, recv); for arg in args { check_arg(cx, &raw_ptrs, arg); diff --git a/clippy_lints/src/functions/result.rs b/clippy_lints/src/functions/result.rs index 21de62581f1c3..fa2a9b30c058d 100644 --- a/clippy_lints/src/functions/result.rs +++ b/clippy_lints/src/functions/result.rs @@ -21,7 +21,7 @@ fn result_err_ty<'tcx>( ) -> Option<(&'tcx hir::Ty<'tcx>, Ty<'tcx>)> { if !in_external_macro(cx.sess(), item_span) && let hir::FnRetTy::Return(hir_ty) = decl.output - && let ty = cx.tcx.erase_late_bound_regions(cx.tcx.bound_fn_sig(id.into()).subst_identity().output()) + && let ty = cx.tcx.erase_late_bound_regions(cx.tcx.fn_sig(id).subst_identity().output()) && is_type_diagnostic_item(cx, ty, sym::Result) && let ty::Adt(_, substs) = ty.kind() { diff --git a/clippy_lints/src/inherent_to_string.rs b/clippy_lints/src/inherent_to_string.rs index d971684a3aa9c..612c3ea8fdfd8 100644 --- a/clippy_lints/src/inherent_to_string.rs +++ b/clippy_lints/src/inherent_to_string.rs @@ -124,7 +124,7 @@ fn show_lint(cx: &LateContext<'_>, item: &ImplItem<'_>) { .expect("Failed to get trait ID of `Display`!"); // Get the real type of 'self' - let self_type = 
cx.tcx.bound_fn_sig(item.owner_id.to_def_id()).skip_binder().input(0); + let self_type = cx.tcx.fn_sig(item.owner_id).skip_binder().input(0); let self_type = self_type.skip_binder().peel_refs(); // Emit either a warning or an error diff --git a/clippy_lints/src/iter_not_returning_iterator.rs b/clippy_lints/src/iter_not_returning_iterator.rs index 131af2fd9c381..7557a9ce13f11 100644 --- a/clippy_lints/src/iter_not_returning_iterator.rs +++ b/clippy_lints/src/iter_not_returning_iterator.rs @@ -66,7 +66,7 @@ impl<'tcx> LateLintPass<'tcx> for IterNotReturningIterator { fn check_sig(cx: &LateContext<'_>, name: &str, sig: &FnSig<'_>, fn_id: LocalDefId) { if sig.decl.implicit_self.has_implicit_self() { - let ret_ty = cx.tcx.erase_late_bound_regions(cx.tcx.bound_fn_sig(fn_id.into()).subst_identity().output()); + let ret_ty = cx.tcx.erase_late_bound_regions(cx.tcx.fn_sig(fn_id).subst_identity().output()); let ret_ty = cx .tcx .try_normalize_erasing_regions(cx.param_env, ret_ty) diff --git a/clippy_lints/src/len_zero.rs b/clippy_lints/src/len_zero.rs index 121d6b9f0fe7e..80ed2862a419a 100644 --- a/clippy_lints/src/len_zero.rs +++ b/clippy_lints/src/len_zero.rs @@ -144,7 +144,7 @@ impl<'tcx> LateLintPass<'tcx> for LenZero { if let Some(local_id) = ty_id.as_local(); let ty_hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_id); if !is_lint_allowed(cx, LEN_WITHOUT_IS_EMPTY, ty_hir_id); - if let Some(output) = parse_len_output(cx, cx.tcx.bound_fn_sig(item.owner_id.to_def_id()).subst_identity().skip_binder()); + if let Some(output) = parse_len_output(cx, cx.tcx.fn_sig(item.owner_id).subst_identity().skip_binder()); then { let (name, kind) = match cx.tcx.hir().find(ty_hir_id) { Some(Node::ForeignItem(x)) => (x.ident.name, "extern type"), @@ -196,7 +196,7 @@ fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, trait_items fn is_named_self(cx: &LateContext<'_>, item: &TraitItemRef, name: Symbol) -> bool { item.ident.name == name && if let AssocItemKind::Fn { has_self } = item.kind { - has_self && { cx.tcx.bound_fn_sig(item.id.owner_id.to_def_id()).skip_binder().inputs().skip_binder().len() == 1 } + has_self && { cx.tcx.fn_sig(item.id.owner_id).skip_binder().inputs().skip_binder().len() == 1 } } else { false } @@ -224,7 +224,7 @@ fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, trait_items .any(|i| { i.kind == ty::AssocKind::Fn && i.fn_has_self_parameter - && cx.tcx.bound_fn_sig(i.def_id).skip_binder().inputs().skip_binder().len() == 1 + && cx.tcx.fn_sig(i.def_id).skip_binder().inputs().skip_binder().len() == 1 }); if !is_empty_method_found { @@ -342,7 +342,7 @@ fn check_for_is_empty<'tcx>( ), Some(is_empty) if !(is_empty.fn_has_self_parameter - && check_is_empty_sig(cx.tcx.bound_fn_sig(is_empty.def_id).subst_identity().skip_binder(), self_kind, output)) => + && check_is_empty_sig(cx.tcx.fn_sig(is_empty.def_id).subst_identity().skip_binder(), self_kind, output)) => { ( format!( @@ -473,7 +473,7 @@ fn has_is_empty(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { /// Gets an `AssocItem` and return true if it matches `is_empty(self)`. 
fn is_is_empty(cx: &LateContext<'_>, item: &ty::AssocItem) -> bool { if item.kind == ty::AssocKind::Fn { - let sig = cx.tcx.bound_fn_sig(item.def_id).skip_binder(); + let sig = cx.tcx.fn_sig(item.def_id).skip_binder(); let ty = sig.skip_binder(); ty.inputs().len() == 1 } else { diff --git a/clippy_lints/src/loops/needless_range_loop.rs b/clippy_lints/src/loops/needless_range_loop.rs index 3e025bc0e7160..e6ed4ea7a5db5 100644 --- a/clippy_lints/src/loops/needless_range_loop.rs +++ b/clippy_lints/src/loops/needless_range_loop.rs @@ -370,7 +370,7 @@ impl<'a, 'tcx> Visitor<'tcx> for VarVisitor<'a, 'tcx> { ExprKind::MethodCall(_, receiver, args, _) => { let def_id = self.cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap(); for (ty, expr) in iter::zip( - self.cx.tcx.bound_fn_sig(def_id).subst_identity().inputs().skip_binder(), + self.cx.tcx.fn_sig(def_id).subst_identity().inputs().skip_binder(), std::iter::once(receiver).chain(args.iter()), ) { self.prefer_mutable = false; diff --git a/clippy_lints/src/map_unit_fn.rs b/clippy_lints/src/map_unit_fn.rs index a179dd091e421..edcab6968cbe0 100644 --- a/clippy_lints/src/map_unit_fn.rs +++ b/clippy_lints/src/map_unit_fn.rs @@ -104,7 +104,7 @@ fn is_unit_function(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool { let ty = cx.typeck_results().expr_ty(expr); if let ty::FnDef(id, _) = *ty.kind() { - if let Some(fn_type) = cx.tcx.bound_fn_sig(id).subst_identity().no_bound_vars() { + if let Some(fn_type) = cx.tcx.fn_sig(id).subst_identity().no_bound_vars() { return is_unit_type(fn_type.output()); } } diff --git a/clippy_lints/src/methods/expect_fun_call.rs b/clippy_lints/src/methods/expect_fun_call.rs index 3f670ebc9178b..aed0ad5d9b5a7 100644 --- a/clippy_lints/src/methods/expect_fun_call.rs +++ b/clippy_lints/src/methods/expect_fun_call.rs @@ -70,7 +70,7 @@ pub(super) fn check<'tcx>( if let hir::ExprKind::Path(ref p) = fun.kind { match cx.qpath_res(p, fun.hir_id) { hir::def::Res::Def(hir::def::DefKind::Fn | hir::def::DefKind::AssocFn, def_id) => matches!( - cx.tcx.bound_fn_sig(def_id).subst_identity().output().skip_binder().kind(), + cx.tcx.fn_sig(def_id).subst_identity().output().skip_binder().kind(), ty::Ref(re, ..) if re.is_static(), ), _ => false, @@ -84,7 +84,7 @@ pub(super) fn check<'tcx>( .type_dependent_def_id(arg.hir_id) .map_or(false, |method_id| { matches!( - cx.tcx.bound_fn_sig(method_id).subst_identity().output().skip_binder().kind(), + cx.tcx.fn_sig(method_id).subst_identity().output().skip_binder().kind(), ty::Ref(re, ..) if re.is_static() ) }) diff --git a/clippy_lints/src/methods/mod.rs b/clippy_lints/src/methods/mod.rs index 6002ef1340bea..a7e45d5126ab0 100644 --- a/clippy_lints/src/methods/mod.rs +++ b/clippy_lints/src/methods/mod.rs @@ -3352,7 +3352,7 @@ impl<'tcx> LateLintPass<'tcx> for Methods { let implements_trait = matches!(item.kind, hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. 
})); if let hir::ImplItemKind::Fn(ref sig, id) = impl_item.kind { - let method_sig = cx.tcx.bound_fn_sig(impl_item.owner_id.to_def_id()).subst_identity(); + let method_sig = cx.tcx.fn_sig(impl_item.owner_id).subst_identity(); let method_sig = cx.tcx.erase_late_bound_regions(method_sig); let first_arg_ty_opt = method_sig.inputs().iter().next().copied(); // if this impl block implements a trait, lint in trait definition instead diff --git a/clippy_lints/src/methods/needless_collect.rs b/clippy_lints/src/methods/needless_collect.rs index 1a1715d03a7cb..82d3b830d4f39 100644 --- a/clippy_lints/src/methods/needless_collect.rs +++ b/clippy_lints/src/methods/needless_collect.rs @@ -137,7 +137,7 @@ pub(super) fn check<'tcx>( /// Checks if the given method call matches the expected signature of `([&[mut]] self) -> bool` fn is_is_empty_sig(cx: &LateContext<'_>, call_id: HirId) -> bool { cx.typeck_results().type_dependent_def_id(call_id).map_or(false, |id| { - let sig = cx.tcx.bound_fn_sig(id).subst_identity().skip_binder(); + let sig = cx.tcx.fn_sig(id).subst_identity().skip_binder(); sig.inputs().len() == 1 && sig.output().is_bool() }) } @@ -165,7 +165,7 @@ fn iterates_same_ty<'tcx>(cx: &LateContext<'tcx>, iter_ty: Ty<'tcx>, collect_ty: fn is_contains_sig(cx: &LateContext<'_>, call_id: HirId, iter_expr: &Expr<'_>) -> bool { let typeck = cx.typeck_results(); if let Some(id) = typeck.type_dependent_def_id(call_id) - && let sig = cx.tcx.bound_fn_sig(id).subst_identity() + && let sig = cx.tcx.fn_sig(id).subst_identity() && sig.skip_binder().output().is_bool() && let [_, search_ty] = *sig.skip_binder().inputs() && let ty::Ref(_, search_ty, Mutability::Not) = *cx.tcx.erase_late_bound_regions(sig.rebind(search_ty)).kind() diff --git a/clippy_lints/src/methods/unnecessary_to_owned.rs b/clippy_lints/src/methods/unnecessary_to_owned.rs index 8036e787aaecc..12e053cb2134d 100644 --- a/clippy_lints/src/methods/unnecessary_to_owned.rs +++ b/clippy_lints/src/methods/unnecessary_to_owned.rs @@ -246,7 +246,7 @@ fn check_other_call_arg<'tcx>( if_chain! 
{ if let Some((maybe_call, maybe_arg)) = skip_addr_of_ancestors(cx, expr); if let Some((callee_def_id, _, recv, call_args)) = get_callee_substs_and_args(cx, maybe_call); - let fn_sig = cx.tcx.bound_fn_sig(callee_def_id).subst_identity().skip_binder(); + let fn_sig = cx.tcx.fn_sig(callee_def_id).subst_identity().skip_binder(); if let Some(i) = recv.into_iter().chain(call_args).position(|arg| arg.hir_id == maybe_arg.hir_id); if let Some(input) = fn_sig.inputs().get(i); let (input, n_refs) = peel_mid_ty_refs(*input); @@ -386,7 +386,7 @@ fn can_change_type<'a>(cx: &LateContext<'a>, mut expr: &'a Expr<'a>, mut ty: Ty< Node::Expr(parent_expr) => { if let Some((callee_def_id, call_substs, recv, call_args)) = get_callee_substs_and_args(cx, parent_expr) { - let fn_sig = cx.tcx.bound_fn_sig(callee_def_id).subst_identity().skip_binder(); + let fn_sig = cx.tcx.fn_sig(callee_def_id).subst_identity().skip_binder(); if let Some(arg_index) = recv.into_iter().chain(call_args).position(|arg| arg.hir_id == expr.hir_id) && let Some(param_ty) = fn_sig.inputs().get(arg_index) && let ty::Param(ParamTy { index: param_index , ..}) = param_ty.kind() diff --git a/clippy_lints/src/mut_key.rs b/clippy_lints/src/mut_key.rs index a2868883673f5..16947cd5e3548 100644 --- a/clippy_lints/src/mut_key.rs +++ b/clippy_lints/src/mut_key.rs @@ -138,7 +138,7 @@ impl MutableKeyType { fn check_sig(&self, cx: &LateContext<'_>, item_hir_id: hir::HirId, decl: &hir::FnDecl<'_>) { let fn_def_id = cx.tcx.hir().local_def_id(item_hir_id); - let fn_sig = cx.tcx.bound_fn_sig(fn_def_id.into()).subst_identity(); + let fn_sig = cx.tcx.fn_sig(fn_def_id).subst_identity(); for (hir_ty, ty) in iter::zip(decl.inputs, fn_sig.inputs().skip_binder()) { self.check_ty_(cx, hir_ty.span, *ty); } diff --git a/clippy_lints/src/needless_pass_by_value.rs b/clippy_lints/src/needless_pass_by_value.rs index e3d25603a7157..25ec9082c7076 100644 --- a/clippy_lints/src/needless_pass_by_value.rs +++ b/clippy_lints/src/needless_pass_by_value.rs @@ -147,7 +147,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { ctx }; - let fn_sig = cx.tcx.bound_fn_sig(fn_def_id.into()).subst_identity(); + let fn_sig = cx.tcx.fn_sig(fn_def_id).subst_identity(); let fn_sig = cx.tcx.erase_late_bound_regions(fn_sig); for (idx, ((input, &ty), arg)) in decl.inputs.iter().zip(fn_sig.inputs()).zip(body.params).enumerate() { diff --git a/clippy_lints/src/pass_by_ref_or_value.rs b/clippy_lints/src/pass_by_ref_or_value.rs index 5512109c6cbb1..954eeba751ffa 100644 --- a/clippy_lints/src/pass_by_ref_or_value.rs +++ b/clippy_lints/src/pass_by_ref_or_value.rs @@ -143,7 +143,7 @@ impl<'tcx> PassByRefOrValue { return; } - let fn_sig = cx.tcx.bound_fn_sig(def_id.into()).subst_identity(); + let fn_sig = cx.tcx.fn_sig(def_id).subst_identity(); let fn_body = cx.enclosing_body.map(|id| cx.tcx.hir().body(id)); // Gather all the lifetimes found in the output type which may affect whether diff --git a/clippy_lints/src/ptr.rs b/clippy_lints/src/ptr.rs index 0a2d35015f573..8afe286fbd5dc 100644 --- a/clippy_lints/src/ptr.rs +++ b/clippy_lints/src/ptr.rs @@ -164,7 +164,7 @@ impl<'tcx> LateLintPass<'tcx> for Ptr { check_mut_from_ref(cx, sig, None); for arg in check_fn_args( cx, - cx.tcx.bound_fn_sig(item.owner_id.to_def_id()).subst_identity().skip_binder().inputs(), + cx.tcx.fn_sig(item.owner_id).subst_identity().skip_binder().inputs(), sig.decl.inputs, &[], ) @@ -217,7 +217,7 @@ impl<'tcx> LateLintPass<'tcx> for Ptr { check_mut_from_ref(cx, sig, Some(body)); let decl = sig.decl; - let sig = 
cx.tcx.bound_fn_sig(item_id.to_def_id()).subst_identity().skip_binder(); + let sig = cx.tcx.fn_sig(item_id).subst_identity().skip_binder(); let lint_args: Vec<_> = check_fn_args(cx, sig.inputs(), decl.inputs, body.params) .filter(|arg| !is_trait_item || arg.mutability() == Mutability::Not) .collect(); @@ -624,7 +624,7 @@ fn check_ptr_arg_usage<'tcx>(cx: &LateContext<'tcx>, body: &'tcx Body<'_>, args: return; }; - match *self.cx.tcx.bound_fn_sig(id).subst_identity().skip_binder().inputs()[i].peel_refs().kind() { + match *self.cx.tcx.fn_sig(id).subst_identity().skip_binder().inputs()[i].peel_refs().kind() { ty::Dynamic(preds, _, _) if !matches_preds(self.cx, args.deref_ty.ty(self.cx), preds) => { set_skip_flag(); }, diff --git a/clippy_lints/src/returns.rs b/clippy_lints/src/returns.rs index f3c5033060433..dc1275a3686d0 100644 --- a/clippy_lints/src/returns.rs +++ b/clippy_lints/src/returns.rs @@ -287,7 +287,7 @@ fn last_statement_borrows<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) if let Some(def_id) = fn_def_id(cx, e) && cx .tcx - .bound_fn_sig(def_id) + .fn_sig(def_id) .subst_identity() .skip_binder() .output() diff --git a/clippy_lints/src/unit_return_expecting_ord.rs b/clippy_lints/src/unit_return_expecting_ord.rs index 5df26d8b0a38c..289ca4e9bed3c 100644 --- a/clippy_lints/src/unit_return_expecting_ord.rs +++ b/clippy_lints/src/unit_return_expecting_ord.rs @@ -76,7 +76,7 @@ fn get_projection_pred<'tcx>( fn get_args_to_check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Vec<(usize, String)> { let mut args_to_check = Vec::new(); if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) { - let fn_sig = cx.tcx.bound_fn_sig(def_id).subst_identity(); + let fn_sig = cx.tcx.fn_sig(def_id).subst_identity(); let generics = cx.tcx.predicates_of(def_id); let fn_mut_preds = get_trait_predicates_for_trait_id(cx, generics, cx.tcx.lang_items().fn_mut_trait()); let ord_preds = get_trait_predicates_for_trait_id(cx, generics, cx.tcx.get_diagnostic_item(sym::Ord)); diff --git a/clippy_lints/src/unit_types/let_unit_value.rs b/clippy_lints/src/unit_types/let_unit_value.rs index 681e59a1575d4..d6167a62169d4 100644 --- a/clippy_lints/src/unit_types/let_unit_value.rs +++ b/clippy_lints/src/unit_types/let_unit_value.rs @@ -156,7 +156,7 @@ fn needs_inferred_result_ty( }, _ => return false, }; - let sig = cx.tcx.bound_fn_sig(id).subst_identity().skip_binder(); + let sig = cx.tcx.fn_sig(id).subst_identity().skip_binder(); if let ty::Param(output_ty) = *sig.output().kind() { let args: Vec<&Expr<'_>> = if let Some(receiver) = receiver { std::iter::once(receiver).chain(args.iter()).collect() diff --git a/clippy_lints/src/use_self.rs b/clippy_lints/src/use_self.rs index 09324fd92943c..3cd35838961f6 100644 --- a/clippy_lints/src/use_self.rs +++ b/clippy_lints/src/use_self.rs @@ -146,7 +146,7 @@ impl<'tcx> LateLintPass<'tcx> for UseSelf { .associated_item(impl_item.owner_id) .trait_item_def_id .expect("impl method matches a trait method"); - let trait_method_sig = cx.tcx.bound_fn_sig(trait_method).subst_identity(); + let trait_method_sig = cx.tcx.fn_sig(trait_method).subst_identity(); let trait_method_sig = cx.tcx.erase_late_bound_regions(trait_method_sig); // `impl_inputs_outputs` is an iterator over the types (`hir::Ty`) declared in the diff --git a/clippy_utils/src/eager_or_lazy.rs b/clippy_utils/src/eager_or_lazy.rs index 38588e32dbfe9..5c89dd3e49f41 100644 --- a/clippy_utils/src/eager_or_lazy.rs +++ b/clippy_utils/src/eager_or_lazy.rs @@ -79,7 +79,7 @@ fn fn_eagerness(cx: 
&LateContext<'_>, fn_id: DefId, name: Symbol, have_one_arg: && subs.types().all(|x| matches!(x.peel_refs().kind(), ty::Param(_))) { // Limit the function to either `(self) -> bool` or `(&self) -> bool` - match &**cx.tcx.bound_fn_sig(fn_id).subst_identity().skip_binder().inputs_and_output { + match &**cx.tcx.fn_sig(fn_id).subst_identity().skip_binder().inputs_and_output { [arg, res] if !arg.is_mutable_ptr() && arg.peel_refs() == ty && res.is_bool() => NoChange, _ => Lazy, } diff --git a/clippy_utils/src/lib.rs b/clippy_utils/src/lib.rs index ccc5f3503d2f5..23791ebe92254 100644 --- a/clippy_utils/src/lib.rs +++ b/clippy_utils/src/lib.rs @@ -1379,7 +1379,7 @@ pub fn get_enclosing_loop_or_multi_call_closure<'tcx>( .chain(args.iter()) .position(|arg| arg.hir_id == id)?; let id = cx.typeck_results().type_dependent_def_id(e.hir_id)?; - let ty = cx.tcx.bound_fn_sig(id).subst_identity().skip_binder().inputs()[i]; + let ty = cx.tcx.fn_sig(id).subst_identity().skip_binder().inputs()[i]; ty_is_fn_once_param(cx.tcx, ty, cx.tcx.param_env(id).caller_bounds()).then_some(()) }, _ => None, @@ -1580,14 +1580,14 @@ pub fn is_direct_expn_of(span: Span, name: &str) -> Option { /// Convenience function to get the return type of a function. pub fn return_ty<'tcx>(cx: &LateContext<'tcx>, fn_item: hir::HirId) -> Ty<'tcx> { let fn_def_id = cx.tcx.hir().local_def_id(fn_item); - let ret_ty = cx.tcx.bound_fn_sig(fn_def_id.into()).subst_identity().output(); + let ret_ty = cx.tcx.fn_sig(fn_def_id).subst_identity().output(); cx.tcx.erase_late_bound_regions(ret_ty) } /// Convenience function to get the nth argument type of a function. pub fn nth_arg<'tcx>(cx: &LateContext<'tcx>, fn_item: hir::HirId, nth: usize) -> Ty<'tcx> { let fn_def_id = cx.tcx.hir().local_def_id(fn_item); - let arg = cx.tcx.bound_fn_sig(fn_def_id.into()).subst_identity().input(nth); + let arg = cx.tcx.fn_sig(fn_def_id).subst_identity().input(nth); cx.tcx.erase_late_bound_regions(arg) } diff --git a/clippy_utils/src/qualify_min_const_fn.rs b/clippy_utils/src/qualify_min_const_fn.rs index 1552e343582ee..13de780b71095 100644 --- a/clippy_utils/src/qualify_min_const_fn.rs +++ b/clippy_utils/src/qualify_min_const_fn.rs @@ -55,7 +55,7 @@ pub fn is_min_const_fn<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, msrv: &Msrv) // impl trait is gone in MIR, so check the return type manually check_ty( tcx, - tcx.bound_fn_sig(def_id).subst_identity().output().skip_binder(), + tcx.fn_sig(def_id).subst_identity().output().skip_binder(), body.local_decls.iter().next().unwrap().source_info.span, )?; diff --git a/clippy_utils/src/sugg.rs b/clippy_utils/src/sugg.rs index d6a698bbeaa8a..8d767f9d44d3a 100644 --- a/clippy_utils/src/sugg.rs +++ b/clippy_utils/src/sugg.rs @@ -885,7 +885,7 @@ impl<'tcx> DerefDelegate<'_, 'tcx> { .cx .typeck_results() .type_dependent_def_id(parent_expr.hir_id) - .map(|did| self.cx.tcx.bound_fn_sig(did).subst_identity().skip_binder()) + .map(|did| self.cx.tcx.fn_sig(did).subst_identity().skip_binder()) { std::iter::once(receiver) .chain(call_args.iter()) diff --git a/clippy_utils/src/ty.rs b/clippy_utils/src/ty.rs index 14fc2c1001704..1d5d55d5b54cf 100644 --- a/clippy_utils/src/ty.rs +++ b/clippy_utils/src/ty.rs @@ -628,7 +628,7 @@ impl<'tcx> ExprFnSig<'tcx> { /// If the expression is function like, get the signature for it. 
pub fn expr_sig<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> Option> { if let Res::Def(DefKind::Fn | DefKind::Ctor(_, CtorKind::Fn) | DefKind::AssocFn, id) = path_res(cx, expr) { - Some(ExprFnSig::Sig(cx.tcx.bound_fn_sig(id).subst_identity(), Some(id))) + Some(ExprFnSig::Sig(cx.tcx.fn_sig(id).subst_identity(), Some(id))) } else { ty_sig(cx, cx.typeck_results().expr_ty_adjusted(expr).peel_refs()) } @@ -646,7 +646,7 @@ pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option Some(ExprFnSig::Sig(cx.tcx.bound_fn_sig(id).subst(cx.tcx, subs), Some(id))), + ty::FnDef(id, subs) => Some(ExprFnSig::Sig(cx.tcx.fn_sig(id).subst(cx.tcx, subs), Some(id))), ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => { sig_from_bounds(cx, ty, cx.tcx.item_bounds(def_id).subst(cx.tcx, substs), cx.tcx.opt_parent(def_id)) }, diff --git a/clippy_utils/src/visitors.rs b/clippy_utils/src/visitors.rs index 1680a40206a3a..d18b62d1bf16a 100644 --- a/clippy_utils/src/visitors.rs +++ b/clippy_utils/src/visitors.rs @@ -392,12 +392,12 @@ pub fn is_expr_unsafe<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> bool { .cx .typeck_results() .type_dependent_def_id(e.hir_id) - .map_or(false, |id| self.cx.tcx.bound_fn_sig(id).skip_binder().unsafety() == Unsafety::Unsafe) => + .map_or(false, |id| self.cx.tcx.fn_sig(id).skip_binder().unsafety() == Unsafety::Unsafe) => { self.is_unsafe = true; }, ExprKind::Call(func, _) => match *self.cx.typeck_results().expr_ty(func).peel_refs().kind() { - ty::FnDef(id, _) if self.cx.tcx.bound_fn_sig(id).skip_binder().unsafety() == Unsafety::Unsafe => self.is_unsafe = true, + ty::FnDef(id, _) if self.cx.tcx.fn_sig(id).skip_binder().unsafety() == Unsafety::Unsafe => self.is_unsafe = true, ty::FnPtr(sig) if sig.unsafety() == Unsafety::Unsafe => self.is_unsafe = true, _ => walk_expr(self, e), }, From 21575908c62f563fbc6d161c4fa44b2000a04e2f Mon Sep 17 00:00:00 2001 From: Kyle Matsuda Date: Thu, 19 Jan 2023 12:09:01 -0700 Subject: [PATCH 159/501] add EarlyBinder::no_bound_vars --- src/main_shim.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main_shim.rs b/src/main_shim.rs index bc1b1ec7cd597..70cf28267d70c 100644 --- a/src/main_shim.rs +++ b/src/main_shim.rs @@ -46,7 +46,7 @@ pub(crate) fn maybe_create_entry_wrapper( is_main_fn: bool, sigpipe: u8, ) { - let main_ret_ty = tcx.fn_sig(rust_main_def_id).subst_identity().output(); + let main_ret_ty = tcx.fn_sig(rust_main_def_id).no_bound_vars().unwrap().output(); // Given that `main()` has no arguments, // then its return type cannot have // late-bound regions, since late-bound From b64a20930c57009da999bc280327d63d7ce8e987 Mon Sep 17 00:00:00 2001 From: Mara Bos Date: Fri, 13 Jan 2023 13:56:51 +0100 Subject: [PATCH 160/501] Update clippy for restructured format flags fields. --- clippy_utils/src/macros.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/clippy_utils/src/macros.rs b/clippy_utils/src/macros.rs index a8f8da67b5171..659063b97e74a 100644 --- a/clippy_utils/src/macros.rs +++ b/clippy_utils/src/macros.rs @@ -711,8 +711,8 @@ pub struct FormatSpec<'tcx> { pub fill: Option, /// Optionally specified alignment. pub align: Alignment, - /// Packed version of various flags provided, see [`rustc_parse_format::Flag`]. - pub flags: u32, + /// Whether all flag options are set to default (no flags specified). + pub no_flags: bool, /// Represents either the maximum width or the integer precision. 
pub precision: Count<'tcx>, /// The minimum width, will be padded according to `width`/`align` @@ -728,7 +728,7 @@ impl<'tcx> FormatSpec<'tcx> { Some(Self { fill: spec.fill, align: spec.align, - flags: spec.flags, + no_flags: spec.sign.is_none() && !spec.alternate && !spec.zero_pad && spec.debug_hex.is_none(), precision: Count::new( FormatParamUsage::Precision, spec.precision, @@ -773,7 +773,7 @@ impl<'tcx> FormatSpec<'tcx> { self.width.is_implied() && self.precision.is_implied() && self.align == Alignment::AlignUnknown - && self.flags == 0 + && self.no_flags } } From 54da0bfbf0501a4954481c1de75ff33a6c12e140 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 27 Jan 2023 11:06:41 +0100 Subject: [PATCH 161/501] Record method resolution for call expressions --- crates/hir-expand/src/name.rs | 1 + crates/hir-ty/src/builder.rs | 17 ++++- crates/hir-ty/src/infer/expr.rs | 14 +++- crates/hir-ty/src/infer/unify.rs | 25 +++++-- crates/hir-ty/src/tests/method_resolution.rs | 9 ++- crates/hir-ty/src/tests/traits.rs | 69 ++++++++------------ crates/hir/src/lib.rs | 39 ++++++----- crates/hir/src/source_analyzer.rs | 2 +- crates/ide-db/src/defs.rs | 2 +- crates/test-utils/src/minicore.rs | 18 ++++- 10 files changed, 119 insertions(+), 77 deletions(-) diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index b62f4fe770160..b3a71ca0a3746 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -347,6 +347,7 @@ pub mod known { recursion_limit, feature, // known methods of lang items + call_once, eq, ne, ge, diff --git a/crates/hir-ty/src/builder.rs b/crates/hir-ty/src/builder.rs index d5ef0c22dec83..8faef7bf71e98 100644 --- a/crates/hir-ty/src/builder.rs +++ b/crates/hir-ty/src/builder.rs @@ -63,7 +63,7 @@ impl TyBuilder { } fn build_internal(self) -> (D, Substitution) { - assert_eq!(self.vec.len(), self.param_kinds.len()); + assert_eq!(self.vec.len(), self.param_kinds.len(), "{:?}", &self.param_kinds); for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) { self.assert_match_kind(a, e); } @@ -282,6 +282,21 @@ impl TyBuilder { let (Tuple(size), subst) = self.build_internal(); TyKind::Tuple(size, subst).intern(Interner) } + + pub fn tuple_with(elements: I) -> Ty + where + I: IntoIterator, + ::IntoIter: ExactSizeIterator, + { + let elements = elements.into_iter(); + let len = elements.len(); + let mut b = + TyBuilder::new(Tuple(len), iter::repeat(ParamKind::Type).take(len).collect(), None); + for e in elements { + b = b.push(e); + } + b.build() + } } impl TyBuilder { diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 7ae85d20611a8..334434c30caaa 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -331,11 +331,18 @@ impl<'a> InferenceContext<'a> { derefed_callee.callable_sig(self.db).map_or(false, |sig| sig.is_varargs) || res.is_none(); let (param_tys, ret_ty) = match res { - Some(res) => { + Some((func, params, ret_ty)) => { let adjustments = auto_deref_adjust_steps(&derefs); // FIXME: Handle call adjustments for Fn/FnMut self.write_expr_adj(*callee, adjustments); - res + if let Some((trait_, func)) = func { + let subst = TyBuilder::subst_for_def(self.db, trait_, None) + .push(callee_ty.clone()) + .push(TyBuilder::tuple_with(params.iter().cloned())) + .build(); + self.write_method_resolution(tgt_expr, func, subst.clone()); + } + (params, ret_ty) } None => (Vec::new(), self.err_ty()), // FIXME diagnostic }; @@ -586,6 +593,7 @@ impl<'a> InferenceContext<'a> { 
self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) } Expr::Try { expr } => { + // FIXME: Note down method resolution her let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok()) } @@ -626,6 +634,7 @@ impl<'a> InferenceContext<'a> { Expr::UnaryOp { expr, op } => { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); let inner_ty = self.resolve_ty_shallow(&inner_ty); + // FIXME: Note down method resolution her match op { UnaryOp::Deref => { autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty()) @@ -732,6 +741,7 @@ impl<'a> InferenceContext<'a> { } } Expr::Index { base, index } => { + // FIXME: note down method resolution for the `index`/`index_mut` function let base_ty = self.infer_expr_inner(*base, &Expectation::none()); let index_ty = self.infer_expr(*index, &Expectation::none()); diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index e7ddd1591fe8a..46ed3533c8c7b 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -8,6 +8,7 @@ use chalk_ir::{ }; use chalk_solve::infer::ParameterEnaVariableExt; use ena::unify::UnifyKey; +use hir_def::{FunctionId, TraitId}; use hir_expand::name; use stdx::never; @@ -626,18 +627,26 @@ impl<'a> InferenceTable<'a> { } } - pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { + pub(crate) fn callable_sig( + &mut self, + ty: &Ty, + num_args: usize, + ) -> Option<(Option<(TraitId, FunctionId)>, Vec, Ty)> { match ty.callable_sig(self.db) { - Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())), + Some(sig) => Some((None, sig.params().to_vec(), sig.ret().clone())), None => self.callable_sig_from_fn_trait(ty, num_args), } } - fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { + fn callable_sig_from_fn_trait( + &mut self, + ty: &Ty, + num_args: usize, + ) -> Option<(Option<(TraitId, FunctionId)>, Vec, Ty)> { let krate = self.trait_env.krate; let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; - let output_assoc_type = - self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; + let trait_data = self.db.trait_data(fn_once_trait); + let output_assoc_type = trait_data.associated_type_by_name(&name![Output])?; let mut arg_tys = vec![]; let arg_ty = TyBuilder::tuple(num_args) @@ -675,7 +684,11 @@ impl<'a> InferenceTable<'a> { if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() { self.register_obligation(obligation.goal); let return_ty = self.normalize_projection_ty(projection); - Some((arg_tys, return_ty)) + Some(( + Some(fn_once_trait).zip(trait_data.method_by_name(&name!(call_once))), + arg_tys, + return_ty, + )) } else { None } diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs index 6c7a5329970d7..fb736982ed09f 100644 --- a/crates/hir-ty/src/tests/method_resolution.rs +++ b/crates/hir-ty/src/tests/method_resolution.rs @@ -986,14 +986,13 @@ fn main() { } #[test] -fn method_resolution_encountering_fn_type() { +fn explicit_fn_once_call_fn_item() { check_types( r#" -//- /main.rs +//- minicore: fn fn foo() {} -trait FnOnce { fn call(self); } -fn test() { foo.call(); } - //^^^^^^^^^^ {unknown} +fn test() { foo.call_once(); } + //^^^^^^^^^^^^^^^ () "#, ); } diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 88670364bde05..d8c3588acf635 100644 --- 
a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -1757,25 +1757,19 @@ fn test() { fn fn_trait() { check_infer_with_mismatches( r#" -trait FnOnce { - type Output; - - fn call_once(self, args: Args) -> >::Output; -} +//- minicore: fn fn test u128>(f: F) { f.call_once((1, 2)); }"#, expect![[r#" - 56..60 'self': Self - 62..66 'args': Args - 149..150 'f': F - 155..183 '{ ...2)); }': () - 161..162 'f': F - 161..180 'f.call...1, 2))': u128 - 173..179 '(1, 2)': (u32, u64) - 174..175 '1': u32 - 177..178 '2': u64 + 38..39 'f': F + 44..72 '{ ...2)); }': () + 50..51 'f': F + 50..69 'f.call...1, 2))': u128 + 62..68 '(1, 2)': (u32, u64) + 63..64 '1': u32 + 66..67 '2': u64 "#]], ); } @@ -1784,12 +1778,7 @@ fn test u128>(f: F) { fn fn_ptr_and_item() { check_infer_with_mismatches( r#" -#[lang="fn_once"] -trait FnOnce { - type Output; - - fn call_once(self, args: Args) -> Self::Output; -} +//- minicore: fn trait Foo { fn foo(&self) -> T; @@ -1815,27 +1804,25 @@ fn test() { opt.map(f); }"#, expect![[r#" - 74..78 'self': Self - 80..84 'args': Args - 139..143 'self': &Self - 243..247 'self': &Bar - 260..271 '{ loop {} }': (A1, R) - 262..269 'loop {}': ! - 267..269 '{}': () - 355..359 'self': Opt - 361..362 'f': F - 377..388 '{ loop {} }': Opt - 379..386 'loop {}': ! - 384..386 '{}': () - 402..518 '{ ...(f); }': () - 412..415 'bar': Bar u32> - 441..444 'bar': Bar u32> - 441..450 'bar.foo()': (u8, u32) - 461..464 'opt': Opt - 483..484 'f': fn(u8) -> u32 - 505..508 'opt': Opt - 505..515 'opt.map(f)': Opt - 513..514 'f': fn(u8) -> u32 + 28..32 'self': &Self + 132..136 'self': &Bar + 149..160 '{ loop {} }': (A1, R) + 151..158 'loop {}': ! + 156..158 '{}': () + 244..248 'self': Opt + 250..251 'f': F + 266..277 '{ loop {} }': Opt + 268..275 'loop {}': ! + 273..275 '{}': () + 291..407 '{ ...(f); }': () + 301..304 'bar': Bar u32> + 330..333 'bar': Bar u32> + 330..339 'bar.foo()': (u8, u32) + 350..353 'opt': Opt + 372..373 'f': fn(u8) -> u32 + 394..397 'opt': Opt + 394..404 'opt.map(f)': Opt + 402..403 'f': fn(u8) -> u32 "#]], ); } diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 4415bef4bb1c7..39589bf8bc2a1 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2411,7 +2411,7 @@ impl Local { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct DeriveHelper { pub(crate) derive: MacroId, - pub(crate) idx: usize, + pub(crate) idx: u32, } impl DeriveHelper { @@ -2421,15 +2421,18 @@ impl DeriveHelper { pub fn name(&self, db: &dyn HirDatabase) -> Name { match self.derive { - MacroId::Macro2Id(it) => { - db.macro2_data(it).helpers.as_deref().and_then(|it| it.get(self.idx)).cloned() - } + MacroId::Macro2Id(it) => db + .macro2_data(it) + .helpers + .as_deref() + .and_then(|it| it.get(self.idx as usize)) + .cloned(), MacroId::MacroRulesId(_) => None, MacroId::ProcMacroId(proc_macro) => db .proc_macro_data(proc_macro) .helpers .as_deref() - .and_then(|it| it.get(self.idx)) + .and_then(|it| it.get(self.idx as usize)) .cloned(), } .unwrap_or_else(|| Name::missing()) @@ -2440,7 +2443,7 @@ impl DeriveHelper { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct BuiltinAttr { krate: Option, - idx: usize, + idx: u32, } impl BuiltinAttr { @@ -2449,7 +2452,8 @@ impl BuiltinAttr { if let builtin @ Some(_) = Self::builtin(name) { return builtin; } - let idx = db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)?; + let idx = + db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)? 
as u32; Some(BuiltinAttr { krate: Some(krate.id), idx }) } @@ -2457,21 +2461,21 @@ impl BuiltinAttr { hir_def::builtin_attr::INERT_ATTRIBUTES .iter() .position(|tool| tool.name == name) - .map(|idx| BuiltinAttr { krate: None, idx }) + .map(|idx| BuiltinAttr { krate: None, idx: idx as u32 }) } pub fn name(&self, db: &dyn HirDatabase) -> SmolStr { // FIXME: Return a `Name` here match self.krate { - Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx].clone(), - None => SmolStr::new(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].name), + Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx as usize].clone(), + None => SmolStr::new(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx as usize].name), } } pub fn template(&self, _: &dyn HirDatabase) -> Option { match self.krate { Some(_) => None, - None => Some(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].template), + None => Some(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx as usize].template), } } } @@ -2479,7 +2483,7 @@ impl BuiltinAttr { #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct ToolModule { krate: Option, - idx: usize, + idx: u32, } impl ToolModule { @@ -2488,7 +2492,8 @@ impl ToolModule { if let builtin @ Some(_) = Self::builtin(name) { return builtin; } - let idx = db.crate_def_map(krate.id).registered_tools().iter().position(|it| it == name)?; + let idx = + db.crate_def_map(krate.id).registered_tools().iter().position(|it| it == name)? as u32; Some(ToolModule { krate: Some(krate.id), idx }) } @@ -2496,14 +2501,14 @@ impl ToolModule { hir_def::builtin_attr::TOOL_MODULES .iter() .position(|&tool| tool == name) - .map(|idx| ToolModule { krate: None, idx }) + .map(|idx| ToolModule { krate: None, idx: idx as u32 }) } pub fn name(&self, db: &dyn HirDatabase) -> SmolStr { // FIXME: Return a `Name` here match self.krate { - Some(krate) => db.crate_def_map(krate).registered_tools()[self.idx].clone(), - None => SmolStr::new(hir_def::builtin_attr::TOOL_MODULES[self.idx]), + Some(krate) => db.crate_def_map(krate).registered_tools()[self.idx as usize].clone(), + None => SmolStr::new(hir_def::builtin_attr::TOOL_MODULES[self.idx as usize]), } } } @@ -2831,7 +2836,7 @@ impl Impl { } } -#[derive(Clone, PartialEq, Eq, Debug)] +#[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct TraitRef { env: Arc, trait_ref: hir_ty::TraitRef, diff --git a/crates/hir/src/source_analyzer.rs b/crates/hir/src/source_analyzer.rs index 5e0c9933a7b6b..3b39e9fa919a1 100644 --- a/crates/hir/src/source_analyzer.rs +++ b/crates/hir/src/source_analyzer.rs @@ -628,7 +628,7 @@ impl SourceAnalyzer { { return Some(PathResolution::DeriveHelper(DeriveHelper { derive: *macro_id, - idx, + idx: idx as u32, })); } } diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index 6c13c039723b2..ed7f04fd8e7fe 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -34,8 +34,8 @@ pub enum Definition { TypeAlias(TypeAlias), BuiltinType(BuiltinType), SelfType(Impl), - Local(Local), GenericParam(GenericParam), + Local(Local), Label(Label), DeriveHelper(DeriveHelper), BuiltinAttr(BuiltinAttr), diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index dff60914409a5..63fd1726cf3f6 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -106,6 +106,11 @@ pub mod marker { impl Copy for &T {} } // endregion:copy + + // region:fn + #[lang = "tuple_trait"] + pub trait Tuple {} + // endregion:fn } // region:default @@ -329,19 +334,26 @@ pub mod ops { // 
region:fn mod function { + use crate::marker::Tuple; + #[lang = "fn"] #[fundamental] - pub trait Fn: FnMut {} + pub trait Fn: FnMut { + extern "rust-call" fn call(&self, args: Args) -> Self::Output; + } #[lang = "fn_mut"] #[fundamental] - pub trait FnMut: FnOnce {} + pub trait FnMut: FnOnce { + extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output; + } #[lang = "fn_once"] #[fundamental] - pub trait FnOnce { + pub trait FnOnce { #[lang = "fn_once_output"] type Output; + extern "rust-call" fn call_once(self, args: Args) -> Self::Output; } } pub use self::function::{Fn, FnMut, FnOnce}; From a7f81e3cdcdd65077893b170378179ec89eab347 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 27 Jan 2023 11:15:05 +0100 Subject: [PATCH 162/501] Record method resolution for index expressions --- crates/hir-ty/src/infer/expr.rs | 12 ++++++++++-- crates/hir-ty/src/tests/traits.rs | 6 ++---- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 334434c30caaa..1f8fa4a20f9b5 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -17,7 +17,7 @@ use hir_def::{ resolver::resolver_for_expr, ConstParamId, FieldId, ItemContainerId, Lookup, }; -use hir_expand::name::Name; +use hir_expand::name::{name, Name}; use stdx::always; use syntax::ast::RangeOp; @@ -741,7 +741,6 @@ impl<'a> InferenceContext<'a> { } } Expr::Index { base, index } => { - // FIXME: note down method resolution for the `index`/`index_mut` function let base_ty = self.infer_expr_inner(*base, &Expectation::none()); let index_ty = self.infer_expr(*index, &Expectation::none()); @@ -758,6 +757,15 @@ impl<'a> InferenceContext<'a> { adj.apply(&mut self.table, base_ty) }); self.write_expr_adj(*base, adj); + if let Some(func) = + self.db.trait_data(index_trait).method_by_name(&name!(index)) + { + let substs = TyBuilder::subst_for_def(self.db, index_trait, None) + .push(self_ty.clone()) + .push(index_ty.clone()) + .build(); + self.write_method_resolution(tgt_expr, func, substs.clone()); + } self.resolve_associated_type_with_params( self_ty, self.resolve_ops_index_output(), diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index d8c3588acf635..015085bde4563 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -2295,10 +2295,8 @@ fn unselected_projection_in_trait_env_no_cycle() { // this is not a cycle check_types( r#" -//- /main.rs -trait Index { - type Output; -} +//- minicore: index +use core::ops::Index; type Key = ::Key; From 4b4eabad3c3ce45812b08692b78d5c0d2e0d4dc2 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 27 Jan 2023 11:25:34 +0100 Subject: [PATCH 163/501] Record method resolution for try expressions --- crates/hir-ty/src/infer.rs | 26 +++++++++++++------------- crates/hir-ty/src/infer/expr.rs | 20 ++++++++++++++++---- 2 files changed, 29 insertions(+), 17 deletions(-) diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 4402c75947c7b..571b3e9686317 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -938,19 +938,24 @@ impl<'a> InferenceContext<'a> { self.db.trait_data(trait_).associated_type_by_name(&name![Item]) } - fn resolve_ops_try_ok(&self) -> Option { - let trait_ = self.resolve_lang_item(LangItem::Try)?.as_trait()?; + fn resolve_output_on(&self, trait_: TraitId) -> Option { self.db.trait_data(trait_).associated_type_by_name(&name![Output]) } + fn resolve_lang_trait(&self, lang: LangItem) -> 
Option { + self.resolve_lang_item(lang)?.as_trait() + } + + fn resolve_ops_try_output(&self) -> Option { + self.resolve_output_on(self.resolve_lang_trait(LangItem::Try)?) + } + fn resolve_ops_neg_output(&self) -> Option { - let trait_ = self.resolve_lang_item(LangItem::Neg)?.as_trait()?; - self.db.trait_data(trait_).associated_type_by_name(&name![Output]) + self.resolve_output_on(self.resolve_lang_trait(LangItem::Neg)?) } fn resolve_ops_not_output(&self) -> Option { - let trait_ = self.resolve_lang_item(LangItem::Not)?.as_trait()?; - self.db.trait_data(trait_).associated_type_by_name(&name![Output]) + self.resolve_output_on(self.resolve_lang_trait(LangItem::Not)?) } fn resolve_future_future_output(&self) -> Option { @@ -960,7 +965,7 @@ impl<'a> InferenceContext<'a> { .lookup(self.db.upcast()) .container else { return None }; - self.db.trait_data(trait_).associated_type_by_name(&name![Output]) + self.resolve_output_on(trait_) } fn resolve_boxed_box(&self) -> Option { @@ -998,13 +1003,8 @@ impl<'a> InferenceContext<'a> { Some(struct_.into()) } - fn resolve_ops_index(&self) -> Option { - self.resolve_lang_item(LangItem::Index)?.as_trait() - } - fn resolve_ops_index_output(&self) -> Option { - let trait_ = self.resolve_ops_index()?; - self.db.trait_data(trait_).associated_type_by_name(&name![Output]) + self.resolve_output_on(self.resolve_lang_trait(LangItem::Index)?) } fn resolve_va_list(&self) -> Option { diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 1f8fa4a20f9b5..7b9bf0c5cf3de 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -13,6 +13,7 @@ use hir_def::{ ArithOp, Array, BinaryOp, ClosureKind, Expr, ExprId, LabelId, Literal, Statement, UnaryOp, }, generics::TypeOrConstParamData, + lang_item::LangItem, path::{GenericArg, GenericArgs}, resolver::resolver_for_expr, ConstParamId, FieldId, ItemContainerId, Lookup, @@ -157,7 +158,8 @@ impl<'a> InferenceContext<'a> { } // The ok-ish type that is expected from the last expression - let ok_ty = self.resolve_associated_type(try_ty.clone(), self.resolve_ops_try_ok()); + let ok_ty = + self.resolve_associated_type(try_ty.clone(), self.resolve_ops_try_output()); self.with_breakable_ctx(BreakableKind::Block, ok_ty.clone(), None, |this| { this.infer_expr(*body, &Expectation::has_type(ok_ty)); @@ -593,9 +595,19 @@ impl<'a> InferenceContext<'a> { self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) } Expr::Try { expr } => { - // FIXME: Note down method resolution her let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); - self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok()) + if let Some(trait_) = self.resolve_lang_trait(LangItem::Try) { + if let Some(func) = self.db.trait_data(trait_).method_by_name(&name!(branch)) { + let subst = TyBuilder::subst_for_def(self.db, trait_, None) + .push(inner_ty.clone()) + .build(); + self.write_method_resolution(tgt_expr, func, subst.clone()); + } + let try_output = self.resolve_output_on(trait_); + self.resolve_associated_type(inner_ty, try_output) + } else { + self.err_ty() + } } Expr::Cast { expr, type_ref } => { // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary) @@ -744,7 +756,7 @@ impl<'a> InferenceContext<'a> { let base_ty = self.infer_expr_inner(*base, &Expectation::none()); let index_ty = self.infer_expr(*index, &Expectation::none()); - if let Some(index_trait) = self.resolve_ops_index() { + if let Some(index_trait) = 
self.resolve_lang_trait(LangItem::Index) { let canonicalized = self.canonicalize(base_ty.clone()); let receiver_adjustments = method_resolution::resolve_indexing_op( self.db, From 4ff6f3845fb68ab5e90cbc37ecb125a0436cb93f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 27 Jan 2023 11:40:44 +0100 Subject: [PATCH 164/501] Update ide test outputs --- crates/ide/src/inlay_hints/bind_pat.rs | 4 ++-- crates/ide/src/inlay_hints/chaining.rs | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index 971168aa28f6d..da44d952970b3 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -325,7 +325,7 @@ fn main(a: SliceIter<'_, Container>) { file_id: FileId( 1, ), - range: 2248..2256, + range: 2611..2619, }, ), tooltip: "", @@ -338,7 +338,7 @@ fn main(a: SliceIter<'_, Container>) { file_id: FileId( 1, ), - range: 2248..2256, + range: 2611..2619, }, ), tooltip: "", diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index b522450310674..222ee59be8b48 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -435,7 +435,7 @@ fn main() { file_id: FileId( 1, ), - range: 2248..2256, + range: 2611..2619, }, ), tooltip: "", @@ -455,7 +455,7 @@ fn main() { file_id: FileId( 1, ), - range: 2248..2256, + range: 2611..2619, }, ), tooltip: "", @@ -475,7 +475,7 @@ fn main() { file_id: FileId( 1, ), - range: 2248..2256, + range: 2611..2619, }, ), tooltip: "", From 68291906116feef3e74d9f4c8c5641eebd8dd9da Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 27 Jan 2023 12:33:40 +0100 Subject: [PATCH 165/501] Handle boolean scrutinees in match <-> if let replacement assists better --- .../src/handlers/replace_if_let_with_match.rs | 183 +++++++++++++++--- 1 file changed, 159 insertions(+), 24 deletions(-) diff --git a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs index 484c27387da94..457559656a42b 100644 --- a/crates/ide-assists/src/handlers/replace_if_let_with_match.rs +++ b/crates/ide-assists/src/handlers/replace_if_let_with_match.rs @@ -13,7 +13,7 @@ use syntax::{ edit::{AstNodeEdit, IndentLevel}, make, HasName, }, - AstNode, TextRange, + AstNode, TextRange, T, }; use crate::{ @@ -96,8 +96,9 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<' cond_bodies.push((cond, body)); } - if !pat_seen { - // Don't offer turning an if (chain) without patterns into a match + if !pat_seen && cond_bodies.len() != 1 { + // Don't offer turning an if (chain) without patterns into a match, + // unless its a simple `if cond { .. } (else { .. 
})` return None; } @@ -114,6 +115,11 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<' Either::Left(pat) => { make::match_arm(iter::once(pat), None, unwrap_trivial_block(body)) } + Either::Right(_) if !pat_seen => make::match_arm( + iter::once(make::literal_pat("true").into()), + None, + unwrap_trivial_block(body), + ), Either::Right(expr) => make::match_arm( iter::once(make::wildcard_pat().into()), Some(expr), @@ -144,31 +150,36 @@ fn make_else_arm( else_block: Option, conditionals: &[(Either, ast::BlockExpr)], ) -> ast::MatchArm { - if let Some(else_block) = else_block { - let pattern = if let [(Either::Left(pat), _)] = conditionals { - ctx.sema + let (pattern, expr) = if let Some(else_block) = else_block { + let pattern = match conditionals { + [(Either::Right(_), _)] => make::literal_pat("false").into(), + [(Either::Left(pat), _)] => match ctx + .sema .type_of_pat(pat) .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted())) - .zip(Some(pat)) - } else { - None - }; - let pattern = match pattern { - Some((it, pat)) => { - if does_pat_match_variant(pat, &it.sad_pattern()) { - it.happy_pattern_wildcard() - } else if does_nested_pattern(pat) { - make::wildcard_pat().into() - } else { - it.sad_pattern() + { + Some(it) => { + if does_pat_match_variant(pat, &it.sad_pattern()) { + it.happy_pattern_wildcard() + } else if does_nested_pattern(pat) { + make::wildcard_pat().into() + } else { + it.sad_pattern() + } } - } - None => make::wildcard_pat().into(), + None => make::wildcard_pat().into(), + }, + _ => make::wildcard_pat().into(), }; - make::match_arm(iter::once(pattern), None, unwrap_trivial_block(else_block)) + (pattern, unwrap_trivial_block(else_block)) } else { - make::match_arm(iter::once(make::wildcard_pat().into()), None, make::expr_unit()) - } + let pattern = match conditionals { + [(Either::Right(_), _)] => make::literal_pat("false").into(), + _ => make::wildcard_pat().into(), + }; + (pattern, make::expr_unit()) + }; + make::match_arm(iter::once(pattern), None, expr) } // Assist: replace_match_with_if_let @@ -231,7 +242,19 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<' } } - let condition = make::expr_let(if_let_pat, scrutinee); + let condition = match if_let_pat { + ast::Pat::LiteralPat(p) + if p.literal().map_or(false, |it| it.token().kind() == T![true]) => + { + scrutinee + } + ast::Pat::LiteralPat(p) + if p.literal().map_or(false, |it| it.token().kind() == T![false]) => + { + make::expr_prefix(T![!], scrutinee) + } + _ => make::expr_let(if_let_pat, scrutinee).into(), + }; let then_block = make_block_expr(then_expr.reset_indent()); let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) }; let if_let_expr = make::expr_if( @@ -327,6 +350,58 @@ fn main() { ) } + #[test] + fn test_if_with_match_no_else() { + check_assist( + replace_if_let_with_match, + r#" +pub fn foo(foo: bool) { + if foo$0 { + self.foo(); + } +} +"#, + r#" +pub fn foo(foo: bool) { + match foo { + true => { + self.foo(); + } + false => (), + } +} +"#, + ) + } + + #[test] + fn test_if_with_match_with_else() { + check_assist( + replace_if_let_with_match, + r#" +pub fn foo(foo: bool) { + if foo$0 { + self.foo(); + } else { + self.bar(); + } +} +"#, + r#" +pub fn foo(foo: bool) { + match foo { + true => { + self.foo(); + } + false => { + self.bar(); + } + } +} +"#, + ) + } + #[test] fn test_if_let_with_match_no_else() { check_assist( @@ -993,6 +1068,66 @@ fn main() { code() } } +"#, + ) + } + + #[test] + fn 
test_replace_match_with_if_bool() { + check_assist( + replace_match_with_if_let, + r#" +fn main() { + match$0 b { + true => (), + _ => code(), + } +} +"#, + r#" +fn main() { + if b { + () + } else { + code() + } +} +"#, + ); + check_assist( + replace_match_with_if_let, + r#" +fn main() { + match$0 b { + false => code(), + true => (), + } +} +"#, + r#" +fn main() { + if !b { + code() + } +} +"#, + ); + check_assist( + replace_match_with_if_let, + r#" +fn main() { + match$0 b { + false => (), + true => code(), + } +} +"#, + r#" +fn main() { + if b { + code() + } +} "#, ) } From e24fa2f915f896a68dd08e2c1689da1c27c5834b Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Fri, 27 Jan 2023 12:43:49 +0100 Subject: [PATCH 166/501] Rustup to rustc 1.69.0-nightly (d7948c843 2023-01-26) --- rust-toolchain | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust-toolchain b/rust-toolchain index 77345b9a17c6e..f7205cb98009a 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2023-01-20" +channel = "nightly-2023-01-27" components = ["rust-src", "rustc-dev", "llvm-tools-preview"] From b2598f4801364216e452c0eaef32931d49f38514 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 27 Jan 2023 13:49:28 +0100 Subject: [PATCH 167/501] Don't fail workspace loading if sysroot can't be found --- crates/project-model/src/sysroot.rs | 16 +-- crates/project-model/src/tests.rs | 2 +- crates/project-model/src/workspace.rs | 149 ++++++++++++++++++-------- crates/rust-analyzer/src/reload.rs | 33 +----- 4 files changed, 121 insertions(+), 79 deletions(-) diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index 046786580e7a4..e1dde12bad821 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -70,6 +70,10 @@ impl Sysroot { pub fn crates(&self) -> impl Iterator + ExactSizeIterator + '_ { self.crates.iter().map(|(id, _data)| id) } + + pub fn is_empty(&self) -> bool { + self.crates.is_empty() + } } impl Sysroot { @@ -79,8 +83,7 @@ impl Sysroot { let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; let sysroot_src_dir = discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?; - let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?; - Ok(res) + Ok(Sysroot::load(sysroot_dir, sysroot_src_dir)) } pub fn discover_rustc( @@ -97,11 +100,10 @@ impl Sysroot { let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| { format_err!("can't load standard library from sysroot {}", sysroot_dir.display()) })?; - let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?; - Ok(res) + Ok(Sysroot::load(sysroot_dir, sysroot_src_dir)) } - pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Result { + pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Sysroot { let mut sysroot = Sysroot { root: sysroot_dir, src_root: sysroot_src_dir, crates: Arena::default() }; @@ -152,14 +154,14 @@ impl Sysroot { } else { "" }; - anyhow::bail!( + tracing::error!( "could not find libcore in sysroot path `{}`{}", sysroot.src_root.as_path().display(), var_note, ); } - Ok(sysroot) + sysroot } fn by_name(&self, name: &str) -> Option { diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 19ee685691107..505b660f34493 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -81,7 +81,7 @@ fn get_fake_sysroot() -> Sysroot { // fake sysroot, so we give them both the 
same path: let sysroot_dir = AbsPathBuf::assert(sysroot_path); let sysroot_src_dir = sysroot_dir.clone(); - Sysroot::load(sysroot_dir, sysroot_src_dir).unwrap() + Sysroot::load(sysroot_dir, sysroot_src_dir) } fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index 755bf95199906..d784d3d0e9afb 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -63,7 +63,7 @@ pub struct PackageRoot { pub exclude: Vec, } -#[derive(Clone, Eq, PartialEq)] +#[derive(Clone)] pub enum ProjectWorkspace { /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`. Cargo { @@ -83,7 +83,6 @@ pub enum ProjectWorkspace { }, /// Project workspace was manually specified using a `rust-project.json` file. Json { project: ProjectJson, sysroot: Option, rustc_cfg: Vec }, - // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning. // That's not the end user experience we should strive for. // Ideally, you should be able to just open a random detached file in existing cargo projects, and get the basic features working. @@ -163,7 +162,7 @@ impl ProjectWorkspace { project_json, config.target.as_deref(), &config.extra_env, - )? + ) } ProjectManifest::CargoToml(cargo_toml) => { let cargo_version = utf8_stdout({ @@ -193,20 +192,27 @@ impl ProjectWorkspace { let sysroot = match &config.sysroot { Some(RustcSource::Path(path)) => { - Some(Sysroot::with_sysroot_dir(path.clone()).with_context(|| { - format!("Failed to find sysroot at {}.", path.display()) - })?) + match Sysroot::with_sysroot_dir(path.clone()) { + Ok(it) => Some(it), + Err(e) => { + tracing::error!(%e, "Failed to find sysroot at {}.", path.display()); + None + } + } + } + Some(RustcSource::Discover) => { + match Sysroot::discover(cargo_toml.parent(), &config.extra_env) { + Ok(it) => Some(it), + Err(e) => { + tracing::error!( + %e, + "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?", + cargo_toml.display() + ); + None + } + } } - Some(RustcSource::Discover) => Some( - Sysroot::discover(cargo_toml.parent(), &config.extra_env).with_context( - || { - format!( - "Failed to find sysroot for Cargo.toml file {}. 
Is rust-src installed?", - cargo_toml.display() - ) - }, - )?, - ), None => None, }; if let Some(sysroot) = &sysroot { @@ -225,18 +231,22 @@ impl ProjectWorkspace { } let rustc = match rustc_dir { - Some(rustc_dir) => Some({ - let meta = CargoWorkspace::fetch_metadata( - &rustc_dir, - cargo_toml.parent(), - config, - progress, - ) - .with_context(|| { - "Failed to read Cargo metadata for Rust sources".to_string() - })?; - CargoWorkspace::new(meta) - }), + Some(rustc_dir) => match CargoWorkspace::fetch_metadata( + &rustc_dir, + cargo_toml.parent(), + config, + progress, + ) { + Ok(meta) => Some(CargoWorkspace::new(meta)), + Err(e) => { + tracing::error!( + %e, + "Failed to read Cargo metadata from rustc source at {}", + rustc_dir.display() + ); + None + } + }, None => None, }; @@ -272,15 +282,14 @@ impl ProjectWorkspace { project_json: ProjectJson, target: Option<&str>, extra_env: &FxHashMap, - ) -> Result { + ) -> ProjectWorkspace { let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) { - (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?), + (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)), (Some(sysroot), None) => { // assume sysroot is structured like rustup's and guess `sysroot_src` let sysroot_src = sysroot.join("lib").join("rustlib").join("src").join("rust").join("library"); - - Some(Sysroot::load(sysroot, sysroot_src)?) + Some(Sysroot::load(sysroot, sysroot_src)) } (None, Some(sysroot_src)) => { // assume sysroot is structured like rustup's and guess `sysroot` @@ -288,7 +297,7 @@ impl ProjectWorkspace { for _ in 0..5 { sysroot.pop(); } - Some(Sysroot::load(sysroot, sysroot_src)?) + Some(Sysroot::load(sysroot, sysroot_src)) } (None, None) => None, }; @@ -297,7 +306,7 @@ impl ProjectWorkspace { } let rustc_cfg = rustc_cfg::get(None, target, extra_env); - Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg }) + ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg } } pub fn load_detached_files( @@ -305,18 +314,29 @@ impl ProjectWorkspace { config: &CargoConfig, ) -> Result { let sysroot = match &config.sysroot { - Some(RustcSource::Path(path)) => Some( - Sysroot::with_sysroot_dir(path.clone()) - .with_context(|| format!("Failed to find sysroot at {}.", path.display()))?, - ), + Some(RustcSource::Path(path)) => match Sysroot::with_sysroot_dir(path.clone()) { + Ok(it) => Some(it), + Err(e) => { + tracing::error!(%e, "Failed to find sysroot at {}.", path.display()); + None + } + }, Some(RustcSource::Discover) => { let dir = &detached_files .first() .and_then(|it| it.parent()) .ok_or_else(|| format_err!("No detached files to load"))?; - Some(Sysroot::discover(dir, &config.extra_env).with_context(|| { - format!("Failed to find sysroot in {}. Is rust-src installed?", dir.display()) - })?) + match Sysroot::discover(dir, &config.extra_env) { + Ok(it) => Some(it), + Err(e) => { + tracing::error!( + %e, + "Failed to find sysroot for {}. 
Is rust-src installed?", + dir.display() + ); + None + } + } } None => None, }; @@ -541,7 +561,7 @@ impl ProjectWorkspace { load_proc_macro, load, project, - sysroot, + sysroot.as_ref(), extra_env, Err("rust-project.json projects have no target layout set".into()), ), @@ -585,6 +605,49 @@ impl ProjectWorkspace { } crate_graph } + + pub fn eq_ignore_build_data(&self, other: &Self) -> bool { + match (self, other) { + ( + Self::Cargo { + cargo, + sysroot, + rustc, + rustc_cfg, + cfg_overrides, + toolchain, + build_scripts: _, + target_layout: _, + }, + Self::Cargo { + cargo: o_cargo, + sysroot: o_sysroot, + rustc: o_rustc, + rustc_cfg: o_rustc_cfg, + cfg_overrides: o_cfg_overrides, + toolchain: o_toolchain, + build_scripts: _, + target_layout: _, + }, + ) => { + cargo == o_cargo + && rustc == o_rustc + && rustc_cfg == o_rustc_cfg + && cfg_overrides == o_cfg_overrides + && toolchain == o_toolchain + && sysroot == o_sysroot + } + ( + Self::Json { project, sysroot, rustc_cfg }, + Self::Json { project: o_project, sysroot: o_sysroot, rustc_cfg: o_rustc_cfg }, + ) => project == o_project && rustc_cfg == o_rustc_cfg && sysroot == o_sysroot, + ( + Self::DetachedFiles { files, sysroot, rustc_cfg }, + Self::DetachedFiles { files: o_files, sysroot: o_sysroot, rustc_cfg: o_rustc_cfg }, + ) => files == o_files && sysroot == o_sysroot && rustc_cfg == o_rustc_cfg, + _ => false, + } + } } fn project_json_to_crate_graph( @@ -592,7 +655,7 @@ fn project_json_to_crate_graph( load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, project: &ProjectJson, - sysroot: &Option, + sysroot: Option<&Sysroot>, extra_env: &FxHashMap, target_layout: TargetLayoutLoadResult, ) -> CrateGraph { diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 3d7342d191378..a33b8e14cf3f0 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -148,11 +148,11 @@ impl GlobalState { ) } LinkedProject::InlineJsonProject(it) => { - project_model::ProjectWorkspace::load_inline( + Ok(project_model::ProjectWorkspace::load_inline( it.clone(), cargo_config.target.as_deref(), &cargo_config.extra_env, - ) + )) } }) .collect::>(); @@ -212,35 +212,11 @@ impl GlobalState { let workspaces = workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::>(); - fn eq_ignore_build_data<'a>( - left: &'a ProjectWorkspace, - right: &'a ProjectWorkspace, - ) -> bool { - let key = |p: &'a ProjectWorkspace| match p { - ProjectWorkspace::Cargo { - cargo, - sysroot, - rustc, - rustc_cfg, - cfg_overrides, - - build_scripts: _, - toolchain: _, - target_layout: _, - } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)), - _ => None, - }; - match (key(left), key(right)) { - (Some(lk), Some(rk)) => lk == rk, - _ => left == right, - } - } - let same_workspaces = workspaces.len() == self.workspaces.len() && workspaces .iter() .zip(self.workspaces.iter()) - .all(|(l, r)| eq_ignore_build_data(l, r)); + .all(|(l, r)| l.eq_ignore_build_data(r)); if same_workspaces { let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result(); @@ -270,7 +246,8 @@ impl GlobalState { // Here, we completely changed the workspace (Cargo.toml edit), so // we don't care about build-script results, they are stale. - self.workspaces = Arc::new(workspaces) + // FIXME: can we abort the build scripts here? 
+ self.workspaces = Arc::new(workspaces); } if let FilesWatcher::Client = self.config.files().watcher { From e7a2d13ff63c6b16f70f7854a9225de7301e044c Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Fri, 27 Jan 2023 16:51:55 +0100 Subject: [PATCH 168/501] Fix "add missing impl members" assist for impls inside blocks --- .../src/handlers/add_missing_impl_members.rs | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 627a9852fc8b8..4e11b31deb02b 100644 --- a/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -109,6 +109,7 @@ fn add_missing_impl_members_inner( if ctx.token_at_offset().all(|t| { t.parent_ancestors() + .take_while(|node| node != impl_def.syntax()) .any(|s| ast::BlockExpr::can_cast(s.kind()) || ast::ParamList::can_cast(s.kind())) }) { return None; @@ -1486,4 +1487,35 @@ impl Trait for () { }"#, ) } + + #[test] + fn test_works_inside_function() { + check_assist( + add_missing_impl_members, + r#" +trait Tr { + fn method(); +} +fn main() { + struct S; + impl Tr for S { + $0 + } +} +"#, + r#" +trait Tr { + fn method(); +} +fn main() { + struct S; + impl Tr for S { + fn method() { + ${0:todo!()} + } + } +} +"#, + ); + } } From 7aa4a205a83bfbdba1131ee8a6463d5c029bb089 Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Thu, 26 Jan 2023 13:32:33 -0700 Subject: [PATCH 169/501] rustdoc: merge doctest tooltip with notable traits tooltip Fixes https://discord.com/channels/442252698964721669/443150878111694848/1066420140167680000 Fixes #91100 --- src/librustdoc/html/highlight.rs | 16 ++- src/librustdoc/html/render/mod.rs | 2 +- src/librustdoc/html/static/css/rustdoc.css | 54 ++------ src/librustdoc/html/static/css/themes/ayu.css | 2 - .../html/static/css/themes/dark.css | 2 - .../html/static/css/themes/light.css | 2 - src/librustdoc/html/static/js/main.js | 112 ++++++++------- tests/rustdoc-gui/codeblock-tooltip.goml | 94 +++++-------- tests/rustdoc-gui/notable-trait.goml | 128 +++++++++--------- tests/rustdoc/codeblock-title.rs | 2 +- tests/rustdoc/doc-notable_trait.rs | 6 +- tests/rustdoc/spotlight-from-dependency.rs | 2 +- 12 files changed, 186 insertions(+), 236 deletions(-) diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 8a9e6caf611ba..11dd69487fbb7 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -96,13 +96,19 @@ fn write_header(out: &mut Buffer, class: &str, extra_content: Option, to ); if tooltip != Tooltip::None { + let edition_code; write!( out, - "

", - if let Tooltip::Edition(edition_info) = tooltip { - format!(" data-edition=\"{}\"", edition_info) - } else { - String::new() + "", + match tooltip { + Tooltip::Ignore => "This example is not tested", + Tooltip::CompileFail => "This example deliberately fails to compile", + Tooltip::ShouldPanic => "This example panics", + Tooltip::Edition(edition) => { + edition_code = format!("This example runs with edition {edition}"); + &edition_code + } + Tooltip::None => unreachable!(), }, ); } diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index d644293d3ef12..5655246532cd4 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -1313,7 +1313,7 @@ pub(crate) fn notable_traits_button(ty: &clean::Type, cx: &mut Context<'_>) -> O if has_notable_trait { cx.types_with_notable_traits.insert(ty.clone()); Some(format!( - " ", + " ", ty = Escape(&format!("{:#}", ty.print(cx))), )) } else { diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index bf83ff2044e69..437b8748c1dce 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -697,8 +697,8 @@ h2.small-section-header > .anchor { .main-heading a:hover, .example-wrap > pre.rust a:hover, .all-items a:hover, -.docblock a:not(.test-arrow):not(.scrape-help):hover, -.docblock-short a:not(.test-arrow):not(.scrape-help):hover, +.docblock a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover, +.docblock-short a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover, .item-info a { text-decoration: underline; } @@ -1093,44 +1093,8 @@ pre.rust .doccomment { display: block; left: -25px; top: 5px; -} - -.example-wrap .tooltip:hover::after { - padding: 5px 3px 3px 3px; - border-radius: 6px; - margin-left: 5px; - font-size: 1rem; - border: 1px solid var(--border-color); - position: absolute; - width: max-content; - top: -2px; - z-index: 1; - background-color: var(--tooltip-background-color); - color: var(--tooltip-color); -} - -.example-wrap .tooltip:hover::before { - content: " "; - position: absolute; - top: 50%; - left: 16px; - margin-top: -5px; - z-index: 1; - border: 5px solid transparent; - border-right-color: var(--tooltip-background-color); -} - -.example-wrap.ignore .tooltip:hover::after { - content: "This example is not tested"; -} -.example-wrap.compile_fail .tooltip:hover::after { - content: "This example deliberately fails to compile"; -} -.example-wrap.should_panic .tooltip:hover::after { - content: "This example panics"; -} -.example-wrap.edition .tooltip:hover::after { - content: "This code runs with edition " attr(data-edition); + margin: 0; + line-height: 1; } .example-wrap.compile_fail .tooltip, @@ -1196,7 +1160,7 @@ a.test-arrow:hover { border-right: 3px solid var(--target-border-color); } -.notable-traits { +.code-header a.tooltip { color: inherit; margin-right: 15px; position: relative; @@ -1205,7 +1169,7 @@ a.test-arrow:hover { /* placeholder thunk so that the mouse can easily travel from "(i)" to popover the resulting "hover tunnel" is a stepped triangle, approximating https://bjk5.com/post/44698559168/breaking-down-amazons-mega-dropdown */ -.notable-traits:hover::after { +a.tooltip:hover::after { position: absolute; top: calc(100% - 10px); left: -15px; @@ -1214,11 +1178,11 @@ a.test-arrow:hover { content: "\00a0"; } -.notable .content { +.popover.tooltip .content { margin: 0.25em 0.5em; } -.notable .content pre, .notable .content code { +.popover.tooltip .content pre, 
.popover.tooltip .content code { background: transparent; margin: 0; padding: 0; @@ -1226,7 +1190,7 @@ a.test-arrow:hover { white-space: pre-wrap; } -.notable .content > h3:first-child { +.popover.tooltip .content > h3:first-child { margin: 0 0 5px 0; } diff --git a/src/librustdoc/html/static/css/themes/ayu.css b/src/librustdoc/html/static/css/themes/ayu.css index ed779bf6166ee..43fe676e68bcb 100644 --- a/src/librustdoc/html/static/css/themes/ayu.css +++ b/src/librustdoc/html/static/css/themes/ayu.css @@ -70,8 +70,6 @@ Original by Dempfi (https://github.com/dempfi/ayu) --test-arrow-hover-background-color: rgba(57, 175, 215, 0.368); --target-background-color: rgba(255, 236, 164, 0.06); --target-border-color: rgba(255, 180, 76, 0.85); - --tooltip-background-color: #314559; - --tooltip-color: #c5c5c5; --kbd-color: #c5c5c5; --kbd-background: #314559; --kbd-box-shadow-color: #5c6773; diff --git a/src/librustdoc/html/static/css/themes/dark.css b/src/librustdoc/html/static/css/themes/dark.css index 3766f0daa42ff..6777675f66f57 100644 --- a/src/librustdoc/html/static/css/themes/dark.css +++ b/src/librustdoc/html/static/css/themes/dark.css @@ -65,8 +65,6 @@ --test-arrow-hover-background-color: #4e8bca; --target-background-color: #494a3d; --target-border-color: #bb7410; - --tooltip-background-color: #000; - --tooltip-color: #fff; --kbd-color: #000; --kbd-background: #fafbfc; --kbd-box-shadow-color: #c6cbd1; diff --git a/src/librustdoc/html/static/css/themes/light.css b/src/librustdoc/html/static/css/themes/light.css index 8a7f6abcf8d8e..a7ee84b43f52b 100644 --- a/src/librustdoc/html/static/css/themes/light.css +++ b/src/librustdoc/html/static/css/themes/light.css @@ -65,8 +65,6 @@ --test-arrow-hover-background-color: #4e8bca; --target-background-color: #fdffd3; --target-border-color: #ad7c37; - --tooltip-background-color: #000; - --tooltip-color: #fff; --kbd-color: #000; --kbd-background: #fafbfc; --kbd-box-shadow-color: #c6cbd1; diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index 604ab147f6a16..8d57ed98a877b 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -379,7 +379,7 @@ function loadCss(cssUrl) { } ev.preventDefault(); searchState.defocus(); - window.hideAllModals(true); // true = reset focus for notable traits + window.hideAllModals(true); // true = reset focus for tooltips } function handleShortcut(ev) { @@ -789,17 +789,17 @@ function loadCss(cssUrl) { // we need to switch away from mobile mode and make the main content area scrollable. hideSidebar(); } - if (window.CURRENT_NOTABLE_ELEMENT) { - // As a workaround to the behavior of `contains: layout` used in doc togglers, the - // notable traits popup is positioned using javascript. + if (window.CURRENT_TOOLTIP_ELEMENT) { + // As a workaround to the behavior of `contains: layout` used in doc togglers, + // tooltip popovers are positioned using javascript. // // This means when the window is resized, we need to redo the layout. 
- const base = window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE; - const force_visible = base.NOTABLE_FORCE_VISIBLE; - hideNotable(false); + const base = window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE; + const force_visible = base.TOOLTIP_FORCE_VISIBLE; + hideTooltip(false); if (force_visible) { - showNotable(base); - base.NOTABLE_FORCE_VISIBLE = true; + showTooltip(base); + base.TOOLTIP_FORCE_VISIBLE = true; } } }); @@ -827,27 +827,35 @@ function loadCss(cssUrl) { }); }); - function showNotable(e) { - if (!window.NOTABLE_TRAITS) { + function showTooltip(e) { + const notable_ty = e.getAttribute("data-notable-ty"); + if (!window.NOTABLE_TRAITS && notable_ty) { const data = document.getElementById("notable-traits-data"); if (data) { window.NOTABLE_TRAITS = JSON.parse(data.innerText); } else { - throw new Error("showNotable() called on page without any notable traits!"); + throw new Error("showTooltip() called with notable without any notable traits!"); } } - if (window.CURRENT_NOTABLE_ELEMENT && window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE === e) { + if (window.CURRENT_TOOLTIP_ELEMENT && window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE === e) { // Make this function idempotent. return; } window.hideAllModals(false); - const ty = e.getAttribute("data-ty"); const wrapper = document.createElement("div"); - wrapper.innerHTML = "
<div class=\"content\">" + window.NOTABLE_TRAITS[ty] + "</div>
"; - wrapper.className = "notable popover"; + if (notable_ty) { + wrapper.innerHTML = "
<div class=\"content\">" + + window.NOTABLE_TRAITS[notable_ty] + "</div>
"; + } else if (e.getAttribute("title") !== undefined) { + const titleContent = document.createElement("div"); + titleContent.className = "content"; + titleContent.appendChild(document.createTextNode(e.getAttribute("title"))); + wrapper.appendChild(titleContent); + } + wrapper.className = "tooltip popover"; const focusCatcher = document.createElement("div"); focusCatcher.setAttribute("tabindex", "0"); - focusCatcher.onfocus = hideNotable; + focusCatcher.onfocus = hideTooltip; wrapper.appendChild(focusCatcher); const pos = e.getBoundingClientRect(); // 5px overlap so that the mouse can easily travel from place to place @@ -869,62 +877,62 @@ function loadCss(cssUrl) { ); } wrapper.style.visibility = ""; - window.CURRENT_NOTABLE_ELEMENT = wrapper; - window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE = e; + window.CURRENT_TOOLTIP_ELEMENT = wrapper; + window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE = e; wrapper.onpointerleave = function(ev) { // If this is a synthetic touch event, ignore it. A click event will be along shortly. if (ev.pointerType !== "mouse") { return; } - if (!e.NOTABLE_FORCE_VISIBLE && !elemIsInParent(event.relatedTarget, e)) { - hideNotable(true); + if (!e.TOOLTIP_FORCE_VISIBLE && !elemIsInParent(event.relatedTarget, e)) { + hideTooltip(true); } }; } - function notableBlurHandler(event) { - if (window.CURRENT_NOTABLE_ELEMENT && - !elemIsInParent(document.activeElement, window.CURRENT_NOTABLE_ELEMENT) && - !elemIsInParent(event.relatedTarget, window.CURRENT_NOTABLE_ELEMENT) && - !elemIsInParent(document.activeElement, window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE) && - !elemIsInParent(event.relatedTarget, window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE) + function tooltipBlurHandler(event) { + if (window.CURRENT_TOOLTIP_ELEMENT && + !elemIsInParent(document.activeElement, window.CURRENT_TOOLTIP_ELEMENT) && + !elemIsInParent(event.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT) && + !elemIsInParent(document.activeElement, window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE) && + !elemIsInParent(event.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE) ) { // Work around a difference in the focus behaviour between Firefox, Chrome, and Safari. - // When I click the button on an already-opened notable trait popover, Safari + // When I click the button on an already-opened tooltip popover, Safari // hides the popover and then immediately shows it again, while everyone else hides it // and it stays hidden. // // To work around this, make sure the click finishes being dispatched before - // hiding the popover. Since `hideNotable()` is idempotent, this makes Safari behave + // hiding the popover. Since `hideTooltip()` is idempotent, this makes Safari behave // consistently with the other two. 
- setTimeout(() => hideNotable(false), 0); + setTimeout(() => hideTooltip(false), 0); } } - function hideNotable(focus) { - if (window.CURRENT_NOTABLE_ELEMENT) { - if (window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE.NOTABLE_FORCE_VISIBLE) { + function hideTooltip(focus) { + if (window.CURRENT_TOOLTIP_ELEMENT) { + if (window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.TOOLTIP_FORCE_VISIBLE) { if (focus) { - window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE.focus(); + window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.focus(); } - window.CURRENT_NOTABLE_ELEMENT.NOTABLE_BASE.NOTABLE_FORCE_VISIBLE = false; + window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.TOOLTIP_FORCE_VISIBLE = false; } const body = document.getElementsByTagName("body")[0]; - body.removeChild(window.CURRENT_NOTABLE_ELEMENT); - window.CURRENT_NOTABLE_ELEMENT = null; + body.removeChild(window.CURRENT_TOOLTIP_ELEMENT); + window.CURRENT_TOOLTIP_ELEMENT = null; } } - onEachLazy(document.getElementsByClassName("notable-traits"), e => { + onEachLazy(document.getElementsByClassName("tooltip"), e => { e.onclick = function() { - this.NOTABLE_FORCE_VISIBLE = this.NOTABLE_FORCE_VISIBLE ? false : true; - if (window.CURRENT_NOTABLE_ELEMENT && !this.NOTABLE_FORCE_VISIBLE) { - hideNotable(true); + this.TOOLTIP_FORCE_VISIBLE = this.TOOLTIP_FORCE_VISIBLE ? false : true; + if (window.CURRENT_TOOLTIP_ELEMENT && !this.TOOLTIP_FORCE_VISIBLE) { + hideTooltip(true); } else { - showNotable(this); - window.CURRENT_NOTABLE_ELEMENT.setAttribute("tabindex", "0"); - window.CURRENT_NOTABLE_ELEMENT.focus(); - window.CURRENT_NOTABLE_ELEMENT.onblur = notableBlurHandler; + showTooltip(this); + window.CURRENT_TOOLTIP_ELEMENT.setAttribute("tabindex", "0"); + window.CURRENT_TOOLTIP_ELEMENT.focus(); + window.CURRENT_TOOLTIP_ELEMENT.onblur = tooltipBlurHandler; } return false; }; @@ -933,16 +941,16 @@ function loadCss(cssUrl) { if (ev.pointerType !== "mouse") { return; } - showNotable(this); + showTooltip(this); }; e.onpointerleave = function(ev) { // If this is a synthetic touch event, ignore it. A click event will be along shortly. if (ev.pointerType !== "mouse") { return; } - if (!this.NOTABLE_FORCE_VISIBLE && - !elemIsInParent(ev.relatedTarget, window.CURRENT_NOTABLE_ELEMENT)) { - hideNotable(true); + if (!this.TOOLTIP_FORCE_VISIBLE && + !elemIsInParent(ev.relatedTarget, window.CURRENT_TOOLTIP_ELEMENT)) { + hideTooltip(true); } }; }); @@ -1044,14 +1052,14 @@ function loadCss(cssUrl) { } /** - * Hide popover menus, notable trait tooltips, and the sidebar (if applicable). + * Hide popover menus, clickable tooltips, and the sidebar (if applicable). * - * Pass "true" to reset focus for notable traits. + * Pass "true" to reset focus for tooltip popovers. 
*/ window.hideAllModals = function(switchFocus) { hideSidebar(); window.hidePopoverMenus(); - hideNotable(switchFocus); + hideTooltip(switchFocus); }; /** diff --git a/tests/rustdoc-gui/codeblock-tooltip.goml b/tests/rustdoc-gui/codeblock-tooltip.goml index a3ef4e77b5424..36b67073a0353 100644 --- a/tests/rustdoc-gui/codeblock-tooltip.goml +++ b/tests/rustdoc-gui/codeblock-tooltip.goml @@ -30,24 +30,16 @@ define-function: ( ".docblock .example-wrap.compile_fail", {"border-left": "2px solid rgb(255, 0, 0)"}, ) - assert-css: ( - ".docblock .example-wrap.compile_fail .tooltip::after", - { - "content": '"This example deliberately fails to compile"', - "padding": "5px 3px 3px", - "background-color": |background|, - "color": |color|, - "border": "1px solid " + |border|, - }, - ) - assert-css: ( - ".docblock .example-wrap.compile_fail .tooltip::before", - { - "border-width": "5px", - "border-style": "solid", - "border-color": "rgba(0, 0, 0, 0) " + |background| + " rgba(0, 0, 0, 0) rgba(0, 0, 0, 0)", - }, - ) + click: ".docblock .example-wrap.compile_fail .tooltip" + assert-text: ( + ".popover.tooltip", + "This example deliberately fails to compile" + ) + assert-css: (".popover.tooltip", { + "color": |color|, + "background-color": |background|, + "border-color": |border|, + }) // should_panic block assert-css: ( @@ -69,24 +61,16 @@ define-function: ( ".docblock .example-wrap.should_panic", {"border-left": "2px solid rgb(255, 0, 0)"}, ) - assert-css: ( - ".docblock .example-wrap.should_panic .tooltip::after", - { - "content": '"This example panics"', - "padding": "5px 3px 3px", - "background-color": |background|, - "color": |color|, - "border": "1px solid " + |border|, - }, - ) - assert-css: ( - ".docblock .example-wrap.should_panic .tooltip::before", - { - "border-width": "5px", - "border-style": "solid", - "border-color": "rgba(0, 0, 0, 0) " + |background| + " rgba(0, 0, 0, 0) rgba(0, 0, 0, 0)", - }, + click: ".docblock .example-wrap.should_panic .tooltip" + assert-text: ( + ".popover.tooltip", + "This example panics" ) + assert-css: (".popover.tooltip", { + "color": |color|, + "background-color": |background|, + "border-color": |border|, + }) // ignore block assert-css: ( @@ -108,42 +92,36 @@ define-function: ( ".docblock .example-wrap.ignore", {"border-left": "2px solid rgb(255, 142, 0)"}, ) - assert-css: ( - ".docblock .example-wrap.ignore .tooltip::after", - { - "content": '"This example is not tested"', - "padding": "5px 3px 3px", - "background-color": |background|, - "color": |color|, - "border": "1px solid " + |border|, - }, - ) - assert-css: ( - ".docblock .example-wrap.ignore .tooltip::before", - { - "border-width": "5px", - "border-style": "solid", - "border-color": "rgba(0, 0, 0, 0) " + |background| + " rgba(0, 0, 0, 0) rgba(0, 0, 0, 0)", - }, - ) + click: ".docblock .example-wrap.ignore .tooltip" + assert-text: ( + ".popover.tooltip", + "This example is not tested" + ) + assert-css: (".popover.tooltip", { + "color": |color|, + "background-color": |background|, + "border-color": |border|, + }) + click: ".docblock .example-wrap.ignore .tooltip" + assert-false: ".popover.tooltip" }, ) call-function: ("check-colors", { "theme": "ayu", - "background": "rgb(49, 69, 89)", + "background": "rgb(15, 20, 25)", "color": "rgb(197, 197, 197)", "border": "rgb(92, 103, 115)", }) call-function: ("check-colors", { "theme": "dark", - "background": "rgb(0, 0, 0)", - "color": "rgb(255, 255, 255)", + "background": "rgb(53, 53, 53)", + "color": "rgb(221, 221, 221)", "border": "rgb(224, 224, 224)", }) 
call-function: ("check-colors", { "theme": "light", - "background": "rgb(0, 0, 0)", - "color": "rgb(255, 255, 255)", + "background": "rgb(255, 255, 255)", + "color": "rgb(0, 0, 0)", "border": "rgb(224, 224, 224)", }) diff --git a/tests/rustdoc-gui/notable-trait.goml b/tests/rustdoc-gui/notable-trait.goml index b4fa7d0dbf0e2..207289151992e 100644 --- a/tests/rustdoc-gui/notable-trait.goml +++ b/tests/rustdoc-gui/notable-trait.goml @@ -6,13 +6,13 @@ size: (1100, 600) // Checking they have the same y position. compare-elements-position: ( "//*[@id='method.create_an_iterator_from_read']//a[text()='NotableStructWithLongName']", - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", ("y"), ) // Checking they don't have the same x position. compare-elements-position-false: ( "//*[@id='method.create_an_iterator_from_read']//a[text()='NotableStructWithLongName']", - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", ("x"), ) // The `i` should be *after* the type. @@ -21,33 +21,33 @@ assert-position: ( {"x": 677}, ) assert-position: ( - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", {"x": 955}, ) // The tooltip should be below the `i` // Also, clicking the tooltip should bring its text into the DOM -assert-count: ("//*[@class='notable popover']", 0) -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" -assert-count: ("//*[@class='notable popover']", 1) +assert-count: ("//*[@class='tooltip popover']", 0) +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" +assert-count: ("//*[@class='tooltip popover']", 1) compare-elements-position-near: ( - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", - "//*[@class='notable popover']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", + "//*[@class='tooltip popover']", {"y": 30} ) compare-elements-position-false: ( - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", - "//*[@class='notable popover']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", + "//*[@class='tooltip popover']", ("x") ) -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" move-cursor-to: "//h1" -assert-count: ("//*[@class='notable popover']", 0) +assert-count: ("//*[@class='tooltip popover']", 0) // Now only the `i` should be on the next line. size: (1055, 600) compare-elements-position-false: ( "//*[@id='method.create_an_iterator_from_read']//a[text()='NotableStructWithLongName']", - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", ("y", "x"), ) @@ -56,13 +56,13 @@ size: (980, 600) // Checking they have the same y position. compare-elements-position: ( "//*[@id='method.create_an_iterator_from_read']//a[text()='NotableStructWithLongName']", - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", ("y"), ) // Checking they don't have the same x position. 
compare-elements-position-false: ( "//*[@id='method.create_an_iterator_from_read']//a[text()='NotableStructWithLongName']", - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", ("x"), ) // The `i` should be *after* the type. @@ -71,7 +71,7 @@ assert-position: ( {"x": 245}, ) assert-position: ( - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", {"x": 523}, ) @@ -80,13 +80,13 @@ size: (650, 600) // Checking they have the same y position. compare-elements-position: ( "//*[@id='method.create_an_iterator_from_read']//a[text()='NotableStructWithLongName']", - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", ("y"), ) // Checking they don't have the same x position. compare-elements-position-false: ( "//*[@id='method.create_an_iterator_from_read']//a[text()='NotableStructWithLongName']", - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", ("x"), ) // The `i` should be *after* the type. @@ -95,29 +95,29 @@ assert-position: ( {"x": 15}, ) assert-position: ( - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", {"x": 293}, ) // The tooltip should STILL be below `i` -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" -assert-count: ("//*[@class='notable popover']", 1) +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" +assert-count: ("//*[@class='tooltip popover']", 1) compare-elements-position-near: ( - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", - "//*[@class='notable popover']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", + "//*[@class='tooltip popover']", {"y": 30} ) compare-elements-position-false: ( - "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']", - "//*[@class='notable popover']", + "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']", + "//*[@class='tooltip popover']", ("x") ) assert-position: ( - "//*[@class='notable popover']", + "//*[@class='tooltip popover']", {"x": 0} ) -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" move-cursor-to: "//h1" -assert-count: ("//*[@class='notable popover']", 0) +assert-count: ("//*[@class='tooltip popover']", 0) // Now check the colors. define-function: ( @@ -133,26 +133,26 @@ define-function: ( // We reload the page so the local storage settings are being used. 
reload: - move-cursor-to: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" - assert-count: (".notable.popover", 1) + move-cursor-to: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" + assert-count: (".tooltip.popover", 1) assert-css: ( - ".notable.popover h3", + ".tooltip.popover h3", {"color": |header_color|}, ALL, ) assert-css: ( - ".notable.popover pre", + ".tooltip.popover pre", {"color": |content_color|}, ALL, ) assert-css: ( - ".notable.popover pre a.struct", + ".tooltip.popover pre a.struct", {"color": |type_color|}, ALL, ) assert-css: ( - ".notable.popover pre a.trait", + ".tooltip.popover pre a.trait", {"color": |trait_color|}, ALL, ) @@ -195,24 +195,24 @@ call-function: ( reload: // Check that pressing escape works -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" -move-cursor-to: "//*[@class='notable popover']" -assert-count: ("//*[@class='notable popover']", 1) +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" +move-cursor-to: "//*[@class='tooltip popover']" +assert-count: ("//*[@class='tooltip popover']", 1) press-key: "Escape" -assert-count: ("//*[@class='notable popover']", 0) -assert: "#method\.create_an_iterator_from_read .notable-traits:focus" +assert-count: ("//*[@class='tooltip popover']", 0) +assert: "#method\.create_an_iterator_from_read .tooltip:focus" // Check that clicking outside works. -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" -assert-count: ("//*[@class='notable popover']", 1) +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" +assert-count: ("//*[@class='tooltip popover']", 1) click: ".search-input" -assert-count: ("//*[@class='notable popover']", 0) -assert-false: "#method\.create_an_iterator_from_read .notable-traits:focus" +assert-count: ("//*[@class='tooltip popover']", 0) +assert-false: "#method\.create_an_iterator_from_read .tooltip:focus" // Check that pressing tab over and over works. -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" -move-cursor-to: "//*[@class='notable popover']" -assert-count: ("//*[@class='notable popover']", 1) +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" +move-cursor-to: "//*[@class='tooltip popover']" +assert-count: ("//*[@class='tooltip popover']", 1) press-key: "Tab" press-key: "Tab" press-key: "Tab" @@ -220,8 +220,8 @@ press-key: "Tab" press-key: "Tab" press-key: "Tab" press-key: "Tab" -assert-count: ("//*[@class='notable popover']", 0) -assert: "#method\.create_an_iterator_from_read .notable-traits:focus" +assert-count: ("//*[@class='tooltip popover']", 0) +assert: "#method\.create_an_iterator_from_read .tooltip:focus" // Now we check that the focus isn't given back to the wrong item when opening // another popover. @@ -231,8 +231,8 @@ click: "#method\.create_an_iterator_from_read .fn" assert-window-property-false: {"scrollY": |scroll|} // Store the new position. store-window-property: (scroll, "scrollY") -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" -wait-for: "//*[@class='notable popover']" +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" +wait-for: "//*[@class='tooltip popover']" click: "#settings-menu a" click: ".search-input" // We ensure we didn't come back to the previous focused item. 
@@ -245,8 +245,8 @@ click: "#method\.create_an_iterator_from_read .fn" assert-window-property-false: {"scrollY": |scroll|} // Store the new position. store-window-property: (scroll, "scrollY") -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" -wait-for: "//*[@class='notable popover']" +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" +wait-for: "//*[@class='tooltip popover']" click: "#settings-menu a" press-key: "Escape" // We ensure we didn't come back to the previous focused item. @@ -254,23 +254,23 @@ assert-window-property-false: {"scrollY": |scroll|} // Opening the mobile sidebar should close the popover. size: (650, 600) -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" -assert-count: ("//*[@class='notable popover']", 1) +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" +assert-count: ("//*[@class='tooltip popover']", 1) click: ".sidebar-menu-toggle" assert: "//*[@class='sidebar shown']" -assert-count: ("//*[@class='notable popover']", 0) -assert-false: "#method\.create_an_iterator_from_read .notable-traits:focus" -// Clicking a notable popover should close the sidebar. -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" -assert-count: ("//*[@class='notable popover']", 1) +assert-count: ("//*[@class='tooltip popover']", 0) +assert-false: "#method\.create_an_iterator_from_read .tooltip:focus" +// Clicking a notable trait tooltip popover should close the sidebar. +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" +assert-count: ("//*[@class='tooltip popover']", 1) assert-false: "//*[@class='sidebar shown']" // Also check the focus handling for the help button. size: (1100, 600) reload: -assert-count: ("//*[@class='notable popover']", 0) -click: "//*[@id='method.create_an_iterator_from_read']//*[@class='notable-traits']" -assert-count: ("//*[@class='notable popover']", 1) +assert-count: ("//*[@class='tooltip popover']", 0) +click: "//*[@id='method.create_an_iterator_from_read']//*[@class='tooltip']" +assert-count: ("//*[@class='tooltip popover']", 1) click: "#help-button a" -assert-count: ("//*[@class='notable popover']", 0) -assert-false: "#method\.create_an_iterator_from_read .notable-traits:focus" +assert-count: ("//*[@class='tooltip popover']", 0) +assert-false: "#method\.create_an_iterator_from_read .tooltip:focus" diff --git a/tests/rustdoc/codeblock-title.rs b/tests/rustdoc/codeblock-title.rs index b9b0b0d1abf99..761afb8bd08d0 100644 --- a/tests/rustdoc/codeblock-title.rs +++ b/tests/rustdoc/codeblock-title.rs @@ -3,7 +3,7 @@ // @has foo/fn.bar.html '//*[@class="example-wrap compile_fail"]/*[@class="tooltip"]' "ⓘ" // @has foo/fn.bar.html '//*[@class="example-wrap ignore"]/*[@class="tooltip"]' "ⓘ" // @has foo/fn.bar.html '//*[@class="example-wrap should_panic"]/*[@class="tooltip"]' "ⓘ" -// @has foo/fn.bar.html '//*[@data-edition="2018"]' "ⓘ" +// @has foo/fn.bar.html '//*[@title="This example runs with edition 2018"]' "ⓘ" /// foo /// diff --git a/tests/rustdoc/doc-notable_trait.rs b/tests/rustdoc/doc-notable_trait.rs index 279faf5540140..d8941769fa67a 100644 --- a/tests/rustdoc/doc-notable_trait.rs +++ b/tests/rustdoc/doc-notable_trait.rs @@ -9,7 +9,7 @@ impl SomeTrait for Wrapper {} #[doc(notable_trait)] pub trait SomeTrait { // @has doc_notable_trait/trait.SomeTrait.html - // @has - '//a[@class="notable-traits"]/@data-ty' 'Wrapper' + // @has - '//a[@class="tooltip"]/@data-notable-ty' 'Wrapper' // 
@snapshot wrap-me - '//script[@id="notable-traits-data"]' fn wrap_me(self) -> Wrapper where Self: Sized { Wrapper { @@ -23,7 +23,7 @@ impl SomeTrait for SomeStruct {} impl SomeStruct { // @has doc_notable_trait/struct.SomeStruct.html - // @has - '//a[@class="notable-traits"]/@data-ty' 'SomeStruct' + // @has - '//a[@class="tooltip"]/@data-notable-ty' 'SomeStruct' // @snapshot some-struct-new - '//script[@id="notable-traits-data"]' pub fn new() -> SomeStruct { SomeStruct @@ -31,7 +31,7 @@ impl SomeStruct { } // @has doc_notable_trait/fn.bare_fn.html -// @has - '//a[@class="notable-traits"]/@data-ty' 'SomeStruct' +// @has - '//a[@class="tooltip"]/@data-notable-ty' 'SomeStruct' // @snapshot bare-fn - '//script[@id="notable-traits-data"]' pub fn bare_fn() -> SomeStruct { SomeStruct diff --git a/tests/rustdoc/spotlight-from-dependency.rs b/tests/rustdoc/spotlight-from-dependency.rs index 090ad187d9cc0..426759c7bf8a2 100644 --- a/tests/rustdoc/spotlight-from-dependency.rs +++ b/tests/rustdoc/spotlight-from-dependency.rs @@ -3,7 +3,7 @@ use std::iter::Iterator; // @has foo/struct.Odd.html -// @has - '//*[@id="method.new"]//a[@class="notable-traits"]/@data-ty' 'Odd' +// @has - '//*[@id="method.new"]//a[@class="tooltip"]/@data-notable-ty' 'Odd' // @snapshot odd - '//script[@id="notable-traits-data"]' pub struct Odd { current: usize, From e993072661a578189a41405b03da8127c4902e2c Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Fri, 27 Jan 2023 19:17:23 +0100 Subject: [PATCH 170/501] Provide signature help when typing record literal --- crates/ide/src/signature_help.rs | 180 +++++++++++++++++++++++++++++-- 1 file changed, 173 insertions(+), 7 deletions(-) diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index f807ba30f40a3..d249bc4728efc 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -4,13 +4,15 @@ use std::collections::BTreeSet; use either::Either; -use hir::{AssocItem, GenericParam, HasAttrs, HirDisplay, Semantics, Trait}; -use ide_db::{active_parameter::callable_for_node, base_db::FilePosition}; +use hir::{ + AssocItem, GenericParam, HasAttrs, HirDisplay, ModuleDef, PathResolution, Semantics, Trait, +}; +use ide_db::{active_parameter::callable_for_node, base_db::FilePosition, FxIndexMap}; use stdx::format_to; use syntax::{ algo, ast::{self, HasArgList}, - match_ast, AstNode, Direction, SyntaxToken, TextRange, TextSize, + match_ast, AstNode, Direction, SyntaxKind, SyntaxToken, TextRange, TextSize, }; use crate::RootDatabase; @@ -37,14 +39,18 @@ impl SignatureHelp { } fn push_call_param(&mut self, param: &str) { - self.push_param('(', param); + self.push_param("(", param); } fn push_generic_param(&mut self, param: &str) { - self.push_param('<', param); + self.push_param("<", param); + } + + fn push_record_field(&mut self, param: &str) { + self.push_param("{ ", param); } - fn push_param(&mut self, opening_delim: char, param: &str) { + fn push_param(&mut self, opening_delim: &str, param: &str) { if !self.signature.ends_with(opening_delim) { self.signature.push_str(", "); } @@ -85,6 +91,13 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio } return signature_help_for_generics(&sema, garg_list, token); }, + ast::RecordExpr(record) => { + let cursor_outside = record.record_expr_field_list().and_then(|list| list.r_curly_token()).as_ref() == Some(&token); + if cursor_outside { + continue; + } + return signature_help_for_record_lit(&sema, record, token); + }, _ => (), } } @@ -92,7 +105,9 @@ 
pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio // Stop at multi-line expressions, since the signature of the outer call is not very // helpful inside them. if let Some(expr) = ast::Expr::cast(node.clone()) { - if expr.syntax().text().contains_char('\n') { + if expr.syntax().text().contains_char('\n') + && expr.syntax().kind() != SyntaxKind::RECORD_EXPR + { return None; } } @@ -368,6 +383,81 @@ fn add_assoc_type_bindings( } } +fn signature_help_for_record_lit( + sema: &Semantics<'_, RootDatabase>, + record: ast::RecordExpr, + token: SyntaxToken, +) -> Option { + let arg_list = record + .syntax() + .ancestors() + .filter_map(ast::RecordExpr::cast) + .find(|list| list.syntax().text_range().contains(token.text_range().start()))?; + + let active_parameter = arg_list + .record_expr_field_list()? + .fields() + .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start()) + .count(); + + let mut res = SignatureHelp { + doc: None, + signature: String::new(), + parameters: vec![], + active_parameter: Some(active_parameter), + }; + + let fields; + + let db = sema.db; + match sema.resolve_path(&record.path()?)? { + PathResolution::Def(ModuleDef::Adt(adt)) => match adt { + hir::Adt::Struct(it) => { + fields = it.fields(db); + res.doc = it.docs(db).map(|it| it.into()); + format_to!(res.signature, "struct {} {{ ", it.name(db)); + } + hir::Adt::Union(it) => { + fields = it.fields(db); + res.doc = it.docs(db).map(|it| it.into()); + format_to!(res.signature, "union {} {{ ", it.name(db)); + } + _ => return None, + }, + PathResolution::Def(ModuleDef::Variant(variant)) => { + fields = variant.fields(db); + let en = variant.parent_enum(db); + + res.doc = en.docs(db).map(|it| it.into()); + format_to!(res.signature, "enum {}::{} {{ ", en.name(db), variant.name(db)); + } + _ => return None, + } + + let mut fields = + fields.into_iter().map(|field| (field.name(db), Some(field))).collect::>(); + let mut buf = String::new(); + for field in record.record_expr_field_list()?.fields() { + let Some((field, _, ty)) = sema.resolve_record_field(&field) else { continue }; + let name = field.name(db); + format_to!(buf, "{name}: {}", ty.display_truncated(db, Some(20))); + res.push_record_field(&buf); + buf.clear(); + + if let Some(field) = fields.get_mut(&name) { + *field = None; + } + } + for (name, field) in fields { + let Some(field) = field else { continue }; + format_to!(buf, "{name}: {}", field.ty(db).display_truncated(db, Some(20))); + res.push_record_field(&buf); + buf.clear(); + } + res.signature.push_str(" }"); + Some(res) +} + #[cfg(test)] mod tests { use std::iter; @@ -1405,4 +1495,80 @@ fn take( "#]], ); } + + #[test] + fn record_literal() { + check( + r#" +struct Strukt { + t: T, + u: U, + unit: (), +} +fn f() { + Strukt { + u: 0, + $0 + } +} +"#, + expect![[r#" + struct Strukt { u: i32, t: T, unit: () } + ------ ^^^^ -------- + "#]], + ); + } + + #[test] + fn record_literal_nonexistent_field() { + check( + r#" +struct Strukt { + a: u8, +} +fn f() { + Strukt { + b: 8, + $0 + } +} +"#, + expect![[r#" + struct Strukt { a: u8 } + ----- + "#]], + ); + } + + #[test] + fn tuple_variant_record_literal() { + check( + r#" +enum Opt { + Some(u8), +} +fn f() { + Opt::Some {$0} +} +"#, + expect![[r#" + enum Opt::Some { 0: u8 } + ^^^^^ + "#]], + ); + check( + r#" +enum Opt { + Some(u8), +} +fn f() { + Opt::Some {0:0,$0} +} +"#, + expect![[r#" + enum Opt::Some { 0: u8 } + ----- + "#]], + ); + } } From cad4cb38cdbf033fb5d7a6c726d86a15e2f4e436 Mon Sep 17 00:00:00 2001 From: 
Jonas Schievink Date: Fri, 27 Jan 2023 19:25:31 +0100 Subject: [PATCH 171/501] Make it work with `Self { .. }` --- crates/ide/src/signature_help.rs | 43 ++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 10 deletions(-) diff --git a/crates/ide/src/signature_help.rs b/crates/ide/src/signature_help.rs index d249bc4728efc..a666562f1010c 100644 --- a/crates/ide/src/signature_help.rs +++ b/crates/ide/src/signature_help.rs @@ -410,8 +410,21 @@ fn signature_help_for_record_lit( let fields; let db = sema.db; - match sema.resolve_path(&record.path()?)? { - PathResolution::Def(ModuleDef::Adt(adt)) => match adt { + let path_res = sema.resolve_path(&record.path()?)?; + if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res { + fields = variant.fields(db); + let en = variant.parent_enum(db); + + res.doc = en.docs(db).map(|it| it.into()); + format_to!(res.signature, "enum {}::{} {{ ", en.name(db), variant.name(db)); + } else { + let adt = match path_res { + PathResolution::SelfType(imp) => imp.self_ty(db).as_adt()?, + PathResolution::Def(ModuleDef::Adt(adt)) => adt, + _ => return None, + }; + + match adt { hir::Adt::Struct(it) => { fields = it.fields(db); res.doc = it.docs(db).map(|it| it.into()); @@ -423,15 +436,7 @@ fn signature_help_for_record_lit( format_to!(res.signature, "union {} {{ ", it.name(db)); } _ => return None, - }, - PathResolution::Def(ModuleDef::Variant(variant)) => { - fields = variant.fields(db); - let en = variant.parent_enum(db); - - res.doc = en.docs(db).map(|it| it.into()); - format_to!(res.signature, "enum {}::{} {{ ", en.name(db), variant.name(db)); } - _ => return None, } let mut fields = @@ -1571,4 +1576,22 @@ fn f() { "#]], ); } + + #[test] + fn record_literal_self() { + check( + r#" +struct S { t: u8 } +impl S { + fn new() -> Self { + Self { $0 } + } +} + "#, + expect![[r#" + struct S { t: u8 } + ^^^^^ + "#]], + ); + } } From 3ba9b13490b88a902ae74056a86aeaab038a6a40 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Fri, 27 Jan 2023 18:44:19 +0000 Subject: [PATCH 172/501] Don't download abi-cafe and simple-raytracer in ./y.rs prepare Instead download them on the fly --- build_system/abi_cafe.rs | 8 +++++--- build_system/bench.rs | 13 ++++++++++--- build_system/prepare.rs | 18 +++++++----------- build_system/tests.rs | 5 ----- build_system/utils.rs | 12 ++++++++++-- config.txt | 1 - 6 files changed, 32 insertions(+), 25 deletions(-) diff --git a/build_system/abi_cafe.rs b/build_system/abi_cafe.rs index dbee9be04eea6..0da27f529b3ec 100644 --- a/build_system/abi_cafe.rs +++ b/build_system/abi_cafe.rs @@ -6,11 +6,10 @@ use super::prepare::GitRepo; use super::utils::{spawn_and_wait, CargoProject, Compiler}; use super::SysrootKind; -pub(crate) static ABI_CAFE_REPO: GitRepo = +static ABI_CAFE_REPO: GitRepo = GitRepo::github("Gankra", "abi-cafe", "4c6dc8c9c687e2b3a760ff2176ce236872b37212", "abi-cafe"); -pub(crate) static ABI_CAFE: CargoProject = - CargoProject::new(&ABI_CAFE_REPO.source_dir(), "abi_cafe"); +static ABI_CAFE: CargoProject = CargoProject::new(&ABI_CAFE_REPO.source_dir(), "abi_cafe"); pub(crate) fn run( channel: &str, @@ -19,6 +18,9 @@ pub(crate) fn run( cg_clif_dylib: &Path, bootstrap_host_compiler: &Compiler, ) { + ABI_CAFE_REPO.fetch(dirs); + spawn_and_wait(ABI_CAFE.fetch("cargo", &bootstrap_host_compiler.rustc, dirs)); + eprintln!("Building sysroot for abi-cafe"); build_sysroot::build_sysroot( dirs, diff --git a/build_system/bench.rs b/build_system/bench.rs index 01d44dafbdd17..f48f7bece0282 100644 --- 
a/build_system/bench.rs +++ b/build_system/bench.rs @@ -7,7 +7,7 @@ use super::prepare::GitRepo; use super::rustc_info::get_file_name; use super::utils::{hyperfine_command, is_ci, spawn_and_wait, CargoProject, Compiler}; -pub(crate) static SIMPLE_RAYTRACER_REPO: GitRepo = GitRepo::github( +static SIMPLE_RAYTRACER_REPO: GitRepo = GitRepo::github( "ebobby", "simple-raytracer", "804a7a21b9e673a482797aa289a18ed480e4d813", @@ -15,10 +15,10 @@ pub(crate) static SIMPLE_RAYTRACER_REPO: GitRepo = GitRepo::github( ); // Use a separate target dir for the initial LLVM build to reduce unnecessary recompiles -pub(crate) static SIMPLE_RAYTRACER_LLVM: CargoProject = +static SIMPLE_RAYTRACER_LLVM: CargoProject = CargoProject::new(&SIMPLE_RAYTRACER_REPO.source_dir(), "simple_raytracer_llvm"); -pub(crate) static SIMPLE_RAYTRACER: CargoProject = +static SIMPLE_RAYTRACER: CargoProject = CargoProject::new(&SIMPLE_RAYTRACER_REPO.source_dir(), "simple_raytracer"); pub(crate) fn benchmark(dirs: &Dirs, bootstrap_host_compiler: &Compiler) { @@ -32,6 +32,13 @@ fn benchmark_simple_raytracer(dirs: &Dirs, bootstrap_host_compiler: &Compiler) { std::process::exit(1); } + SIMPLE_RAYTRACER_REPO.fetch(dirs); + spawn_and_wait(SIMPLE_RAYTRACER.fetch( + &bootstrap_host_compiler.cargo, + &bootstrap_host_compiler.rustc, + dirs, + )); + eprintln!("[LLVM BUILD] simple-raytracer"); let build_cmd = SIMPLE_RAYTRACER_LLVM.build(bootstrap_host_compiler, dirs); spawn_and_wait(build_cmd); diff --git a/build_system/prepare.rs b/build_system/prepare.rs index f25a81dc23459..50b1b7836dee1 100644 --- a/build_system/prepare.rs +++ b/build_system/prepare.rs @@ -11,22 +11,18 @@ use super::utils::{copy_dir_recursively, git_command, retry_spawn_and_wait, spaw pub(crate) fn prepare(dirs: &Dirs) { RelPath::DOWNLOAD.ensure_fresh(dirs); - spawn_and_wait(super::build_backend::CG_CLIF.fetch("cargo", dirs)); + spawn_and_wait(super::build_backend::CG_CLIF.fetch("cargo", "rustc", dirs)); prepare_sysroot(dirs); - spawn_and_wait(super::build_sysroot::STANDARD_LIBRARY.fetch("cargo", dirs)); - spawn_and_wait(super::tests::LIBCORE_TESTS.fetch("cargo", dirs)); + spawn_and_wait(super::build_sysroot::STANDARD_LIBRARY.fetch("cargo", "rustc", dirs)); + spawn_and_wait(super::tests::LIBCORE_TESTS.fetch("cargo", "rustc", dirs)); - super::abi_cafe::ABI_CAFE_REPO.fetch(dirs); - spawn_and_wait(super::abi_cafe::ABI_CAFE.fetch("cargo", dirs)); super::tests::RAND_REPO.fetch(dirs); - spawn_and_wait(super::tests::RAND.fetch("cargo", dirs)); + spawn_and_wait(super::tests::RAND.fetch("cargo", "rustc", dirs)); super::tests::REGEX_REPO.fetch(dirs); - spawn_and_wait(super::tests::REGEX.fetch("cargo", dirs)); + spawn_and_wait(super::tests::REGEX.fetch("cargo", "rustc", dirs)); super::tests::PORTABLE_SIMD_REPO.fetch(dirs); - spawn_and_wait(super::tests::PORTABLE_SIMD.fetch("cargo", dirs)); - super::bench::SIMPLE_RAYTRACER_REPO.fetch(dirs); - spawn_and_wait(super::bench::SIMPLE_RAYTRACER.fetch("cargo", dirs)); + spawn_and_wait(super::tests::PORTABLE_SIMD.fetch("cargo", "rustc", dirs)); } fn prepare_sysroot(dirs: &Dirs) { @@ -80,7 +76,7 @@ impl GitRepo { } } - fn fetch(&self, dirs: &Dirs) { + pub(crate) fn fetch(&self, dirs: &Dirs) { match self.url { GitRepoUrl::Github { user, repo } => { clone_repo_shallow_github( diff --git a/build_system/tests.rs b/build_system/tests.rs index dcfadd737566e..e9486888f86a4 100644 --- a/build_system/tests.rs +++ b/build_system/tests.rs @@ -1,4 +1,3 @@ -use super::bench::SIMPLE_RAYTRACER; use super::build_sysroot::{self, SYSROOT_SRC}; use super::config; 
use super::path::{Dirs, RelPath}; @@ -134,10 +133,6 @@ const EXTENDED_SYSROOT_SUITE: &[TestCase] = &[ spawn_and_wait(build_cmd); } }), - TestCase::custom("test.simple-raytracer", &|runner| { - SIMPLE_RAYTRACER.clean(&runner.dirs); - spawn_and_wait(SIMPLE_RAYTRACER.build(&runner.target_compiler, &runner.dirs)); - }), TestCase::custom("test.libcore", &|runner| { LIBCORE_TESTS.clean(&runner.dirs); diff --git a/build_system/utils.rs b/build_system/utils.rs index da2a94a0a4ff8..bdff1abeb279a 100644 --- a/build_system/utils.rs +++ b/build_system/utils.rs @@ -121,10 +121,18 @@ impl CargoProject { } #[must_use] - pub(crate) fn fetch(&self, cargo: impl AsRef, dirs: &Dirs) -> Command { + pub(crate) fn fetch( + &self, + cargo: impl AsRef, + rustc: impl AsRef, + dirs: &Dirs, + ) -> Command { let mut cmd = Command::new(cargo.as_ref()); - cmd.arg("fetch").arg("--manifest-path").arg(self.manifest_path(dirs)); + cmd.env("RUSTC", rustc.as_ref()) + .arg("fetch") + .arg("--manifest-path") + .arg(self.manifest_path(dirs)); cmd } diff --git a/config.txt b/config.txt index d49cc90791a5d..d6e3924a24d64 100644 --- a/config.txt +++ b/config.txt @@ -44,7 +44,6 @@ aot.issue-72793 testsuite.extended_sysroot test.rust-random/rand -test.simple-raytracer test.libcore test.regex-shootout-regex-dna test.regex From ba6d3e7e9d0c2b2cad77d7da2f2d6b4158ab2ad0 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 1 Oct 2022 14:56:24 +0200 Subject: [PATCH 173/501] Introduce GeneratorWitnessMIR. --- clippy_lints/src/dereference.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/clippy_lints/src/dereference.rs b/clippy_lints/src/dereference.rs index fa3e5aa6b7213..8e921839e8b2f 100644 --- a/clippy_lints/src/dereference.rs +++ b/clippy_lints/src/dereference.rs @@ -1419,6 +1419,7 @@ fn ty_auto_deref_stability<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, precedenc | ty::FnDef(..) | ty::Generator(..) | ty::GeneratorWitness(..) + | ty::GeneratorWitnessMIR(..) | ty::Closure(..) 
| ty::Never | ty::Tuple(_) From 5c7a65251a178d878995b5a9cb1e6eeac7506af2 Mon Sep 17 00:00:00 2001 From: Philipp Krones Date: Fri, 27 Jan 2023 21:09:08 +0100 Subject: [PATCH 174/501] Merge commit '1480cea393d0cee195e59949eabdfbcf1230f7f9' into clippyup --- .github/workflows/clippy_bors.yml | 5 + CHANGELOG.md | 199 ++++++- Cargo.toml | 2 +- README.md | 16 +- book/src/SUMMARY.md | 1 + book/src/configuration.md | 16 +- book/src/development/adding_lints.md | 7 +- book/src/development/infrastructure/book.md | 14 +- .../infrastructure/changelog_update.md | 16 +- book/src/lint_configuration.md | 523 ++++++++++++++++++ clippy_lints/Cargo.toml | 2 +- clippy_lints/src/bool_assert_comparison.rs | 53 +- .../src/casts/cast_possible_truncation.rs | 32 +- clippy_lints/src/casts/mod.rs | 20 +- clippy_lints/src/declared_lints.rs | 1 + clippy_lints/src/doc.rs | 2 +- clippy_lints/src/enum_variants.rs | 4 +- clippy_lints/src/from_raw_with_void_ptr.rs | 2 +- clippy_lints/src/instant_subtraction.rs | 2 +- clippy_lints/src/let_underscore.rs | 2 +- clippy_lints/src/lib.rs | 2 + clippy_lints/src/manual_is_ascii_check.rs | 2 +- clippy_lints/src/methods/mod.rs | 4 +- clippy_lints/src/missing_trait_methods.rs | 2 +- .../src/multiple_unsafe_ops_per_block.rs | 185 +++++++ clippy_lints/src/only_used_in_recursion.rs | 4 +- clippy_lints/src/returns.rs | 26 +- .../src/suspicious_xor_used_as_pow.rs | 4 +- clippy_lints/src/transmute/mod.rs | 7 +- .../transmutes_expressible_as_ptr_casts.rs | 58 +- clippy_lints/src/transmute/utils.rs | 24 +- .../src/undocumented_unsafe_blocks.rs | 12 + clippy_lints/src/utils/conf.rs | 3 +- .../internal_lints/metadata_collector.rs | 79 ++- clippy_utils/Cargo.toml | 2 +- clippy_utils/src/lib.rs | 4 + clippy_utils/src/ty.rs | 9 +- declare_clippy_lint/Cargo.toml | 2 +- rust-toolchain | 2 +- src/main.rs | 2 +- ...unnecessary_def_path_hardcoded_path.stderr | 16 +- tests/ui/bool_assert_comparison.fixed | 161 ++++++ tests/ui/bool_assert_comparison.rs | 43 +- tests/ui/bool_assert_comparison.stderr | 257 +++++++-- tests/ui/cast.rs | 1 + tests/ui/cast.stderr | 163 +++++- tests/ui/cast_size.stderr | 53 ++ tests/ui/module_name_repetitions.stderr | 20 +- tests/ui/multiple_unsafe_ops_per_block.rs | 110 ++++ tests/ui/multiple_unsafe_ops_per_block.stderr | 129 +++++ tests/ui/needless_return.fixed | 10 + tests/ui/needless_return.rs | 10 + tests/ui/needless_return.stderr | 110 ++-- .../transmutes_expressible_as_ptr_casts.fixed | 2 + .../ui/transmutes_expressible_as_ptr_casts.rs | 2 + ...transmutes_expressible_as_ptr_casts.stderr | 10 +- tests/ui/unnecessary_safety_comment.rs | 17 + 57 files changed, 2212 insertions(+), 254 deletions(-) create mode 100644 book/src/lint_configuration.md create mode 100644 clippy_lints/src/multiple_unsafe_ops_per_block.rs create mode 100644 tests/ui/bool_assert_comparison.fixed create mode 100644 tests/ui/multiple_unsafe_ops_per_block.rs create mode 100644 tests/ui/multiple_unsafe_ops_per_block.stderr diff --git a/.github/workflows/clippy_bors.yml b/.github/workflows/clippy_bors.yml index 1bc457a947936..24e677ce8e170 100644 --- a/.github/workflows/clippy_bors.yml +++ b/.github/workflows/clippy_bors.yml @@ -157,6 +157,11 @@ jobs: - name: Test metadata collection run: cargo collect-metadata + - name: Test lint_configuration.md is up-to-date + run: | + echo "run \`cargo collect-metadata\` if this fails" + git update-index --refresh + integration_build: needs: changelog runs-on: ubuntu-latest diff --git a/CHANGELOG.md b/CHANGELOG.md index 8e31e8f0d9815..e2cde09776f4c 100644 --- 
a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,11 +6,204 @@ document. ## Unreleased / Beta / In Rust Nightly -[4f142aa1...master](https://github.com/rust-lang/rust-clippy/compare/4f142aa1...master) +[d822110d...master](https://github.com/rust-lang/rust-clippy/compare/d822110d...master) + +## Rust 1.67 + +Current stable, released 2023-01-26 + +[4f142aa1...d822110d](https://github.com/rust-lang/rust-clippy/compare/4f142aa1...d822110d) + +### New Lints + +* [`seek_from_current`] + [#9681](https://github.com/rust-lang/rust-clippy/pull/9681) +* [`from_raw_with_void_ptr`] + [#9690](https://github.com/rust-lang/rust-clippy/pull/9690) +* [`misnamed_getters`] + [#9770](https://github.com/rust-lang/rust-clippy/pull/9770) +* [`seek_to_start_instead_of_rewind`] + [#9667](https://github.com/rust-lang/rust-clippy/pull/9667) +* [`suspicious_xor_used_as_pow`] + [#9506](https://github.com/rust-lang/rust-clippy/pull/9506) +* [`unnecessary_safety_doc`] + [#9822](https://github.com/rust-lang/rust-clippy/pull/9822) +* [`unchecked_duration_subtraction`] + [#9570](https://github.com/rust-lang/rust-clippy/pull/9570) +* [`manual_is_ascii_check`] + [#9765](https://github.com/rust-lang/rust-clippy/pull/9765) +* [`unnecessary_safety_comment`] + [#9851](https://github.com/rust-lang/rust-clippy/pull/9851) +* [`let_underscore_future`] + [#9760](https://github.com/rust-lang/rust-clippy/pull/9760) +* [`manual_let_else`] + [#8437](https://github.com/rust-lang/rust-clippy/pull/8437) + +### Moves and Deprecations + +* Moved [`uninlined_format_args`] to `style` (Now warn-by-default) + [#9865](https://github.com/rust-lang/rust-clippy/pull/9865) +* Moved [`needless_collect`] to `nursery` (Now allow-by-default) + [#9705](https://github.com/rust-lang/rust-clippy/pull/9705) +* Moved [`or_fun_call`] to `nursery` (Now allow-by-default) + [#9829](https://github.com/rust-lang/rust-clippy/pull/9829) +* Uplifted [`let_underscore_lock`] into rustc + [#9697](https://github.com/rust-lang/rust-clippy/pull/9697) +* Uplifted [`let_underscore_drop`] into rustc + [#9697](https://github.com/rust-lang/rust-clippy/pull/9697) +* Moved [`bool_to_int_with_if`] to `pedantic` (Now allow-by-default) + [#9830](https://github.com/rust-lang/rust-clippy/pull/9830) +* Move `index_refutable_slice` to `pedantic` (Now warn-by-default) + [#9975](https://github.com/rust-lang/rust-clippy/pull/9975) +* Moved [`manual_clamp`] to `nursery` (Now allow-by-default) + [#10101](https://github.com/rust-lang/rust-clippy/pull/10101) + +### Enhancements + +* The scope of `#![clippy::msrv]` is now tracked correctly + [#9924](https://github.com/rust-lang/rust-clippy/pull/9924) +* `#[clippy::msrv]` can now be used as an outer attribute + [#9860](https://github.com/rust-lang/rust-clippy/pull/9860) +* Clippy will now avoid Cargo's cache, if `Cargo.toml` or `clippy.toml` have changed + [#9707](https://github.com/rust-lang/rust-clippy/pull/9707) +* [`uninlined_format_args`]: Added a new config `allow-mixed-uninlined-format-args` to allow the + lint, if only some arguments can be inlined + [#9865](https://github.com/rust-lang/rust-clippy/pull/9865) +* [`needless_lifetimes`]: Now provides suggests for individual lifetimes + [#9743](https://github.com/rust-lang/rust-clippy/pull/9743) +* [`needless_collect`]: Now detects needless `is_empty` and `contains` calls + [#8744](https://github.com/rust-lang/rust-clippy/pull/8744) +* [`blanket_clippy_restriction_lints`]: Now lints, if `clippy::restriction` is enabled via the + command line arguments + 
[#9755](https://github.com/rust-lang/rust-clippy/pull/9755) +* [`mutable_key_type`]: Now has the `ignore-interior-mutability` configuration, to add types which + should be ignored by the lint + [#9692](https://github.com/rust-lang/rust-clippy/pull/9692) +* [`uninlined_format_args`]: Now works for multiline `format!` expressions + [#9945](https://github.com/rust-lang/rust-clippy/pull/9945) +* [`cognitive_complexity`]: Now works for async functions + [#9828](https://github.com/rust-lang/rust-clippy/pull/9828) + [#9836](https://github.com/rust-lang/rust-clippy/pull/9836) +* [`vec_box`]: Now avoids an off-by-one error when using the `vec-box-size-threshold` configuration + [#9848](https://github.com/rust-lang/rust-clippy/pull/9848) +* [`never_loop`]: Now correctly handles breaks in nested labeled blocks + [#9858](https://github.com/rust-lang/rust-clippy/pull/9858) + [#9837](https://github.com/rust-lang/rust-clippy/pull/9837) +* [`disallowed_methods`], [`disallowed_types`], [`disallowed_macros`]: Now correctly resolve + paths, if a crate is used multiple times with different versions + [#9800](https://github.com/rust-lang/rust-clippy/pull/9800) +* [`disallowed_methods`]: Can now be used for local methods + [#9800](https://github.com/rust-lang/rust-clippy/pull/9800) +* [`print_stdout`], [`print_stderr`]: Can now be enabled in test with the `allow-print-in-tests` + config value + [#9797](https://github.com/rust-lang/rust-clippy/pull/9797) +* [`from_raw_with_void_ptr`]: Now works for `Rc`, `Arc`, `alloc::rc::Weak` and + `alloc::sync::Weak` types. + [#9700](https://github.com/rust-lang/rust-clippy/pull/9700) +* [`needless_borrowed_reference`]: Now works for struct and tuple patterns with wildcards + [#9855](https://github.com/rust-lang/rust-clippy/pull/9855) +* [`or_fun_call`]: Now supports `map_or` methods + [#9689](https://github.com/rust-lang/rust-clippy/pull/9689) +* [`unwrap_used`], [`expect_used`]: No longer lints in test code + [#9686](https://github.com/rust-lang/rust-clippy/pull/9686) +* [`fn_params_excessive_bools`]: Is now emitted with the lint level at the linted function + [#9698](https://github.com/rust-lang/rust-clippy/pull/9698) + +### False Positive Fixes + +* [`new_ret_no_self`]: No longer lints when `impl Trait` is returned + [#9733](https://github.com/rust-lang/rust-clippy/pull/9733) +* [`unnecessary_lazy_evaluations`]: No longer lints, if the type has a significant drop + [#9750](https://github.com/rust-lang/rust-clippy/pull/9750) +* [`option_if_let_else`]: No longer lints, if any arm has guard + [#9747](https://github.com/rust-lang/rust-clippy/pull/9747) +* [`explicit_auto_deref`]: No longer lints, if the target type is a projection with generic + arguments + [#9813](https://github.com/rust-lang/rust-clippy/pull/9813) +* [`unnecessary_to_owned`]: No longer lints, if the suggestion effects types + [#9796](https://github.com/rust-lang/rust-clippy/pull/9796) +* [`needless_borrow`]: No longer lints, if the suggestion is affected by `Deref` + [#9674](https://github.com/rust-lang/rust-clippy/pull/9674) +* [`unused_unit`]: No longer lints, if lifetimes are bound to the return type + [#9849](https://github.com/rust-lang/rust-clippy/pull/9849) +* [`mut_mut`]: No longer lints cases with unsized mutable references + [#9835](https://github.com/rust-lang/rust-clippy/pull/9835) +* [`bool_to_int_with_if`]: No longer lints in const context + [#9738](https://github.com/rust-lang/rust-clippy/pull/9738) +* [`use_self`]: No longer lints in macros + 
[#9704](https://github.com/rust-lang/rust-clippy/pull/9704) +* [`unnecessary_operation`]: No longer lints, if multiple macros are involved + [#9981](https://github.com/rust-lang/rust-clippy/pull/9981) +* [`allow_attributes_without_reason`]: No longer lints inside external macros + [#9630](https://github.com/rust-lang/rust-clippy/pull/9630) +* [`question_mark`]: No longer lints for `if let Err()` with an `else` branch + [#9722](https://github.com/rust-lang/rust-clippy/pull/9722) +* [`unnecessary_cast`]: No longer lints if the identifier and cast originate from different macros + [#9980](https://github.com/rust-lang/rust-clippy/pull/9980) +* [`arithmetic_side_effects`]: Now detects operations with associated constants + [#9592](https://github.com/rust-lang/rust-clippy/pull/9592) +* [`explicit_auto_deref`]: No longer lints, if the initial value is not a reference or reference + receiver + [#9997](https://github.com/rust-lang/rust-clippy/pull/9997) +* [`module_name_repetitions`], [`single_component_path_imports`]: Now handle `#[allow]` + attributes correctly + [#9879](https://github.com/rust-lang/rust-clippy/pull/9879) +* [`bool_to_int_with_if`]: No longer lints `if let` statements + [#9714](https://github.com/rust-lang/rust-clippy/pull/9714) +* [`needless_borrow`]: No longer lints, `if`-`else`-statements that require the borrow + [#9791](https://github.com/rust-lang/rust-clippy/pull/9791) +* [`needless_borrow`]: No longer lints borrows, if moves were illegal + [#9711](https://github.com/rust-lang/rust-clippy/pull/9711) +* [`manual_swap`]: No longer lints in const context + [#9871](https://github.com/rust-lang/rust-clippy/pull/9871) + +### Suggestion Fixes/Improvements + +* [`missing_safety_doc`], [`missing_errors_doc`], [`missing_panics_doc`]: No longer show the + entire item in the lint emission. 
+ [#9772](https://github.com/rust-lang/rust-clippy/pull/9772) +* [`needless_lifetimes`]: Only suggests `'_` when it's applicable + [#9743](https://github.com/rust-lang/rust-clippy/pull/9743) +* [`use_self`]: Now suggests full paths correctly + [#9726](https://github.com/rust-lang/rust-clippy/pull/9726) +* [`redundant_closure_call`]: Now correctly deals with macros during suggestion creation + [#9987](https://github.com/rust-lang/rust-clippy/pull/9987) +* [`unnecessary_cast`]: Suggestions now correctly deal with references + [#9996](https://github.com/rust-lang/rust-clippy/pull/9996) +* [`unnecessary_join`]: Suggestions now correctly use [turbofish] operators + [#9779](https://github.com/rust-lang/rust-clippy/pull/9779) +* [`equatable_if_let`]: Can now suggest `matches!` replacements + [#9368](https://github.com/rust-lang/rust-clippy/pull/9368) +* [`string_extend_chars`]: Suggestions now correctly work for `str` slices + [#9741](https://github.com/rust-lang/rust-clippy/pull/9741) +* [`redundant_closure_for_method_calls`]: Suggestions now include angle brackets and generic + arguments if needed + [#9745](https://github.com/rust-lang/rust-clippy/pull/9745) +* [`manual_let_else`]: Suggestions no longer expand macro calls + [#9943](https://github.com/rust-lang/rust-clippy/pull/9943) +* [`infallible_destructuring_match`]: Suggestions now preserve references + [#9850](https://github.com/rust-lang/rust-clippy/pull/9850) +* [`result_large_err`]: The error now shows the largest enum variant + [#9662](https://github.com/rust-lang/rust-clippy/pull/9662) +* [`needless_return`]: Suggestions are now formatted better + [#9967](https://github.com/rust-lang/rust-clippy/pull/9967) +* [`unused_rounding`]: The suggestion now preserves the original float literal notation + [#9870](https://github.com/rust-lang/rust-clippy/pull/9870) + +[turbofish]: https://turbo.fish/::%3CClippy%3E + +### ICE Fixes + +* [`result_large_err`]: Fixed ICE for empty enums + [#10007](https://github.com/rust-lang/rust-clippy/pull/10007) +* [`redundant_allocation`]: Fixed ICE for types with bounded variables + [#9773](https://github.com/rust-lang/rust-clippy/pull/9773) +* [`unused_rounding`]: Fixed ICE, if `_` was used as a separator + [#10001](https://github.com/rust-lang/rust-clippy/pull/10001) ## Rust 1.66 -Current stable, released 2022-12-15 +Released 2022-12-15 [b52fb523...4f142aa1](https://github.com/rust-lang/rust-clippy/compare/b52fb523...4f142aa1) @@ -166,6 +359,7 @@ Current stable, released 2022-12-15 * [`unnecessary_to_owned`]: Avoid ICEs in favor of false negatives if information is missing [#9505](https://github.com/rust-lang/rust-clippy/pull/9505) + [#10027](https://github.com/rust-lang/rust-clippy/pull/10027) * [`manual_range_contains`]: No longer ICEs on values behind references [#9627](https://github.com/rust-lang/rust-clippy/pull/9627) * [`needless_pass_by_value`]: No longer ICEs on unsized `dyn Fn` arguments @@ -4383,6 +4577,7 @@ Released 2018-09-13 [`multi_assignments`]: https://rust-lang.github.io/rust-clippy/master/index.html#multi_assignments [`multiple_crate_versions`]: https://rust-lang.github.io/rust-clippy/master/index.html#multiple_crate_versions [`multiple_inherent_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#multiple_inherent_impl +[`multiple_unsafe_ops_per_block`]: https://rust-lang.github.io/rust-clippy/master/index.html#multiple_unsafe_ops_per_block [`must_use_candidate`]: https://rust-lang.github.io/rust-clippy/master/index.html#must_use_candidate [`must_use_unit`]: 
https://rust-lang.github.io/rust-clippy/master/index.html#must_use_unit [`mut_from_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#mut_from_ref diff --git a/Cargo.toml b/Cargo.toml index f8cb4b7219c47..2cfb47dd758aa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "clippy" -version = "0.1.68" +version = "0.1.69" description = "A bunch of helpful lints to avoid common pitfalls in Rust" repository = "https://github.com/rust-lang/rust-clippy" readme = "README.md" diff --git a/README.md b/README.md index 81254ba8b8b8f..ab44db694835f 100644 --- a/README.md +++ b/README.md @@ -194,11 +194,21 @@ value` mapping e.g. ```toml avoid-breaking-exported-api = false disallowed-names = ["toto", "tata", "titi"] -cognitive-complexity-threshold = 30 ``` -See the [list of configurable lints](https://rust-lang.github.io/rust-clippy/master/index.html#Configuration), -the lint descriptions contain the names and meanings of these configuration variables. +The [table of configurations](https://doc.rust-lang.org/nightly/clippy/lint_configuration.html) +contains all config values, their default, and a list of lints they affect. +Each [configurable lint](https://rust-lang.github.io/rust-clippy/master/index.html#Configuration) +, also contains information about these values. + +For configurations that are a list type with default values such as +[disallowed-names](https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_names), +you can use the unique value `".."` to extend the default values instead of replacing them. + +```toml +# default of disallowed-names is ["foo", "baz", "quux"] +disallowed-names = ["bar", ".."] # -> ["bar", "foo", "baz", "quux"] +``` > **Note** > diff --git a/book/src/SUMMARY.md b/book/src/SUMMARY.md index 1f0b8db28a152..0649f7a631df4 100644 --- a/book/src/SUMMARY.md +++ b/book/src/SUMMARY.md @@ -5,6 +5,7 @@ - [Installation](installation.md) - [Usage](usage.md) - [Configuration](configuration.md) + - [Lint Configuration](lint_configuration.md) - [Clippy's Lints](lints.md) - [Continuous Integration](continuous_integration/README.md) - [GitHub Actions](continuous_integration/github_actions.md) diff --git a/book/src/configuration.md b/book/src/configuration.md index 430ff8b739ae8..87f4a697af9fd 100644 --- a/book/src/configuration.md +++ b/book/src/configuration.md @@ -8,11 +8,21 @@ basic `variable = value` mapping eg. ```toml avoid-breaking-exported-api = false disallowed-names = ["toto", "tata", "titi"] -cognitive-complexity-threshold = 30 ``` -See the [list of configurable lints](https://rust-lang.github.io/rust-clippy/master/index.html#Configuration), -the lint descriptions contain the names and meanings of these configuration variables. +The [table of configurations](./lint_configuration.md) +contains all config values, their default, and a list of lints they affect. +Each [configurable lint](https://rust-lang.github.io/rust-clippy/master/index.html#Configuration) +, also contains information about these values. + +For configurations that are a list type with default values such as +[disallowed-names](https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_names), +you can use the unique value `".."` to extend the default values instead of replacing them. 
+ +```toml +# default of disallowed-names is ["foo", "baz", "quux"] +disallowed-names = ["bar", ".."] # -> ["bar", "foo", "baz", "quux"] +``` To deactivate the "for further information visit *lint-link*" message you can define the `CLIPPY_DISABLE_DOCS_LINKS` environment variable. diff --git a/book/src/development/adding_lints.md b/book/src/development/adding_lints.md index 8b4eee8c9d94d..f57dc627dce4c 100644 --- a/book/src/development/adding_lints.md +++ b/book/src/development/adding_lints.md @@ -146,7 +146,8 @@ For cargo lints, the process of testing differs in that we are interested in the manifest. If our new lint is named e.g. `foo_categories`, after running `cargo dev -new_lint` we will find by default two new crates, each with its manifest file: +new_lint --name=foo_categories --type=cargo --category=cargo` we will find by +default two new crates, each with its manifest file: * `tests/ui-cargo/foo_categories/fail/Cargo.toml`: this file should cause the new lint to raise an error. @@ -699,6 +700,10 @@ for some users. Adding a configuration is done in the following steps: `clippy.toml` file with the configuration value and a rust file that should be linted by Clippy. The test can otherwise be written as usual. +5. Update [Lint Configuration](../lint_configuration.md) + + Run `cargo collect-metadata` to generate documentation changes for the book. + [`clippy_lints::utils::conf`]: https://github.com/rust-lang/rust-clippy/blob/master/clippy_lints/src/utils/conf.rs [`clippy_lints` lib file]: https://github.com/rust-lang/rust-clippy/blob/master/clippy_lints/src/lib.rs [`tests/ui`]: https://github.com/rust-lang/rust-clippy/blob/master/tests/ui diff --git a/book/src/development/infrastructure/book.md b/book/src/development/infrastructure/book.md index a48742191850b..dbd624ecd7382 100644 --- a/book/src/development/infrastructure/book.md +++ b/book/src/development/infrastructure/book.md @@ -3,15 +3,15 @@ This document explains how to make additions and changes to the Clippy book, the guide to Clippy that you're reading right now. The Clippy book is formatted with [Markdown](https://www.markdownguide.org) and generated by -[mdbook](https://github.com/rust-lang/mdBook). +[mdBook](https://github.com/rust-lang/mdBook). -- [Get mdbook](#get-mdbook) +- [Get mdBook](#get-mdbook) - [Make changes](#make-changes) -## Get mdbook +## Get mdBook While not strictly necessary since the book source is simply Markdown text -files, having mdbook locally will allow you to build, test and serve the book +files, having mdBook locally will allow you to build, test and serve the book locally to view changes before you commit them to the repository. You likely already have `cargo` installed, so the easiest option is to simply: @@ -19,7 +19,7 @@ already have `cargo` installed, so the easiest option is to simply: cargo install mdbook ``` -See the mdbook [installation](https://github.com/rust-lang/mdBook#installation) +See the mdBook [installation](https://github.com/rust-lang/mdBook#installation) instructions for other options. ## Make changes @@ -27,7 +27,7 @@ instructions for other options. The book's [src](https://github.com/rust-lang/rust-clippy/tree/master/book/src) directory contains all of the markdown files used to generate the book. If you -want to see your changes in real time, you can use the mdbook `serve` command to +want to see your changes in real time, you can use the mdBook `serve` command to run a web server locally that will automatically update changes as they are made. 
From the top level of your `rust-clippy` directory: @@ -38,5 +38,5 @@ mdbook serve book --open Then navigate to `http://localhost:3000` to see the generated book. While the server is running, changes you make will automatically be updated. -For more information, see the mdbook +For more information, see the mdBook [guide](https://rust-lang.github.io/mdBook/). diff --git a/book/src/development/infrastructure/changelog_update.md b/book/src/development/infrastructure/changelog_update.md index 80a47affe30d0..d1ac7237b5e35 100644 --- a/book/src/development/infrastructure/changelog_update.md +++ b/book/src/development/infrastructure/changelog_update.md @@ -95,11 +95,23 @@ As section headers, we use: Please also be sure to update the Beta/Unreleased sections at the top with the relevant commit ranges. -If you have the time, it would be appreciated if you double-check, that the -`#[clippy::version]` attributes for the added lints contains the correct version. +#### 3.1 Include `beta-accepted` PRs + +Look for the [`beta-accepted`] label and make sure to also include the PRs with +that label in the changelog. If you can, remove the `beta-accepted` labels +**after** the changelog PR was merged. + +> _Note:_ Some of those PRs might even got backported to the previous `beta`. +> Those have to be included in the changelog of the _previous_ release. + +### 4. Update `clippy::version` attributes + +Next, make sure to check that the `#[clippy::version]` attributes for the added +lints contain the correct version. [changelog]: https://github.com/rust-lang/rust-clippy/blob/master/CHANGELOG.md [forge]: https://forge.rust-lang.org/ [rust_master_tools]: https://github.com/rust-lang/rust/tree/master/src/tools/clippy [rust_beta_tools]: https://github.com/rust-lang/rust/tree/beta/src/tools/clippy [rust_stable_tools]: https://github.com/rust-lang/rust/releases +[`beta-accepted`]: https://github.com/rust-lang/rust-clippy/issues?q=label%3Abeta-accepted+ diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md new file mode 100644 index 0000000000000..f79dbb50ff490 --- /dev/null +++ b/book/src/lint_configuration.md @@ -0,0 +1,523 @@ + + +## Lint Configuration Options +|
Option
| Default Value | +|--|--| +| [arithmetic-side-effects-allowed](#arithmetic-side-effects-allowed) | `{}` | +| [arithmetic-side-effects-allowed-binary](#arithmetic-side-effects-allowed-binary) | `[]` | +| [arithmetic-side-effects-allowed-unary](#arithmetic-side-effects-allowed-unary) | `{}` | +| [avoid-breaking-exported-api](#avoid-breaking-exported-api) | `true` | +| [msrv](#msrv) | `None` | +| [cognitive-complexity-threshold](#cognitive-complexity-threshold) | `25` | +| [disallowed-names](#disallowed-names) | `["foo", "baz", "quux"]` | +| [doc-valid-idents](#doc-valid-idents) | `["KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "DirectX", "ECMAScript", "GPLv2", "GPLv3", "GitHub", "GitLab", "IPv4", "IPv6", "ClojureScript", "CoffeeScript", "JavaScript", "PureScript", "TypeScript", "NaN", "NaNs", "OAuth", "GraphQL", "OCaml", "OpenGL", "OpenMP", "OpenSSH", "OpenSSL", "OpenStreetMap", "OpenDNS", "WebGL", "TensorFlow", "TrueType", "iOS", "macOS", "FreeBSD", "TeX", "LaTeX", "BibTeX", "BibLaTeX", "MinGW", "CamelCase"]` | +| [too-many-arguments-threshold](#too-many-arguments-threshold) | `7` | +| [type-complexity-threshold](#type-complexity-threshold) | `250` | +| [single-char-binding-names-threshold](#single-char-binding-names-threshold) | `4` | +| [too-large-for-stack](#too-large-for-stack) | `200` | +| [enum-variant-name-threshold](#enum-variant-name-threshold) | `3` | +| [enum-variant-size-threshold](#enum-variant-size-threshold) | `200` | +| [verbose-bit-mask-threshold](#verbose-bit-mask-threshold) | `1` | +| [literal-representation-threshold](#literal-representation-threshold) | `16384` | +| [trivial-copy-size-limit](#trivial-copy-size-limit) | `None` | +| [pass-by-value-size-limit](#pass-by-value-size-limit) | `256` | +| [too-many-lines-threshold](#too-many-lines-threshold) | `100` | +| [array-size-threshold](#array-size-threshold) | `512000` | +| [vec-box-size-threshold](#vec-box-size-threshold) | `4096` | +| [max-trait-bounds](#max-trait-bounds) | `3` | +| [max-struct-bools](#max-struct-bools) | `3` | +| [max-fn-params-bools](#max-fn-params-bools) | `3` | +| [warn-on-all-wildcard-imports](#warn-on-all-wildcard-imports) | `false` | +| [disallowed-macros](#disallowed-macros) | `[]` | +| [disallowed-methods](#disallowed-methods) | `[]` | +| [disallowed-types](#disallowed-types) | `[]` | +| [unreadable-literal-lint-fractions](#unreadable-literal-lint-fractions) | `true` | +| [upper-case-acronyms-aggressive](#upper-case-acronyms-aggressive) | `false` | +| [matches-for-let-else](#matches-for-let-else) | `WellKnownTypes` | +| [cargo-ignore-publish](#cargo-ignore-publish) | `false` | +| [standard-macro-braces](#standard-macro-braces) | `[]` | +| [enforced-import-renames](#enforced-import-renames) | `[]` | +| [allowed-scripts](#allowed-scripts) | `["Latin"]` | +| [enable-raw-pointer-heuristic-for-send](#enable-raw-pointer-heuristic-for-send) | `true` | +| [max-suggested-slice-pattern-length](#max-suggested-slice-pattern-length) | `3` | +| [max-include-file-size](#max-include-file-size) | `1000000` | +| [allow-expect-in-tests](#allow-expect-in-tests) | `false` | +| [allow-unwrap-in-tests](#allow-unwrap-in-tests) | `false` | +| [allow-dbg-in-tests](#allow-dbg-in-tests) | `false` | +| [allow-print-in-tests](#allow-print-in-tests) | `false` | +| [large-error-threshold](#large-error-threshold) | `128` | +| [ignore-interior-mutability](#ignore-interior-mutability) | `["bytes::Bytes"]` | +| [allow-mixed-uninlined-format-args](#allow-mixed-uninlined-format-args) | `true` | +| 
[suppress-restriction-lint-in-const](#suppress-restriction-lint-in-const) | `false` | + +### arithmetic-side-effects-allowed +Suppress checking of the passed type names in all types of operations. + +If a specific operation is desired, consider using `arithmetic_side_effects_allowed_binary` or `arithmetic_side_effects_allowed_unary` instead. + +#### Example + +```toml +arithmetic-side-effects-allowed = ["SomeType", "AnotherType"] +``` + +#### Noteworthy + +A type, say `SomeType`, listed in this configuration has the same behavior of +`["SomeType" , "*"], ["*", "SomeType"]` in `arithmetic_side_effects_allowed_binary`. + +**Default Value:** `{}` (`rustc_data_structures::fx::FxHashSet`) + +* [arithmetic_side_effects](https://rust-lang.github.io/rust-clippy/master/index.html#arithmetic_side_effects) + + +### arithmetic-side-effects-allowed-binary +Suppress checking of the passed type pair names in binary operations like addition or +multiplication. + +Supports the "*" wildcard to indicate that a certain type won't trigger the lint regardless +of the involved counterpart. For example, `["SomeType", "*"]` or `["*", "AnotherType"]`. + +Pairs are asymmetric, which means that `["SomeType", "AnotherType"]` is not the same as +`["AnotherType", "SomeType"]`. + +#### Example + +```toml +arithmetic-side-effects-allowed-binary = [["SomeType" , "f32"], ["AnotherType", "*"]] +``` + +**Default Value:** `[]` (`Vec<[String; 2]>`) + +* [arithmetic_side_effects](https://rust-lang.github.io/rust-clippy/master/index.html#arithmetic_side_effects) + + +### arithmetic-side-effects-allowed-unary +Suppress checking of the passed type names in unary operations like "negation" (`-`). + +#### Example + +```toml +arithmetic-side-effects-allowed-unary = ["SomeType", "AnotherType"] +``` + +**Default Value:** `{}` (`rustc_data_structures::fx::FxHashSet`) + +* [arithmetic_side_effects](https://rust-lang.github.io/rust-clippy/master/index.html#arithmetic_side_effects) + + +### avoid-breaking-exported-api +Suppress lints whenever the suggested change would cause breakage for other crates. 
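+
+#### Example
+
+A possible `clippy.toml` entry; the value shown is only illustrative, not a recommendation:
+
+```toml
+# also lint items that are part of the crate's public API
+avoid-breaking-exported-api = false
+```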
+ +**Default Value:** `true` (`bool`) + +* [enum_variant_names](https://rust-lang.github.io/rust-clippy/master/index.html#enum_variant_names) +* [large_types_passed_by_value](https://rust-lang.github.io/rust-clippy/master/index.html#large_types_passed_by_value) +* [trivially_copy_pass_by_ref](https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref) +* [unnecessary_wraps](https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_wraps) +* [unused_self](https://rust-lang.github.io/rust-clippy/master/index.html#unused_self) +* [upper_case_acronyms](https://rust-lang.github.io/rust-clippy/master/index.html#upper_case_acronyms) +* [wrong_self_convention](https://rust-lang.github.io/rust-clippy/master/index.html#wrong_self_convention) +* [box_collection](https://rust-lang.github.io/rust-clippy/master/index.html#box_collection) +* [redundant_allocation](https://rust-lang.github.io/rust-clippy/master/index.html#redundant_allocation) +* [rc_buffer](https://rust-lang.github.io/rust-clippy/master/index.html#rc_buffer) +* [vec_box](https://rust-lang.github.io/rust-clippy/master/index.html#vec_box) +* [option_option](https://rust-lang.github.io/rust-clippy/master/index.html#option_option) +* [linkedlist](https://rust-lang.github.io/rust-clippy/master/index.html#linkedlist) +* [rc_mutex](https://rust-lang.github.io/rust-clippy/master/index.html#rc_mutex) + + +### msrv +The minimum rust version that the project supports + +**Default Value:** `None` (`Option`) + +* [manual_split_once](https://rust-lang.github.io/rust-clippy/master/index.html#manual_split_once) +* [manual_str_repeat](https://rust-lang.github.io/rust-clippy/master/index.html#manual_str_repeat) +* [cloned_instead_of_copied](https://rust-lang.github.io/rust-clippy/master/index.html#cloned_instead_of_copied) +* [redundant_field_names](https://rust-lang.github.io/rust-clippy/master/index.html#redundant_field_names) +* [redundant_static_lifetimes](https://rust-lang.github.io/rust-clippy/master/index.html#redundant_static_lifetimes) +* [filter_map_next](https://rust-lang.github.io/rust-clippy/master/index.html#filter_map_next) +* [checked_conversions](https://rust-lang.github.io/rust-clippy/master/index.html#checked_conversions) +* [manual_range_contains](https://rust-lang.github.io/rust-clippy/master/index.html#manual_range_contains) +* [use_self](https://rust-lang.github.io/rust-clippy/master/index.html#use_self) +* [mem_replace_with_default](https://rust-lang.github.io/rust-clippy/master/index.html#mem_replace_with_default) +* [manual_non_exhaustive](https://rust-lang.github.io/rust-clippy/master/index.html#manual_non_exhaustive) +* [option_as_ref_deref](https://rust-lang.github.io/rust-clippy/master/index.html#option_as_ref_deref) +* [map_unwrap_or](https://rust-lang.github.io/rust-clippy/master/index.html#map_unwrap_or) +* [match_like_matches_macro](https://rust-lang.github.io/rust-clippy/master/index.html#match_like_matches_macro) +* [manual_strip](https://rust-lang.github.io/rust-clippy/master/index.html#manual_strip) +* [missing_const_for_fn](https://rust-lang.github.io/rust-clippy/master/index.html#missing_const_for_fn) +* [unnested_or_patterns](https://rust-lang.github.io/rust-clippy/master/index.html#unnested_or_patterns) +* [from_over_into](https://rust-lang.github.io/rust-clippy/master/index.html#from_over_into) +* [ptr_as_ptr](https://rust-lang.github.io/rust-clippy/master/index.html#ptr_as_ptr) +* 
[if_then_some_else_none](https://rust-lang.github.io/rust-clippy/master/index.html#if_then_some_else_none)
+* [approx_constant](https://rust-lang.github.io/rust-clippy/master/index.html#approx_constant)
+* [deprecated_cfg_attr](https://rust-lang.github.io/rust-clippy/master/index.html#deprecated_cfg_attr)
+* [index_refutable_slice](https://rust-lang.github.io/rust-clippy/master/index.html#index_refutable_slice)
+* [map_clone](https://rust-lang.github.io/rust-clippy/master/index.html#map_clone)
+* [borrow_as_ptr](https://rust-lang.github.io/rust-clippy/master/index.html#borrow_as_ptr)
+* [manual_bits](https://rust-lang.github.io/rust-clippy/master/index.html#manual_bits)
+* [err_expect](https://rust-lang.github.io/rust-clippy/master/index.html#err_expect)
+* [cast_abs_to_unsigned](https://rust-lang.github.io/rust-clippy/master/index.html#cast_abs_to_unsigned)
+* [uninlined_format_args](https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args)
+* [manual_clamp](https://rust-lang.github.io/rust-clippy/master/index.html#manual_clamp)
+* [manual_let_else](https://rust-lang.github.io/rust-clippy/master/index.html#manual_let_else)
+* [unchecked_duration_subtraction](https://rust-lang.github.io/rust-clippy/master/index.html#unchecked_duration_subtraction)
+
+
+### cognitive-complexity-threshold
+The maximum cognitive complexity a function can have
+
+**Default Value:** `25` (`u64`)
+
+* [cognitive_complexity](https://rust-lang.github.io/rust-clippy/master/index.html#cognitive_complexity)
+
+
+### disallowed-names
+The list of disallowed names to lint about. NB: `bar` is not here since it has legitimate uses. The value
+`".."` can be used as part of the list to indicate that the configured values should be appended to the
+default configuration of Clippy. By default, any configuration will replace the default value.
+
+**Default Value:** `["foo", "baz", "quux"]` (`Vec`)
+
+* [disallowed_names](https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_names)
+
+
+### doc-valid-idents
+The list of words this lint should not consider as identifiers needing ticks. The value
+`".."` can be used as part of the list to indicate that the configured values should be appended to the
+default configuration of Clippy. By default, any configuration will replace the default value. For example:
+* `doc-valid-idents = ["ClipPy"]` would replace the default list with `["ClipPy"]`.
+* `doc-valid-idents = ["ClipPy", ".."]` would append `ClipPy` to the default list.
+ +Default list: + +**Default Value:** `["KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "DirectX", "ECMAScript", "GPLv2", "GPLv3", "GitHub", "GitLab", "IPv4", "IPv6", "ClojureScript", "CoffeeScript", "JavaScript", "PureScript", "TypeScript", "NaN", "NaNs", "OAuth", "GraphQL", "OCaml", "OpenGL", "OpenMP", "OpenSSH", "OpenSSL", "OpenStreetMap", "OpenDNS", "WebGL", "TensorFlow", "TrueType", "iOS", "macOS", "FreeBSD", "TeX", "LaTeX", "BibTeX", "BibLaTeX", "MinGW", "CamelCase"]` (`Vec`) + +* [doc_markdown](https://rust-lang.github.io/rust-clippy/master/index.html#doc_markdown) + + +### too-many-arguments-threshold +The maximum number of argument a function or method can have + +**Default Value:** `7` (`u64`) + +* [too_many_arguments](https://rust-lang.github.io/rust-clippy/master/index.html#too_many_arguments) + + +### type-complexity-threshold +The maximum complexity a type can have + +**Default Value:** `250` (`u64`) + +* [type_complexity](https://rust-lang.github.io/rust-clippy/master/index.html#type_complexity) + + +### single-char-binding-names-threshold +The maximum number of single char bindings a scope may have + +**Default Value:** `4` (`u64`) + +* [many_single_char_names](https://rust-lang.github.io/rust-clippy/master/index.html#many_single_char_names) + + +### too-large-for-stack +The maximum size of objects (in bytes) that will be linted. Larger objects are ok on the heap + +**Default Value:** `200` (`u64`) + +* [boxed_local](https://rust-lang.github.io/rust-clippy/master/index.html#boxed_local) +* [useless_vec](https://rust-lang.github.io/rust-clippy/master/index.html#useless_vec) + + +### enum-variant-name-threshold +The minimum number of enum variants for the lints about variant names to trigger + +**Default Value:** `3` (`u64`) + +* [enum_variant_names](https://rust-lang.github.io/rust-clippy/master/index.html#enum_variant_names) + + +### enum-variant-size-threshold +The maximum size of an enum's variant to avoid box suggestion + +**Default Value:** `200` (`u64`) + +* [large_enum_variant](https://rust-lang.github.io/rust-clippy/master/index.html#large_enum_variant) + + +### verbose-bit-mask-threshold +The maximum allowed size of a bit mask before suggesting to use 'trailing_zeros' + +**Default Value:** `1` (`u64`) + +* [verbose_bit_mask](https://rust-lang.github.io/rust-clippy/master/index.html#verbose_bit_mask) + + +### literal-representation-threshold +The lower bound for linting decimal literals + +**Default Value:** `16384` (`u64`) + +* [decimal_literal_representation](https://rust-lang.github.io/rust-clippy/master/index.html#decimal_literal_representation) + + +### trivial-copy-size-limit +The maximum size (in bytes) to consider a `Copy` type for passing by value instead of by reference. + +**Default Value:** `None` (`Option`) + +* [trivially_copy_pass_by_ref](https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref) + + +### pass-by-value-size-limit +The minimum size (in bytes) to consider a type for passing by reference instead of by value. 
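+
+#### Example
+
+An illustrative `clippy.toml` entry; the threshold shown here is only an example value:
+
+```toml
+# only lint when the type passed by value is larger than 512 bytes
+pass-by-value-size-limit = 512
+```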
+ +**Default Value:** `256` (`u64`) + +* [large_type_pass_by_move](https://rust-lang.github.io/rust-clippy/master/index.html#large_type_pass_by_move) + + +### too-many-lines-threshold +The maximum number of lines a function or method can have + +**Default Value:** `100` (`u64`) + +* [too_many_lines](https://rust-lang.github.io/rust-clippy/master/index.html#too_many_lines) + + +### array-size-threshold +The maximum allowed size for arrays on the stack + +**Default Value:** `512000` (`u128`) + +* [large_stack_arrays](https://rust-lang.github.io/rust-clippy/master/index.html#large_stack_arrays) +* [large_const_arrays](https://rust-lang.github.io/rust-clippy/master/index.html#large_const_arrays) + + +### vec-box-size-threshold +The size of the boxed type in bytes, where boxing in a `Vec` is allowed + +**Default Value:** `4096` (`u64`) + +* [vec_box](https://rust-lang.github.io/rust-clippy/master/index.html#vec_box) + + +### max-trait-bounds +The maximum number of bounds a trait can have to be linted + +**Default Value:** `3` (`u64`) + +* [type_repetition_in_bounds](https://rust-lang.github.io/rust-clippy/master/index.html#type_repetition_in_bounds) + + +### max-struct-bools +The maximum number of bool fields a struct can have + +**Default Value:** `3` (`u64`) + +* [struct_excessive_bools](https://rust-lang.github.io/rust-clippy/master/index.html#struct_excessive_bools) + + +### max-fn-params-bools +The maximum number of bool parameters a function can have + +**Default Value:** `3` (`u64`) + +* [fn_params_excessive_bools](https://rust-lang.github.io/rust-clippy/master/index.html#fn_params_excessive_bools) + + +### warn-on-all-wildcard-imports +Whether to allow certain wildcard imports (prelude, super in tests). + +**Default Value:** `false` (`bool`) + +* [wildcard_imports](https://rust-lang.github.io/rust-clippy/master/index.html#wildcard_imports) + + +### disallowed-macros +The list of disallowed macros, written as fully qualified paths. + +**Default Value:** `[]` (`Vec`) + +* [disallowed_macros](https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_macros) + + +### disallowed-methods +The list of disallowed methods, written as fully qualified paths. + +**Default Value:** `[]` (`Vec`) + +* [disallowed_methods](https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_methods) + + +### disallowed-types +The list of disallowed types, written as fully qualified paths. + +**Default Value:** `[]` (`Vec`) + +* [disallowed_types](https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_types) + + +### unreadable-literal-lint-fractions +Should the fraction of a decimal be linted to include separators. + +**Default Value:** `true` (`bool`) + +* [unreadable_literal](https://rust-lang.github.io/rust-clippy/master/index.html#unreadable_literal) + + +### upper-case-acronyms-aggressive +Enables verbose mode. Triggers if there is more than one uppercase char next to each other + +**Default Value:** `false` (`bool`) + +* [upper_case_acronyms](https://rust-lang.github.io/rust-clippy/master/index.html#upper_case_acronyms) + + +### matches-for-let-else +Whether the matches should be considered by the lint, and whether there should +be filtering for common types. + +**Default Value:** `WellKnownTypes` (`crate::manual_let_else::MatchLintBehaviour`) + +* [manual_let_else](https://rust-lang.github.io/rust-clippy/master/index.html#manual_let_else) + + +### cargo-ignore-publish +For internal testing only, ignores the current `publish` settings in the Cargo manifest. 
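+
+#### Example
+
+A minimal `clippy.toml` sketch (illustrative only, since this option exists for Clippy's own test suite):
+
+```toml
+cargo-ignore-publish = true
+```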
+ +**Default Value:** `false` (`bool`) + +* [_cargo_common_metadata](https://rust-lang.github.io/rust-clippy/master/index.html#_cargo_common_metadata) + + +### standard-macro-braces +Enforce the named macros always use the braces specified. + +A `MacroMatcher` can be added like so `{ name = "macro_name", brace = "(" }`. If the macro +is could be used with a full path two `MacroMatcher`s have to be added one with the full path +`crate_name::macro_name` and one with just the macro name. + +**Default Value:** `[]` (`Vec`) + +* [nonstandard_macro_braces](https://rust-lang.github.io/rust-clippy/master/index.html#nonstandard_macro_braces) + + +### enforced-import-renames +The list of imports to always rename, a fully qualified path followed by the rename. + +**Default Value:** `[]` (`Vec`) + +* [missing_enforced_import_renames](https://rust-lang.github.io/rust-clippy/master/index.html#missing_enforced_import_renames) + + +### allowed-scripts +The list of unicode scripts allowed to be used in the scope. + +**Default Value:** `["Latin"]` (`Vec`) + +* [disallowed_script_idents](https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_script_idents) + + +### enable-raw-pointer-heuristic-for-send +Whether to apply the raw pointer heuristic to determine if a type is `Send`. + +**Default Value:** `true` (`bool`) + +* [non_send_fields_in_send_ty](https://rust-lang.github.io/rust-clippy/master/index.html#non_send_fields_in_send_ty) + + +### max-suggested-slice-pattern-length +When Clippy suggests using a slice pattern, this is the maximum number of elements allowed in +the slice pattern that is suggested. If more elements would be necessary, the lint is suppressed. +For example, `[_, _, _, e, ..]` is a slice pattern with 4 elements. + +**Default Value:** `3` (`u64`) + +* [index_refutable_slice](https://rust-lang.github.io/rust-clippy/master/index.html#index_refutable_slice) + + +### max-include-file-size +The maximum size of a file included via `include_bytes!()` or `include_str!()`, in bytes + +**Default Value:** `1000000` (`u64`) + +* [large_include_file](https://rust-lang.github.io/rust-clippy/master/index.html#large_include_file) + + +### allow-expect-in-tests +Whether `expect` should be allowed within `#[cfg(test)]` + +**Default Value:** `false` (`bool`) + +* [expect_used](https://rust-lang.github.io/rust-clippy/master/index.html#expect_used) + + +### allow-unwrap-in-tests +Whether `unwrap` should be allowed in test cfg + +**Default Value:** `false` (`bool`) + +* [unwrap_used](https://rust-lang.github.io/rust-clippy/master/index.html#unwrap_used) + + +### allow-dbg-in-tests +Whether `dbg!` should be allowed in test functions + +**Default Value:** `false` (`bool`) + +* [dbg_macro](https://rust-lang.github.io/rust-clippy/master/index.html#dbg_macro) + + +### allow-print-in-tests +Whether print macros (ex. `println!`) should be allowed in test functions + +**Default Value:** `false` (`bool`) + +* [print_stdout](https://rust-lang.github.io/rust-clippy/master/index.html#print_stdout) +* [print_stderr](https://rust-lang.github.io/rust-clippy/master/index.html#print_stderr) + + +### large-error-threshold +The maximum size of the `Err`-variant in a `Result` returned from a function + +**Default Value:** `128` (`u64`) + +* [result_large_err](https://rust-lang.github.io/rust-clippy/master/index.html#result_large_err) + + +### ignore-interior-mutability +A list of paths to types that should be treated like `Arc`, i.e. 
ignored but +for the generic parameters for determining interior mutability + +**Default Value:** `["bytes::Bytes"]` (`Vec`) + +* [mutable_key](https://rust-lang.github.io/rust-clippy/master/index.html#mutable_key) + + +### allow-mixed-uninlined-format-args +Whether to allow mixed uninlined format args, e.g. `format!("{} {}", a, foo.bar)` + +**Default Value:** `true` (`bool`) + +* [uninlined_format_args](https://rust-lang.github.io/rust-clippy/master/index.html#uninlined_format_args) + + +### suppress-restriction-lint-in-const +In same +cases the restructured operation might not be unavoidable, as the +suggested counterparts are unavailable in constant code. This +configuration will cause restriction lints to trigger even +if no suggestion can be made. + +**Default Value:** `false` (`bool`) + +* [indexing_slicing](https://rust-lang.github.io/rust-clippy/master/index.html#indexing_slicing) + + + diff --git a/clippy_lints/Cargo.toml b/clippy_lints/Cargo.toml index a9f69b1ba6300..4c40483e3ec94 100644 --- a/clippy_lints/Cargo.toml +++ b/clippy_lints/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "clippy_lints" -version = "0.1.68" +version = "0.1.69" description = "A bunch of helpful lints to avoid common pitfalls in Rust" repository = "https://github.com/rust-lang/rust-clippy" readme = "README.md" diff --git a/clippy_lints/src/bool_assert_comparison.rs b/clippy_lints/src/bool_assert_comparison.rs index 82d368bb8bc2c..556fa579000c6 100644 --- a/clippy_lints/src/bool_assert_comparison.rs +++ b/clippy_lints/src/bool_assert_comparison.rs @@ -1,10 +1,11 @@ +use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::macros::{find_assert_eq_args, root_macro_call_first_node}; -use clippy_utils::{diagnostics::span_lint_and_sugg, ty::implements_trait}; +use clippy_utils::ty::{implements_trait, is_copy}; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, Lit}; -use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty; +use rustc_lint::{LateContext, LateLintPass, LintContext}; +use rustc_middle::ty::{self, Ty}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::symbol::Ident; @@ -43,9 +44,7 @@ fn is_bool_lit(e: &Expr<'_>) -> bool { ) && !e.span.from_expansion() } -fn is_impl_not_trait_with_bool_out(cx: &LateContext<'_>, e: &Expr<'_>) -> bool { - let ty = cx.typeck_results().expr_ty(e); - +fn is_impl_not_trait_with_bool_out<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool { cx.tcx .lang_items() .not_trait() @@ -77,31 +76,57 @@ impl<'tcx> LateLintPass<'tcx> for BoolAssertComparison { return; } let Some ((a, b, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return }; - if !(is_bool_lit(a) ^ is_bool_lit(b)) { + + let a_span = a.span.source_callsite(); + let b_span = b.span.source_callsite(); + + let (lit_span, non_lit_expr) = match (is_bool_lit(a), is_bool_lit(b)) { + // assert_eq!(true, b) + // ^^^^^^ + (true, false) => (a_span.until(b_span), b), + // assert_eq!(a, true) + // ^^^^^^ + (false, true) => (b_span.with_lo(a_span.hi()), a), // If there are two boolean arguments, we definitely don't understand // what's going on, so better leave things as is... // // Or there is simply no boolean and then we can leave things as is! 
- return; - } + _ => return, + }; - if !is_impl_not_trait_with_bool_out(cx, a) || !is_impl_not_trait_with_bool_out(cx, b) { + let non_lit_ty = cx.typeck_results().expr_ty(non_lit_expr); + + if !is_impl_not_trait_with_bool_out(cx, non_lit_ty) { // At this point the expression which is not a boolean // literal does not implement Not trait with a bool output, // so we cannot suggest to rewrite our code return; } + if !is_copy(cx, non_lit_ty) { + // Only lint with types that are `Copy` because `assert!(x)` takes + // ownership of `x` whereas `assert_eq(x, true)` does not + return; + } + let macro_name = macro_name.as_str(); let non_eq_mac = ¯o_name[..macro_name.len() - 3]; - span_lint_and_sugg( + span_lint_and_then( cx, BOOL_ASSERT_COMPARISON, macro_call.span, &format!("used `{macro_name}!` with a literal bool"), - "replace it with", - format!("{non_eq_mac}!(..)"), - Applicability::MaybeIncorrect, + |diag| { + // assert_eq!(...) + // ^^^^^^^^^ + let name_span = cx.sess().source_map().span_until_char(macro_call.span, '!'); + + diag.multipart_suggestion( + format!("replace it with `{non_eq_mac}!(..)`"), + vec![(name_span, non_eq_mac.to_string()), (lit_span, String::new())], + Applicability::MachineApplicable, + ); + }, ); } } diff --git a/clippy_lints/src/casts/cast_possible_truncation.rs b/clippy_lints/src/casts/cast_possible_truncation.rs index a6376484914ba..f3f8b8d87982e 100644 --- a/clippy_lints/src/casts/cast_possible_truncation.rs +++ b/clippy_lints/src/casts/cast_possible_truncation.rs @@ -1,11 +1,14 @@ use clippy_utils::consts::{constant, Constant}; -use clippy_utils::diagnostics::span_lint; +use clippy_utils::diagnostics::{span_lint, span_lint_and_then}; use clippy_utils::expr_or_init; +use clippy_utils::source::snippet; use clippy_utils::ty::{get_discriminant_value, is_isize_or_usize}; +use rustc_errors::{Applicability, SuggestionStyle}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{BinOpKind, Expr, ExprKind}; use rustc_lint::LateContext; use rustc_middle::ty::{self, FloatTy, Ty}; +use rustc_span::Span; use rustc_target::abi::IntegerType; use super::{utils, CAST_ENUM_TRUNCATION, CAST_POSSIBLE_TRUNCATION}; @@ -74,7 +77,14 @@ fn apply_reductions(cx: &LateContext<'_>, nbits: u64, expr: &Expr<'_>, signed: b } } -pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) { +pub(super) fn check( + cx: &LateContext<'_>, + expr: &Expr<'_>, + cast_expr: &Expr<'_>, + cast_from: Ty<'_>, + cast_to: Ty<'_>, + cast_to_span: Span, +) { let msg = match (cast_from.kind(), cast_to.is_integral()) { (ty::Int(_) | ty::Uint(_), true) => { let from_nbits = apply_reductions( @@ -139,7 +149,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, ); return; } - format!("casting `{cast_from}` to `{cast_to}` may truncate the value{suffix}",) + format!("casting `{cast_from}` to `{cast_to}` may truncate the value{suffix}") }, (ty::Float(_), true) => { @@ -153,5 +163,19 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, _ => return, }; - span_lint(cx, CAST_POSSIBLE_TRUNCATION, expr.span, &msg); + let name_of_cast_from = snippet(cx, cast_expr.span, ".."); + let cast_to_snip = snippet(cx, cast_to_span, ".."); + let suggestion = format!("{cast_to_snip}::try_from({name_of_cast_from})"); + + span_lint_and_then(cx, CAST_POSSIBLE_TRUNCATION, expr.span, &msg, |diag| { + diag.help("if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ..."); + 
diag.span_suggestion_with_style( + expr.span, + "... or use `try_from` and handle the error accordingly", + suggestion, + Applicability::Unspecified, + // always show the suggestion in a separate line + SuggestionStyle::ShowAlways, + ); + }); } diff --git a/clippy_lints/src/casts/mod.rs b/clippy_lints/src/casts/mod.rs index 161e3a698e9ea..362f70d12d185 100644 --- a/clippy_lints/src/casts/mod.rs +++ b/clippy_lints/src/casts/mod.rs @@ -80,7 +80,8 @@ declare_clippy_lint! { /// ### What it does /// Checks for casts between numerical types that may /// truncate large values. This is expected behavior, so the cast is `Allow` by - /// default. + /// default. It suggests user either explicitly ignore the lint, + /// or use `try_from()` and handle the truncation, default, or panic explicitly. /// /// ### Why is this bad? /// In some problem domains, it is good practice to avoid @@ -93,6 +94,21 @@ declare_clippy_lint! { /// x as u8 /// } /// ``` + /// Use instead: + /// ``` + /// fn as_u8(x: u64) -> u8 { + /// if let Ok(x) = u8::try_from(x) { + /// x + /// } else { + /// todo!(); + /// } + /// } + /// // Or + /// #[allow(clippy::cast_possible_truncation)] + /// fn as_u16(x: u64) -> u16 { + /// x as u16 + /// } + /// ``` #[clippy::version = "pre 1.29.0"] pub CAST_POSSIBLE_TRUNCATION, pedantic, @@ -712,7 +728,7 @@ impl<'tcx> LateLintPass<'tcx> for Casts { fn_to_numeric_cast_with_truncation::check(cx, expr, cast_expr, cast_from, cast_to); if cast_to.is_numeric() && !in_external_macro(cx.sess(), expr.span) { - cast_possible_truncation::check(cx, expr, cast_expr, cast_from, cast_to); + cast_possible_truncation::check(cx, expr, cast_expr, cast_from, cast_to, cast_to_hir.span); if cast_from.is_numeric() { cast_possible_wrap::check(cx, expr, cast_from, cast_to); cast_precision_loss::check(cx, expr, cast_from, cast_to); diff --git a/clippy_lints/src/declared_lints.rs b/clippy_lints/src/declared_lints.rs index 91ca73633f062..36a366fc97474 100644 --- a/clippy_lints/src/declared_lints.rs +++ b/clippy_lints/src/declared_lints.rs @@ -422,6 +422,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[ crate::module_style::MOD_MODULE_FILES_INFO, crate::module_style::SELF_NAMED_MODULE_FILES_INFO, crate::multi_assignments::MULTI_ASSIGNMENTS_INFO, + crate::multiple_unsafe_ops_per_block::MULTIPLE_UNSAFE_OPS_PER_BLOCK_INFO, crate::mut_key::MUTABLE_KEY_TYPE_INFO, crate::mut_mut::MUT_MUT_INFO, crate::mut_reference::UNNECESSARY_MUT_PASSED_INFO, diff --git a/clippy_lints/src/doc.rs b/clippy_lints/src/doc.rs index cdc23a4d22739..f7a3d6d53f714 100644 --- a/clippy_lints/src/doc.rs +++ b/clippy_lints/src/doc.rs @@ -251,7 +251,7 @@ declare_clippy_lint! 
{ /// unimplemented!(); /// } /// ``` - #[clippy::version = "1.66.0"] + #[clippy::version = "1.67.0"] pub UNNECESSARY_SAFETY_DOC, restriction, "`pub fn` or `pub trait` with `# Safety` docs" diff --git a/clippy_lints/src/enum_variants.rs b/clippy_lints/src/enum_variants.rs index b77b5621b4c68..4c69dacf381ad 100644 --- a/clippy_lints/src/enum_variants.rs +++ b/clippy_lints/src/enum_variants.rs @@ -277,7 +277,7 @@ impl LateLintPass<'_> for EnumVariantNames { Some(c) if is_word_beginning(c) => span_lint( cx, MODULE_NAME_REPETITIONS, - item.span, + item.ident.span, "item name starts with its containing module's name", ), _ => (), @@ -287,7 +287,7 @@ impl LateLintPass<'_> for EnumVariantNames { span_lint( cx, MODULE_NAME_REPETITIONS, - item.span, + item.ident.span, "item name ends with its containing module's name", ); } diff --git a/clippy_lints/src/from_raw_with_void_ptr.rs b/clippy_lints/src/from_raw_with_void_ptr.rs index 00f5ba56496ec..096508dc4f11e 100644 --- a/clippy_lints/src/from_raw_with_void_ptr.rs +++ b/clippy_lints/src/from_raw_with_void_ptr.rs @@ -31,7 +31,7 @@ declare_clippy_lint! { /// let _ = unsafe { Box::from_raw(ptr as *mut usize) }; /// ``` /// - #[clippy::version = "1.66.0"] + #[clippy::version = "1.67.0"] pub FROM_RAW_WITH_VOID_PTR, suspicious, "creating a `Box` from a void raw pointer" diff --git a/clippy_lints/src/instant_subtraction.rs b/clippy_lints/src/instant_subtraction.rs index 9f6e89405713c..668110c7cc081 100644 --- a/clippy_lints/src/instant_subtraction.rs +++ b/clippy_lints/src/instant_subtraction.rs @@ -59,7 +59,7 @@ declare_clippy_lint! { /// /// [`Duration`]: std::time::Duration /// [`Instant::now()`]: std::time::Instant::now; - #[clippy::version = "1.65.0"] + #[clippy::version = "1.67.0"] pub UNCHECKED_DURATION_SUBTRACTION, pedantic, "finds unchecked subtraction of a 'Duration' from an 'Instant'" diff --git a/clippy_lints/src/let_underscore.rs b/clippy_lints/src/let_underscore.rs index 61f87b91400d7..f8e3595098088 100644 --- a/clippy_lints/src/let_underscore.rs +++ b/clippy_lints/src/let_underscore.rs @@ -84,7 +84,7 @@ declare_clippy_lint! { /// let _ = foo().await; /// # } /// ``` - #[clippy::version = "1.66"] + #[clippy::version = "1.67.0"] pub LET_UNDERSCORE_FUTURE, suspicious, "non-binding `let` on a future" diff --git a/clippy_lints/src/lib.rs b/clippy_lints/src/lib.rs index d8e2ae02c5a65..5c4b604104417 100644 --- a/clippy_lints/src/lib.rs +++ b/clippy_lints/src/lib.rs @@ -198,6 +198,7 @@ mod missing_trait_methods; mod mixed_read_write_in_expression; mod module_style; mod multi_assignments; +mod multiple_unsafe_ops_per_block; mod mut_key; mod mut_mut; mod mut_reference; @@ -908,6 +909,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: store.register_late_pass(|_| Box::new(fn_null_check::FnNullCheck)); store.register_late_pass(|_| Box::new(permissions_set_readonly_false::PermissionsSetReadonlyFalse)); store.register_late_pass(|_| Box::new(size_of_ref::SizeOfRef)); + store.register_late_pass(|_| Box::new(multiple_unsafe_ops_per_block::MultipleUnsafeOpsPerBlock)); // add lints here, do not remove this comment, it's used in `new_lint` } diff --git a/clippy_lints/src/manual_is_ascii_check.rs b/clippy_lints/src/manual_is_ascii_check.rs index d9ef7dffa020d..2fd32c009eaa7 100644 --- a/clippy_lints/src/manual_is_ascii_check.rs +++ b/clippy_lints/src/manual_is_ascii_check.rs @@ -43,7 +43,7 @@ declare_clippy_lint! 
{ /// 'A'.is_ascii_uppercase(); /// } /// ``` - #[clippy::version = "1.66.0"] + #[clippy::version = "1.67.0"] pub MANUAL_IS_ASCII_CHECK, style, "use dedicated method to check ascii range" diff --git a/clippy_lints/src/methods/mod.rs b/clippy_lints/src/methods/mod.rs index a7e45d5126ab0..0c465e5daf9fd 100644 --- a/clippy_lints/src/methods/mod.rs +++ b/clippy_lints/src/methods/mod.rs @@ -3102,7 +3102,7 @@ declare_clippy_lint! { /// Ok(()) /// } /// ``` - #[clippy::version = "1.66.0"] + #[clippy::version = "1.67.0"] pub SEEK_FROM_CURRENT, complexity, "use dedicated method for seek from current position" @@ -3133,7 +3133,7 @@ declare_clippy_lint! { /// t.rewind(); /// } /// ``` - #[clippy::version = "1.66.0"] + #[clippy::version = "1.67.0"] pub SEEK_TO_START_INSTEAD_OF_REWIND, complexity, "jumping to the start of stream using `seek` method" diff --git a/clippy_lints/src/missing_trait_methods.rs b/clippy_lints/src/missing_trait_methods.rs index 3371b4cce32c1..e99081ad06202 100644 --- a/clippy_lints/src/missing_trait_methods.rs +++ b/clippy_lints/src/missing_trait_methods.rs @@ -94,7 +94,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingTraitMethods { "implement the method", ); } - }) + }); } } } diff --git a/clippy_lints/src/multiple_unsafe_ops_per_block.rs b/clippy_lints/src/multiple_unsafe_ops_per_block.rs new file mode 100644 index 0000000000000..2814c92e67a45 --- /dev/null +++ b/clippy_lints/src/multiple_unsafe_ops_per_block.rs @@ -0,0 +1,185 @@ +use clippy_utils::{ + diagnostics::span_lint_and_then, + visitors::{for_each_expr_with_closures, Descend, Visitable}, +}; +use core::ops::ControlFlow::Continue; +use hir::{ + def::{DefKind, Res}, + BlockCheckMode, ExprKind, QPath, UnOp, Unsafety, +}; +use rustc_ast::Mutability; +use rustc_hir as hir; +use rustc_lint::{LateContext, LateLintPass}; +use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::Span; + +declare_clippy_lint! { + /// ### What it does + /// Checks for `unsafe` blocks that contain more than one unsafe operation. + /// + /// ### Why is this bad? + /// Combined with `undocumented_unsafe_blocks`, + /// this lint ensures that each unsafe operation must be independently justified. + /// Combined with `unused_unsafe`, this lint also ensures + /// elimination of unnecessary unsafe blocks through refactoring. + /// + /// ### Example + /// ```rust + /// /// Reads a `char` from the given pointer. + /// /// + /// /// # Safety + /// /// + /// /// `ptr` must point to four consecutive, initialized bytes which + /// /// form a valid `char` when interpreted in the native byte order. + /// fn read_char(ptr: *const u8) -> char { + /// // SAFETY: The caller has guaranteed that the value pointed + /// // to by `bytes` is a valid `char`. + /// unsafe { char::from_u32_unchecked(*ptr.cast::()) } + /// } + /// ``` + /// Use instead: + /// ```rust + /// /// Reads a `char` from the given pointer. + /// /// + /// /// # Safety + /// /// + /// /// - `ptr` must be 4-byte aligned, point to four consecutive + /// /// initialized bytes, and be valid for reads of 4 bytes. + /// /// - The bytes pointed to by `ptr` must represent a valid + /// /// `char` when interpreted in the native byte order. + /// fn read_char(ptr: *const u8) -> char { + /// // SAFETY: `ptr` is 4-byte aligned, points to four consecutive + /// // initialized bytes, and is valid for reads of 4 bytes. + /// let int_value = unsafe { *ptr.cast::() }; + /// + /// // SAFETY: The caller has guaranteed that the four bytes + /// // pointed to by `bytes` represent a valid `char`. 
+ /// unsafe { char::from_u32_unchecked(int_value) } + /// } + /// ``` + #[clippy::version = "1.68.0"] + pub MULTIPLE_UNSAFE_OPS_PER_BLOCK, + restriction, + "more than one unsafe operation per `unsafe` block" +} +declare_lint_pass!(MultipleUnsafeOpsPerBlock => [MULTIPLE_UNSAFE_OPS_PER_BLOCK]); + +impl<'tcx> LateLintPass<'tcx> for MultipleUnsafeOpsPerBlock { + fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) { + if !matches!(block.rules, BlockCheckMode::UnsafeBlock(_)) { + return; + } + let mut unsafe_ops = vec![]; + collect_unsafe_exprs(cx, block, &mut unsafe_ops); + if unsafe_ops.len() > 1 { + span_lint_and_then( + cx, + MULTIPLE_UNSAFE_OPS_PER_BLOCK, + block.span, + &format!( + "this `unsafe` block contains {} unsafe operations, expected only one", + unsafe_ops.len() + ), + |diag| { + for (msg, span) in unsafe_ops { + diag.span_note(span, msg); + } + }, + ); + } + } +} + +fn collect_unsafe_exprs<'tcx>( + cx: &LateContext<'tcx>, + node: impl Visitable<'tcx>, + unsafe_ops: &mut Vec<(&'static str, Span)>, +) { + for_each_expr_with_closures(cx, node, |expr| { + match expr.kind { + ExprKind::InlineAsm(_) => unsafe_ops.push(("inline assembly used here", expr.span)), + + ExprKind::Field(e, _) => { + if cx.typeck_results().expr_ty(e).is_union() { + unsafe_ops.push(("union field access occurs here", expr.span)); + } + }, + + ExprKind::Path(QPath::Resolved( + _, + hir::Path { + res: Res::Def(DefKind::Static(Mutability::Mut), _), + .. + }, + )) => { + unsafe_ops.push(("access of a mutable static occurs here", expr.span)); + }, + + ExprKind::Unary(UnOp::Deref, e) if cx.typeck_results().expr_ty_adjusted(e).is_unsafe_ptr() => { + unsafe_ops.push(("raw pointer dereference occurs here", expr.span)); + }, + + ExprKind::Call(path_expr, _) => match path_expr.kind { + ExprKind::Path(QPath::Resolved( + _, + hir::Path { + res: Res::Def(kind, def_id), + .. + }, + )) if kind.is_fn_like() => { + let sig = cx.tcx.fn_sig(*def_id); + if sig.0.unsafety() == Unsafety::Unsafe { + unsafe_ops.push(("unsafe function call occurs here", expr.span)); + } + }, + + ExprKind::Path(QPath::TypeRelative(..)) => { + if let Some(sig) = cx + .typeck_results() + .type_dependent_def_id(path_expr.hir_id) + .map(|def_id| cx.tcx.fn_sig(def_id)) + { + if sig.0.unsafety() == Unsafety::Unsafe { + unsafe_ops.push(("unsafe function call occurs here", expr.span)); + } + } + }, + + _ => {}, + }, + + ExprKind::MethodCall(..) => { + if let Some(sig) = cx + .typeck_results() + .type_dependent_def_id(expr.hir_id) + .map(|def_id| cx.tcx.fn_sig(def_id)) + { + if sig.0.unsafety() == Unsafety::Unsafe { + unsafe_ops.push(("unsafe method call occurs here", expr.span)); + } + } + }, + + ExprKind::AssignOp(_, lhs, rhs) | ExprKind::Assign(lhs, rhs, _) => { + if matches!( + lhs.kind, + ExprKind::Path(QPath::Resolved( + _, + hir::Path { + res: Res::Def(DefKind::Static(Mutability::Mut), _), + .. 
+ } + )) + ) { + unsafe_ops.push(("modification of a mutable static occurs here", expr.span)); + collect_unsafe_exprs(cx, rhs, unsafe_ops); + return Continue(Descend::No); + } + }, + + _ => {}, + }; + + Continue::<(), _>(Descend::Yes) + }); +} diff --git a/clippy_lints/src/only_used_in_recursion.rs b/clippy_lints/src/only_used_in_recursion.rs index 7b1d974f2f877..8b77a5c99f767 100644 --- a/clippy_lints/src/only_used_in_recursion.rs +++ b/clippy_lints/src/only_used_in_recursion.rs @@ -7,7 +7,7 @@ use rustc_hir::def_id::DefId; use rustc_hir::hir_id::HirIdMap; use rustc_hir::{Body, Expr, ExprKind, HirId, ImplItem, ImplItemKind, Node, PatKind, TraitItem, TraitItemKind}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty::subst::{GenericArgKind, SubstsRef}; +use rustc_middle::ty::subst::{EarlyBinder, GenericArgKind, SubstsRef}; use rustc_middle::ty::{self, ConstKind}; use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::symbol::{kw, Ident}; @@ -244,7 +244,7 @@ impl<'tcx> LateLintPass<'tcx> for OnlyUsedInRecursion { })) => { #[allow(trivial_casts)] if let Some(Node::Item(item)) = get_parent_node(cx.tcx, owner_id.into()) - && let Some(trait_ref) = cx.tcx.impl_trait_ref(item.owner_id).map(|t| t.subst_identity()) + && let Some(trait_ref) = cx.tcx.impl_trait_ref(item.owner_id).map(EarlyBinder::subst_identity) && let Some(trait_item_id) = cx.tcx.associated_item(owner_id).trait_item_def_id { ( diff --git a/clippy_lints/src/returns.rs b/clippy_lints/src/returns.rs index dc1275a3686d0..a3e0811700bee 100644 --- a/clippy_lints/src/returns.rs +++ b/clippy_lints/src/returns.rs @@ -1,7 +1,7 @@ use clippy_utils::diagnostics::{span_lint_and_then, span_lint_hir_and_then}; use clippy_utils::source::{snippet_opt, snippet_with_context}; use clippy_utils::visitors::{for_each_expr, Descend}; -use clippy_utils::{fn_def_id, path_to_local_id}; +use clippy_utils::{fn_def_id, path_to_local_id, span_find_starting_semi}; use core::ops::ControlFlow; use if_chain::if_chain; use rustc_errors::Applicability; @@ -151,7 +151,7 @@ impl<'tcx> LateLintPass<'tcx> for Return { kind: FnKind<'tcx>, _: &'tcx FnDecl<'tcx>, body: &'tcx Body<'tcx>, - _: Span, + sp: Span, _: HirId, ) { match kind { @@ -166,14 +166,14 @@ impl<'tcx> LateLintPass<'tcx> for Return { check_final_expr(cx, body.value, vec![], replacement); }, FnKind::ItemFn(..) | FnKind::Method(..) => { - check_block_return(cx, &body.value.kind, vec![]); + check_block_return(cx, &body.value.kind, sp, vec![]); }, } } } // if `expr` is a block, check if there are needless returns in it -fn check_block_return<'tcx>(cx: &LateContext<'tcx>, expr_kind: &ExprKind<'tcx>, semi_spans: Vec) { +fn check_block_return<'tcx>(cx: &LateContext<'tcx>, expr_kind: &ExprKind<'tcx>, sp: Span, mut semi_spans: Vec) { if let ExprKind::Block(block, _) = expr_kind { if let Some(block_expr) = block.expr { check_final_expr(cx, block_expr, semi_spans, RetReplacement::Empty); @@ -183,12 +183,14 @@ fn check_block_return<'tcx>(cx: &LateContext<'tcx>, expr_kind: &ExprKind<'tcx>, check_final_expr(cx, expr, semi_spans, RetReplacement::Empty); }, StmtKind::Semi(semi_expr) => { - let mut semi_spans_and_this_one = semi_spans; - // we only want the span containing the semicolon so we can remove it later. From `entry.rs:382` - if let Some(semicolon_span) = stmt.span.trim_start(semi_expr.span) { - semi_spans_and_this_one.push(semicolon_span); - check_final_expr(cx, semi_expr, semi_spans_and_this_one, RetReplacement::Empty); + // Remove ending semicolons and any whitespace ' ' in between. 
+ // Without `return`, the suggestion might not compile if the semicolon is retained + if let Some(semi_span) = stmt.span.trim_start(semi_expr.span) { + let semi_span_to_remove = + span_find_starting_semi(cx.sess().source_map(), semi_span.with_hi(sp.hi())); + semi_spans.push(semi_span_to_remove); } + check_final_expr(cx, semi_expr, semi_spans, RetReplacement::Empty); }, _ => (), } @@ -231,9 +233,9 @@ fn check_final_expr<'tcx>( emit_return_lint(cx, ret_span, semi_spans, inner.as_ref().map(|i| i.span), replacement); }, ExprKind::If(_, then, else_clause_opt) => { - check_block_return(cx, &then.kind, semi_spans.clone()); + check_block_return(cx, &then.kind, peeled_drop_expr.span, semi_spans.clone()); if let Some(else_clause) = else_clause_opt { - check_block_return(cx, &else_clause.kind, semi_spans); + check_block_return(cx, &else_clause.kind, peeled_drop_expr.span, semi_spans); } }, // a match expr, check all arms @@ -246,7 +248,7 @@ fn check_final_expr<'tcx>( } }, // if it's a whole block, check it - other_expr_kind => check_block_return(cx, other_expr_kind, semi_spans), + other_expr_kind => check_block_return(cx, other_expr_kind, peeled_drop_expr.span, semi_spans), } } diff --git a/clippy_lints/src/suspicious_xor_used_as_pow.rs b/clippy_lints/src/suspicious_xor_used_as_pow.rs index 301aa5798bf55..9c0dc8096d0dc 100644 --- a/clippy_lints/src/suspicious_xor_used_as_pow.rs +++ b/clippy_lints/src/suspicious_xor_used_as_pow.rs @@ -9,7 +9,7 @@ declare_clippy_lint! { /// ### What it does /// Warns for a Bitwise XOR (`^`) operator being probably confused as a powering. It will not trigger if any of the numbers are not in decimal. /// ### Why is this bad? - /// It's most probably a typo and may lead to unexpected behaviours. + /// It's most probably a typo and may lead to unexpected behaviours. /// ### Example /// ```rust /// let x = 3_i32 ^ 4_i32; @@ -18,7 +18,7 @@ declare_clippy_lint! { /// ```rust /// let x = 3_i32.pow(4); /// ``` - #[clippy::version = "1.66.0"] + #[clippy::version = "1.67.0"] pub SUSPICIOUS_XOR_USED_AS_POW, restriction, "XOR (`^`) operator possibly used as exponentiation operator" diff --git a/clippy_lints/src/transmute/mod.rs b/clippy_lints/src/transmute/mod.rs index 691d759d7739d..c0d290b5adc42 100644 --- a/clippy_lints/src/transmute/mod.rs +++ b/clippy_lints/src/transmute/mod.rs @@ -479,7 +479,10 @@ impl<'tcx> LateLintPass<'tcx> for Transmute { // - char conversions (https://github.com/rust-lang/rust/issues/89259) let const_context = in_constant(cx, e.hir_id); - let from_ty = cx.typeck_results().expr_ty_adjusted(arg); + let (from_ty, from_ty_adjusted) = match cx.typeck_results().expr_adjustments(arg) { + [] => (cx.typeck_results().expr_ty(arg), false), + [.., a] => (a.target, true), + }; // Adjustments for `to_ty` happen after the call to `transmute`, so don't use them. 
let to_ty = cx.typeck_results().expr_ty(e); @@ -506,7 +509,7 @@ impl<'tcx> LateLintPass<'tcx> for Transmute { ); if !linted { - transmutes_expressible_as_ptr_casts::check(cx, e, from_ty, to_ty, arg); + transmutes_expressible_as_ptr_casts::check(cx, e, from_ty, from_ty_adjusted, to_ty, arg); } } } diff --git a/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs b/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs index b79d4e915a271..8530b43243fa3 100644 --- a/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs +++ b/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs @@ -1,11 +1,11 @@ -use super::utils::can_be_expressed_as_pointer_cast; +use super::utils::check_cast; use super::TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS; -use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::sugg; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::sugg::Sugg; use rustc_errors::Applicability; use rustc_hir::Expr; use rustc_lint::LateContext; -use rustc_middle::ty::Ty; +use rustc_middle::ty::{cast::CastKind, Ty}; /// Checks for `transmutes_expressible_as_ptr_casts` lint. /// Returns `true` if it's triggered, otherwise returns `false`. @@ -13,24 +13,40 @@ pub(super) fn check<'tcx>( cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, from_ty: Ty<'tcx>, + from_ty_adjusted: bool, to_ty: Ty<'tcx>, arg: &'tcx Expr<'_>, ) -> bool { - if can_be_expressed_as_pointer_cast(cx, e, from_ty, to_ty) { - span_lint_and_then( - cx, - TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS, - e.span, - &format!("transmute from `{from_ty}` to `{to_ty}` which could be expressed as a pointer cast instead"), - |diag| { - if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) { - let sugg = arg.as_ty(to_ty.to_string()).to_string(); - diag.span_suggestion(e.span, "try", sugg, Applicability::MachineApplicable); - } - }, - ); - true - } else { - false - } + use CastKind::{AddrPtrCast, ArrayPtrCast, FnPtrAddrCast, FnPtrPtrCast, PtrAddrCast, PtrPtrCast}; + let mut app = Applicability::MachineApplicable; + let sugg = match check_cast(cx, e, from_ty, to_ty) { + Some(PtrPtrCast | AddrPtrCast | ArrayPtrCast | FnPtrPtrCast | FnPtrAddrCast) => { + Sugg::hir_with_context(cx, arg, e.span.ctxt(), "..", &mut app) + .as_ty(to_ty.to_string()) + .to_string() + }, + Some(PtrAddrCast) if !from_ty_adjusted => Sugg::hir_with_context(cx, arg, e.span.ctxt(), "..", &mut app) + .as_ty(to_ty.to_string()) + .to_string(), + + // The only adjustments here would be ref-to-ptr and unsize coercions. The result of an unsize coercions can't + // be transmuted to a usize. For ref-to-ptr coercions, borrows need to be cast to a pointer before being cast to + // a usize. + Some(PtrAddrCast) => format!( + "{} as {to_ty}", + Sugg::hir_with_context(cx, arg, e.span.ctxt(), "..", &mut app).as_ty(from_ty) + ), + _ => return false, + }; + + span_lint_and_sugg( + cx, + TRANSMUTES_EXPRESSIBLE_AS_PTR_CASTS, + e.span, + &format!("transmute from `{from_ty}` to `{to_ty}` which could be expressed as a pointer cast instead"), + "try", + sugg, + app, + ); + true } diff --git a/clippy_lints/src/transmute/utils.rs b/clippy_lints/src/transmute/utils.rs index b59d52dfc4d31..cddaf9450eabc 100644 --- a/clippy_lints/src/transmute/utils.rs +++ b/clippy_lints/src/transmute/utils.rs @@ -20,28 +20,16 @@ pub(super) fn is_layout_incompatible<'tcx>(cx: &LateContext<'tcx>, from: Ty<'tcx } } -/// Check if the type conversion can be expressed as a pointer cast, instead of -/// a transmute. 
In certain cases, including some invalid casts from array -/// references to pointers, this may cause additional errors to be emitted and/or -/// ICE error messages. This function will panic if that occurs. -pub(super) fn can_be_expressed_as_pointer_cast<'tcx>( - cx: &LateContext<'tcx>, - e: &'tcx Expr<'_>, - from_ty: Ty<'tcx>, - to_ty: Ty<'tcx>, -) -> bool { - use CastKind::{AddrPtrCast, ArrayPtrCast, FnPtrAddrCast, FnPtrPtrCast, PtrAddrCast, PtrPtrCast}; - matches!( - check_cast(cx, e, from_ty, to_ty), - Some(PtrPtrCast | PtrAddrCast | AddrPtrCast | ArrayPtrCast | FnPtrPtrCast | FnPtrAddrCast) - ) -} - /// If a cast from `from_ty` to `to_ty` is valid, returns an Ok containing the kind of /// the cast. In certain cases, including some invalid casts from array references /// to pointers, this may cause additional errors to be emitted and/or ICE error /// messages. This function will panic if that occurs. -fn check_cast<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, from_ty: Ty<'tcx>, to_ty: Ty<'tcx>) -> Option { +pub(super) fn check_cast<'tcx>( + cx: &LateContext<'tcx>, + e: &'tcx Expr<'_>, + from_ty: Ty<'tcx>, + to_ty: Ty<'tcx>, +) -> Option { let hir_id = e.hir_id; let local_def_id = hir_id.owner.def_id; diff --git a/clippy_lints/src/undocumented_unsafe_blocks.rs b/clippy_lints/src/undocumented_unsafe_blocks.rs index 2e1b6d8d4ea7f..2920684ade33c 100644 --- a/clippy_lints/src/undocumented_unsafe_blocks.rs +++ b/clippy_lints/src/undocumented_unsafe_blocks.rs @@ -263,6 +263,18 @@ fn expr_has_unnecessary_safety_comment<'tcx>( expr: &'tcx hir::Expr<'tcx>, comment_pos: BytePos, ) -> Option { + if cx.tcx.hir().parent_iter(expr.hir_id).any(|(_, ref node)| { + matches!( + node, + Node::Block(&Block { + rules: BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided), + .. + }), + ) + }) { + return None; + } + // this should roughly be the reverse of `block_parents_have_safety_comment` if for_each_expr_with_closures(cx, expr, |expr| match expr.kind { hir::ExprKind::Block( diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index c1589c771c462..f48be27592b7e 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -219,7 +219,8 @@ define_Conf! { /// /// #### Noteworthy /// - /// A type, say `SomeType`, listed in this configuration has the same behavior of `["SomeType" , "*"], ["*", "SomeType"]` in `arithmetic_side_effects_allowed_binary`. + /// A type, say `SomeType`, listed in this configuration has the same behavior of + /// `["SomeType" , "*"], ["*", "SomeType"]` in `arithmetic_side_effects_allowed_binary`. (arithmetic_side_effects_allowed: rustc_data_structures::fx::FxHashSet = <_>::default()), /// Lint: ARITHMETIC_SIDE_EFFECTS. /// diff --git a/clippy_lints/src/utils/internal_lints/metadata_collector.rs b/clippy_lints/src/utils/internal_lints/metadata_collector.rs index c4d8c28f06061..b1b5164ffb3ef 100644 --- a/clippy_lints/src/utils/internal_lints/metadata_collector.rs +++ b/clippy_lints/src/utils/internal_lints/metadata_collector.rs @@ -14,6 +14,7 @@ use clippy_utils::diagnostics::span_lint; use clippy_utils::ty::{match_type, walk_ptrs_ty_depth}; use clippy_utils::{last_path_segment, match_def_path, match_function_call, match_path, paths}; use if_chain::if_chain; +use itertools::Itertools; use rustc_ast as ast; use rustc_data_structures::fx::FxHashMap; use rustc_hir::{ @@ -34,8 +35,10 @@ use std::path::Path; use std::path::PathBuf; use std::process::Command; -/// This is the output file of the lint collector. 
-const OUTPUT_FILE: &str = "../util/gh-pages/lints.json"; +/// This is the json output file of the lint collector. +const JSON_OUTPUT_FILE: &str = "../util/gh-pages/lints.json"; +/// This is the markdown output file of the lint collector. +const MARKDOWN_OUTPUT_FILE: &str = "../book/src/lint_configuration.md"; /// These lints are excluded from the export. const BLACK_LISTED_LINTS: &[&str] = &["lint_author", "dump_hir", "internal_metadata_collector"]; /// These groups will be ignored by the lint group matcher. This is useful for collections like @@ -176,6 +179,23 @@ This lint has the following configuration variables: ) }) } + + fn configs_to_markdown(&self, map_fn: fn(&ClippyConfiguration) -> String) -> String { + self.config + .iter() + .filter(|config| config.deprecation_reason.is_none()) + .filter(|config| !config.lints.is_empty()) + .map(map_fn) + .join("\n") + } + + fn get_markdown_docs(&self) -> String { + format!( + "## Lint Configuration Options\n|
Option
| Default Value |\n|--|--|\n{}\n\n{}\n", + self.configs_to_markdown(ClippyConfiguration::to_markdown_table_entry), + self.configs_to_markdown(ClippyConfiguration::to_markdown_paragraph), + ) + } } impl Drop for MetadataCollector { @@ -199,12 +219,37 @@ impl Drop for MetadataCollector { collect_renames(&mut lints); - // Outputting - if Path::new(OUTPUT_FILE).exists() { - fs::remove_file(OUTPUT_FILE).unwrap(); + // Outputting json + if Path::new(JSON_OUTPUT_FILE).exists() { + fs::remove_file(JSON_OUTPUT_FILE).unwrap(); } - let mut file = OpenOptions::new().write(true).create(true).open(OUTPUT_FILE).unwrap(); + let mut file = OpenOptions::new() + .write(true) + .create(true) + .open(JSON_OUTPUT_FILE) + .unwrap(); writeln!(file, "{}", serde_json::to_string_pretty(&lints).unwrap()).unwrap(); + + // Outputting markdown + if Path::new(MARKDOWN_OUTPUT_FILE).exists() { + fs::remove_file(MARKDOWN_OUTPUT_FILE).unwrap(); + } + let mut file = OpenOptions::new() + .write(true) + .create(true) + .open(MARKDOWN_OUTPUT_FILE) + .unwrap(); + writeln!( + file, + " + +{}", + self.get_markdown_docs(), + ) + .unwrap(); } } @@ -505,6 +550,28 @@ impl ClippyConfiguration { deprecation_reason, } } + + fn to_markdown_paragraph(&self) -> String { + format!( + "### {}\n{}\n\n**Default Value:** `{}` (`{}`)\n\n{}\n\n", + self.name, + self.doc + .lines() + .map(|line| line.strip_prefix(" ").unwrap_or(line)) + .join("\n"), + self.default, + self.config_type, + self.lints + .iter() + .map(|name| name.to_string().split_whitespace().next().unwrap().to_string()) + .map(|name| format!("* [{name}](https://rust-lang.github.io/rust-clippy/master/index.html#{name})")) + .join("\n"), + ) + } + + fn to_markdown_table_entry(&self) -> String { + format!("| [{}](#{}) | `{}` |", self.name, self.name, self.default) + } } fn collect_configs() -> Vec { diff --git a/clippy_utils/Cargo.toml b/clippy_utils/Cargo.toml index ac6a566b9cd3a..173469f6cdc7d 100644 --- a/clippy_utils/Cargo.toml +++ b/clippy_utils/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "clippy_utils" -version = "0.1.68" +version = "0.1.69" edition = "2021" publish = false diff --git a/clippy_utils/src/lib.rs b/clippy_utils/src/lib.rs index 23791ebe92254..e2965146cfe6d 100644 --- a/clippy_utils/src/lib.rs +++ b/clippy_utils/src/lib.rs @@ -2491,6 +2491,10 @@ pub fn span_extract_comment(sm: &SourceMap, span: Span) -> String { comments_buf.join("\n") } +pub fn span_find_starting_semi(sm: &SourceMap, span: Span) -> Span { + sm.span_take_while(span, |&ch| ch == ' ' || ch == ';') +} + macro_rules! op_utils { ($($name:ident $assign:ident)*) => { /// Binary operation traits like `LangItem::Add` diff --git a/clippy_utils/src/ty.rs b/clippy_utils/src/ty.rs index 1d5d55d5b54cf..c48d27b05f045 100644 --- a/clippy_utils/src/ty.rs +++ b/clippy_utils/src/ty.rs @@ -647,9 +647,12 @@ pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option Some(ExprFnSig::Sig(cx.tcx.fn_sig(id).subst(cx.tcx, subs), Some(id))), - ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. }) => { - sig_from_bounds(cx, ty, cx.tcx.item_bounds(def_id).subst(cx.tcx, substs), cx.tcx.opt_parent(def_id)) - }, + ty::Alias(ty::Opaque, ty::AliasTy { def_id, substs, .. 
}) => sig_from_bounds( + cx, + ty, + cx.tcx.item_bounds(def_id).subst(cx.tcx, substs), + cx.tcx.opt_parent(def_id), + ), ty::FnPtr(sig) => Some(ExprFnSig::Sig(sig, None)), ty::Dynamic(bounds, _, _) => { let lang_items = cx.tcx.lang_items(); diff --git a/declare_clippy_lint/Cargo.toml b/declare_clippy_lint/Cargo.toml index c01e1062cb544..80eee368178e1 100644 --- a/declare_clippy_lint/Cargo.toml +++ b/declare_clippy_lint/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "declare_clippy_lint" -version = "0.1.68" +version = "0.1.69" edition = "2021" publish = false diff --git a/rust-toolchain b/rust-toolchain index 40a6f47095ec2..4e7fc565a322a 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2023-01-12" +channel = "nightly-2023-01-27" components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"] diff --git a/src/main.rs b/src/main.rs index 7a78b32620d0b..82147eba33f07 100644 --- a/src/main.rs +++ b/src/main.rs @@ -28,7 +28,7 @@ with: -D --deny OPT Set lint denied -F --forbid OPT Set lint forbidden -You can use tool lints to allow or deny lints from your code, eg.: +You can use tool lints to allow or deny lints from your code, e.g.: #[allow(clippy::needless_lifetimes)] "#; diff --git a/tests/ui-internal/unnecessary_def_path_hardcoded_path.stderr b/tests/ui-internal/unnecessary_def_path_hardcoded_path.stderr index 3ca45404e44bb..2a240cc249b0c 100644 --- a/tests/ui-internal/unnecessary_def_path_hardcoded_path.stderr +++ b/tests/ui-internal/unnecessary_def_path_hardcoded_path.stderr @@ -7,14 +7,6 @@ LL | const DEREF_TRAIT: [&str; 4] = ["core", "ops", "deref", "Deref"]; = help: convert all references to use `sym::Deref` = note: `-D clippy::unnecessary-def-path` implied by `-D warnings` -error: hardcoded path to a language item - --> $DIR/unnecessary_def_path_hardcoded_path.rs:11:40 - | -LL | const DEREF_MUT_TRAIT: [&str; 4] = ["core", "ops", "deref", "DerefMut"]; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | - = help: convert all references to use `LangItem::DerefMut` - error: hardcoded path to a diagnostic item --> $DIR/unnecessary_def_path_hardcoded_path.rs:12:43 | @@ -23,5 +15,13 @@ LL | const DEREF_TRAIT_METHOD: [&str; 5] = ["core", "ops", "deref", "Deref", | = help: convert all references to use `sym::deref_method` +error: hardcoded path to a language item + --> $DIR/unnecessary_def_path_hardcoded_path.rs:11:40 + | +LL | const DEREF_MUT_TRAIT: [&str; 4] = ["core", "ops", "deref", "DerefMut"]; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: convert all references to use `LangItem::DerefMut` + error: aborting due to 3 previous errors diff --git a/tests/ui/bool_assert_comparison.fixed b/tests/ui/bool_assert_comparison.fixed new file mode 100644 index 0000000000000..95f35a61bb289 --- /dev/null +++ b/tests/ui/bool_assert_comparison.fixed @@ -0,0 +1,161 @@ +// run-rustfix + +#![allow(unused, clippy::assertions_on_constants)] +#![warn(clippy::bool_assert_comparison)] + +use std::ops::Not; + +macro_rules! a { + () => { + true + }; +} +macro_rules! b { + () => { + true + }; +} + +// Implements the Not trait but with an output type +// that's not bool. 
Should not suggest a rewrite +#[derive(Debug, Clone, Copy)] +enum ImplNotTraitWithoutBool { + VariantX(bool), + VariantY(u32), +} + +impl PartialEq for ImplNotTraitWithoutBool { + fn eq(&self, other: &bool) -> bool { + match *self { + ImplNotTraitWithoutBool::VariantX(b) => b == *other, + _ => false, + } + } +} + +impl Not for ImplNotTraitWithoutBool { + type Output = Self; + + fn not(self) -> Self::Output { + match self { + ImplNotTraitWithoutBool::VariantX(b) => ImplNotTraitWithoutBool::VariantX(!b), + ImplNotTraitWithoutBool::VariantY(0) => ImplNotTraitWithoutBool::VariantY(1), + ImplNotTraitWithoutBool::VariantY(_) => ImplNotTraitWithoutBool::VariantY(0), + } + } +} + +// This type implements the Not trait with an Output of +// type bool. Using assert!(..) must be suggested +#[derive(Debug, Clone, Copy)] +struct ImplNotTraitWithBool; + +impl PartialEq for ImplNotTraitWithBool { + fn eq(&self, other: &bool) -> bool { + false + } +} + +impl Not for ImplNotTraitWithBool { + type Output = bool; + + fn not(self) -> Self::Output { + true + } +} + +#[derive(Debug)] +struct NonCopy; + +impl PartialEq for NonCopy { + fn eq(&self, other: &bool) -> bool { + false + } +} + +impl Not for NonCopy { + type Output = bool; + + fn not(self) -> Self::Output { + true + } +} + +fn main() { + let a = ImplNotTraitWithoutBool::VariantX(true); + let b = ImplNotTraitWithBool; + + assert_eq!("a".len(), 1); + assert!("a".is_empty()); + assert!("".is_empty()); + assert!("".is_empty()); + assert_eq!(a!(), b!()); + assert_eq!(a!(), "".is_empty()); + assert_eq!("".is_empty(), b!()); + assert_eq!(a, true); + assert!(b); + + assert_ne!("a".len(), 1); + assert!("a".is_empty()); + assert!("".is_empty()); + assert!("".is_empty()); + assert_ne!(a!(), b!()); + assert_ne!(a!(), "".is_empty()); + assert_ne!("".is_empty(), b!()); + assert_ne!(a, true); + assert!(b); + + debug_assert_eq!("a".len(), 1); + debug_assert!("a".is_empty()); + debug_assert!("".is_empty()); + debug_assert!("".is_empty()); + debug_assert_eq!(a!(), b!()); + debug_assert_eq!(a!(), "".is_empty()); + debug_assert_eq!("".is_empty(), b!()); + debug_assert_eq!(a, true); + debug_assert!(b); + + debug_assert_ne!("a".len(), 1); + debug_assert!("a".is_empty()); + debug_assert!("".is_empty()); + debug_assert!("".is_empty()); + debug_assert_ne!(a!(), b!()); + debug_assert_ne!(a!(), "".is_empty()); + debug_assert_ne!("".is_empty(), b!()); + debug_assert_ne!(a, true); + debug_assert!(b); + + // assert with error messages + assert_eq!("a".len(), 1, "tadam {}", 1); + assert_eq!("a".len(), 1, "tadam {}", true); + assert!("a".is_empty(), "tadam {}", 1); + assert!("a".is_empty(), "tadam {}", true); + assert!("a".is_empty(), "tadam {}", true); + assert_eq!(a, true, "tadam {}", false); + + debug_assert_eq!("a".len(), 1, "tadam {}", 1); + debug_assert_eq!("a".len(), 1, "tadam {}", true); + debug_assert!("a".is_empty(), "tadam {}", 1); + debug_assert!("a".is_empty(), "tadam {}", true); + debug_assert!("a".is_empty(), "tadam {}", true); + debug_assert_eq!(a, true, "tadam {}", false); + + assert!(a!()); + assert!(b!()); + + use debug_assert_eq as renamed; + renamed!(a, true); + debug_assert!(b); + + let non_copy = NonCopy; + assert_eq!(non_copy, true); + // changing the above to `assert!(non_copy)` would cause a `borrow of moved value` + println!("{non_copy:?}"); + + macro_rules! 
in_macro { + ($v:expr) => {{ + assert_eq!($v, true); + }}; + } + in_macro!(a); +} diff --git a/tests/ui/bool_assert_comparison.rs b/tests/ui/bool_assert_comparison.rs index ec4d6f3ff8401..88e7560b4f984 100644 --- a/tests/ui/bool_assert_comparison.rs +++ b/tests/ui/bool_assert_comparison.rs @@ -1,3 +1,6 @@ +// run-rustfix + +#![allow(unused, clippy::assertions_on_constants)] #![warn(clippy::bool_assert_comparison)] use std::ops::Not; @@ -15,7 +18,7 @@ macro_rules! b { // Implements the Not trait but with an output type // that's not bool. Should not suggest a rewrite -#[derive(Debug)] +#[derive(Debug, Clone, Copy)] enum ImplNotTraitWithoutBool { VariantX(bool), VariantY(u32), @@ -44,7 +47,7 @@ impl Not for ImplNotTraitWithoutBool { // This type implements the Not trait with an Output of // type bool. Using assert!(..) must be suggested -#[derive(Debug)] +#[derive(Debug, Clone, Copy)] struct ImplNotTraitWithBool; impl PartialEq for ImplNotTraitWithBool { @@ -61,6 +64,23 @@ impl Not for ImplNotTraitWithBool { } } +#[derive(Debug)] +struct NonCopy; + +impl PartialEq for NonCopy { + fn eq(&self, other: &bool) -> bool { + false + } +} + +impl Not for NonCopy { + type Output = bool; + + fn not(self) -> Self::Output { + true + } +} + fn main() { let a = ImplNotTraitWithoutBool::VariantX(true); let b = ImplNotTraitWithBool; @@ -119,4 +139,23 @@ fn main() { debug_assert_eq!("a".is_empty(), false, "tadam {}", true); debug_assert_eq!(false, "a".is_empty(), "tadam {}", true); debug_assert_eq!(a, true, "tadam {}", false); + + assert_eq!(a!(), true); + assert_eq!(true, b!()); + + use debug_assert_eq as renamed; + renamed!(a, true); + renamed!(b, true); + + let non_copy = NonCopy; + assert_eq!(non_copy, true); + // changing the above to `assert!(non_copy)` would cause a `borrow of moved value` + println!("{non_copy:?}"); + + macro_rules! 
in_macro { + ($v:expr) => {{ + assert_eq!($v, true); + }}; + } + in_macro!(a); } diff --git a/tests/ui/bool_assert_comparison.stderr b/tests/ui/bool_assert_comparison.stderr index 377d51be4cde7..3d9f8573e617c 100644 --- a/tests/ui/bool_assert_comparison.stderr +++ b/tests/ui/bool_assert_comparison.stderr @@ -1,136 +1,303 @@ error: used `assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:69:5 + --> $DIR/bool_assert_comparison.rs:89:5 | LL | assert_eq!("a".is_empty(), false); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `-D clippy::bool-assert-comparison` implied by `-D warnings` +help: replace it with `assert!(..)` + | +LL - assert_eq!("a".is_empty(), false); +LL + assert!("a".is_empty()); + | error: used `assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:70:5 + --> $DIR/bool_assert_comparison.rs:90:5 | LL | assert_eq!("".is_empty(), true); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_eq!("".is_empty(), true); +LL + assert!("".is_empty()); + | error: used `assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:71:5 + --> $DIR/bool_assert_comparison.rs:91:5 | LL | assert_eq!(true, "".is_empty()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_eq!(true, "".is_empty()); +LL + assert!("".is_empty()); + | error: used `assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:76:5 + --> $DIR/bool_assert_comparison.rs:96:5 | LL | assert_eq!(b, true); - | ^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_eq!(b, true); +LL + assert!(b); + | error: used `assert_ne!` with a literal bool - --> $DIR/bool_assert_comparison.rs:79:5 + --> $DIR/bool_assert_comparison.rs:99:5 | LL | assert_ne!("a".is_empty(), false); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_ne!("a".is_empty(), false); +LL + assert!("a".is_empty()); + | error: used `assert_ne!` with a literal bool - --> $DIR/bool_assert_comparison.rs:80:5 + --> $DIR/bool_assert_comparison.rs:100:5 | LL | assert_ne!("".is_empty(), true); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_ne!("".is_empty(), true); +LL + assert!("".is_empty()); + | error: used `assert_ne!` with a literal bool - --> $DIR/bool_assert_comparison.rs:81:5 + --> $DIR/bool_assert_comparison.rs:101:5 | LL | assert_ne!(true, "".is_empty()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_ne!(true, "".is_empty()); +LL + assert!("".is_empty()); + | error: used `assert_ne!` with a literal bool - --> $DIR/bool_assert_comparison.rs:86:5 + --> $DIR/bool_assert_comparison.rs:106:5 | LL | assert_ne!(b, true); - | ^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_ne!(b, true); +LL + assert!(b); + | error: used `debug_assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:89:5 
+ --> $DIR/bool_assert_comparison.rs:109:5 | LL | debug_assert_eq!("a".is_empty(), false); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_eq!("a".is_empty(), false); +LL + debug_assert!("a".is_empty()); + | error: used `debug_assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:90:5 + --> $DIR/bool_assert_comparison.rs:110:5 | LL | debug_assert_eq!("".is_empty(), true); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_eq!("".is_empty(), true); +LL + debug_assert!("".is_empty()); + | error: used `debug_assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:91:5 + --> $DIR/bool_assert_comparison.rs:111:5 | LL | debug_assert_eq!(true, "".is_empty()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_eq!(true, "".is_empty()); +LL + debug_assert!("".is_empty()); + | error: used `debug_assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:96:5 + --> $DIR/bool_assert_comparison.rs:116:5 | LL | debug_assert_eq!(b, true); - | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_eq!(b, true); +LL + debug_assert!(b); + | error: used `debug_assert_ne!` with a literal bool - --> $DIR/bool_assert_comparison.rs:99:5 + --> $DIR/bool_assert_comparison.rs:119:5 | LL | debug_assert_ne!("a".is_empty(), false); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_ne!("a".is_empty(), false); +LL + debug_assert!("a".is_empty()); + | error: used `debug_assert_ne!` with a literal bool - --> $DIR/bool_assert_comparison.rs:100:5 + --> $DIR/bool_assert_comparison.rs:120:5 | LL | debug_assert_ne!("".is_empty(), true); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_ne!("".is_empty(), true); +LL + debug_assert!("".is_empty()); + | error: used `debug_assert_ne!` with a literal bool - --> $DIR/bool_assert_comparison.rs:101:5 + --> $DIR/bool_assert_comparison.rs:121:5 | LL | debug_assert_ne!(true, "".is_empty()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_ne!(true, "".is_empty()); +LL + debug_assert!("".is_empty()); + | error: used `debug_assert_ne!` with a literal bool - --> $DIR/bool_assert_comparison.rs:106:5 + --> $DIR/bool_assert_comparison.rs:126:5 | LL | debug_assert_ne!(b, true); - | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_ne!(b, true); +LL + debug_assert!(b); + | error: used `assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:111:5 + --> $DIR/bool_assert_comparison.rs:131:5 | LL | assert_eq!("a".is_empty(), false, "tadam {}", 
1); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_eq!("a".is_empty(), false, "tadam {}", 1); +LL + assert!("a".is_empty(), "tadam {}", 1); + | error: used `assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:112:5 + --> $DIR/bool_assert_comparison.rs:132:5 | LL | assert_eq!("a".is_empty(), false, "tadam {}", true); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_eq!("a".is_empty(), false, "tadam {}", true); +LL + assert!("a".is_empty(), "tadam {}", true); + | error: used `assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:113:5 + --> $DIR/bool_assert_comparison.rs:133:5 | LL | assert_eq!(false, "a".is_empty(), "tadam {}", true); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_eq!(false, "a".is_empty(), "tadam {}", true); +LL + assert!("a".is_empty(), "tadam {}", true); + | error: used `debug_assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:118:5 + --> $DIR/bool_assert_comparison.rs:138:5 | LL | debug_assert_eq!("a".is_empty(), false, "tadam {}", 1); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_eq!("a".is_empty(), false, "tadam {}", 1); +LL + debug_assert!("a".is_empty(), "tadam {}", 1); + | error: used `debug_assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:119:5 + --> $DIR/bool_assert_comparison.rs:139:5 | LL | debug_assert_eq!("a".is_empty(), false, "tadam {}", true); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_eq!("a".is_empty(), false, "tadam {}", true); +LL + debug_assert!("a".is_empty(), "tadam {}", true); + | error: used `debug_assert_eq!` with a literal bool - --> $DIR/bool_assert_comparison.rs:120:5 + --> $DIR/bool_assert_comparison.rs:140:5 | LL | debug_assert_eq!(false, "a".is_empty(), "tadam {}", true); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `debug_assert!(..)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_eq!(false, "a".is_empty(), "tadam {}", true); +LL + debug_assert!("a".is_empty(), "tadam {}", true); + | + +error: used `assert_eq!` with a literal bool + --> $DIR/bool_assert_comparison.rs:143:5 + | +LL | assert_eq!(a!(), true); + | ^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_eq!(a!(), true); +LL + assert!(a!()); + | + +error: used `assert_eq!` with a literal bool + --> $DIR/bool_assert_comparison.rs:144:5 + | +LL | assert_eq!(true, b!()); + | ^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_eq!(true, b!()); +LL + assert!(b!()); + | + +error: used `debug_assert_eq!` with a literal bool + --> $DIR/bool_assert_comparison.rs:148:5 + | +LL | renamed!(b, true); + | 
^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - renamed!(b, true); +LL + debug_assert!(b); + | -error: aborting due to 22 previous errors +error: aborting due to 25 previous errors diff --git a/tests/ui/cast.rs b/tests/ui/cast.rs index e6031e9adaeb6..8b2673c2a7fdb 100644 --- a/tests/ui/cast.rs +++ b/tests/ui/cast.rs @@ -28,6 +28,7 @@ fn main() { 1i32 as u8; 1f64 as isize; 1f64 as usize; + 1f32 as u32 as u16; // Test clippy::cast_possible_wrap 1u8 as i8; 1u16 as i16; diff --git a/tests/ui/cast.stderr b/tests/ui/cast.stderr index 0c63b4af30865..4af1de9aa38d3 100644 --- a/tests/ui/cast.stderr +++ b/tests/ui/cast.stderr @@ -42,13 +42,24 @@ error: casting `f32` to `i32` may truncate the value LL | 1f32 as i32; | ^^^^^^^^^^^ | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... = note: `-D clippy::cast-possible-truncation` implied by `-D warnings` +help: ... or use `try_from` and handle the error accordingly + | +LL | i32::try_from(1f32); + | ~~~~~~~~~~~~~~~~~~~ error: casting `f32` to `u32` may truncate the value --> $DIR/cast.rs:25:5 | LL | 1f32 as u32; | ^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | u32::try_from(1f32); + | ~~~~~~~~~~~~~~~~~~~ error: casting `f32` to `u32` may lose the sign of the value --> $DIR/cast.rs:25:5 @@ -63,30 +74,60 @@ error: casting `f64` to `f32` may truncate the value | LL | 1f64 as f32; | ^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | f32::try_from(1f64); + | ~~~~~~~~~~~~~~~~~~~ error: casting `i32` to `i8` may truncate the value --> $DIR/cast.rs:27:5 | LL | 1i32 as i8; | ^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | i8::try_from(1i32); + | ~~~~~~~~~~~~~~~~~~ error: casting `i32` to `u8` may truncate the value --> $DIR/cast.rs:28:5 | LL | 1i32 as u8; | ^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | u8::try_from(1i32); + | ~~~~~~~~~~~~~~~~~~ error: casting `f64` to `isize` may truncate the value --> $DIR/cast.rs:29:5 | LL | 1f64 as isize; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | isize::try_from(1f64); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `f64` to `usize` may truncate the value --> $DIR/cast.rs:30:5 | LL | 1f64 as usize; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | usize::try_from(1f64); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `f64` to `usize` may lose the sign of the value --> $DIR/cast.rs:30:5 @@ -94,8 +135,38 @@ error: casting `f64` to `usize` may lose the sign of the value LL | 1f64 as usize; | ^^^^^^^^^^^^^ +error: casting `u32` to `u16` may truncate the value + --> $DIR/cast.rs:31:5 + | +LL | 1f32 as u32 as u16; + | ^^^^^^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... 
or use `try_from` and handle the error accordingly + | +LL | u16::try_from(1f32 as u32); + | ~~~~~~~~~~~~~~~~~~~~~~~~~~ + +error: casting `f32` to `u32` may truncate the value + --> $DIR/cast.rs:31:5 + | +LL | 1f32 as u32 as u16; + | ^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | u32::try_from(1f32) as u16; + | ~~~~~~~~~~~~~~~~~~~ + +error: casting `f32` to `u32` may lose the sign of the value + --> $DIR/cast.rs:31:5 + | +LL | 1f32 as u32 as u16; + | ^^^^^^^^^^^ + error: casting `u8` to `i8` may wrap around the value - --> $DIR/cast.rs:32:5 + --> $DIR/cast.rs:33:5 | LL | 1u8 as i8; | ^^^^^^^^^ @@ -103,61 +174,79 @@ LL | 1u8 as i8; = note: `-D clippy::cast-possible-wrap` implied by `-D warnings` error: casting `u16` to `i16` may wrap around the value - --> $DIR/cast.rs:33:5 + --> $DIR/cast.rs:34:5 | LL | 1u16 as i16; | ^^^^^^^^^^^ error: casting `u32` to `i32` may wrap around the value - --> $DIR/cast.rs:34:5 + --> $DIR/cast.rs:35:5 | LL | 1u32 as i32; | ^^^^^^^^^^^ error: casting `u64` to `i64` may wrap around the value - --> $DIR/cast.rs:35:5 + --> $DIR/cast.rs:36:5 | LL | 1u64 as i64; | ^^^^^^^^^^^ error: casting `usize` to `isize` may wrap around the value - --> $DIR/cast.rs:36:5 + --> $DIR/cast.rs:37:5 | LL | 1usize as isize; | ^^^^^^^^^^^^^^^ error: casting `i32` to `u32` may lose the sign of the value - --> $DIR/cast.rs:39:5 + --> $DIR/cast.rs:40:5 | LL | -1i32 as u32; | ^^^^^^^^^^^^ error: casting `isize` to `usize` may lose the sign of the value - --> $DIR/cast.rs:41:5 + --> $DIR/cast.rs:42:5 | LL | -1isize as usize; | ^^^^^^^^^^^^^^^^ error: casting `i64` to `i8` may truncate the value - --> $DIR/cast.rs:108:5 + --> $DIR/cast.rs:109:5 | LL | (-99999999999i64).min(1) as i8; // should be linted because signed | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | i8::try_from((-99999999999i64).min(1)); // should be linted because signed + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ error: casting `u64` to `u8` may truncate the value - --> $DIR/cast.rs:120:5 + --> $DIR/cast.rs:121:5 | LL | 999999u64.clamp(0, 256) as u8; // should still be linted | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | u8::try_from(999999u64.clamp(0, 256)); // should still be linted + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ error: casting `main::E2` to `u8` may truncate the value - --> $DIR/cast.rs:141:21 + --> $DIR/cast.rs:142:21 | LL | let _ = self as u8; | ^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... 
or use `try_from` and handle the error accordingly + | +LL | let _ = u8::try_from(self); + | ~~~~~~~~~~~~~~~~~~ error: casting `main::E2::B` to `u8` will truncate the value - --> $DIR/cast.rs:142:21 + --> $DIR/cast.rs:143:21 | LL | let _ = Self::B as u8; | ^^^^^^^^^^^^^ @@ -165,46 +254,82 @@ LL | let _ = Self::B as u8; = note: `-D clippy::cast-enum-truncation` implied by `-D warnings` error: casting `main::E5` to `i8` may truncate the value - --> $DIR/cast.rs:178:21 + --> $DIR/cast.rs:179:21 | LL | let _ = self as i8; | ^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | let _ = i8::try_from(self); + | ~~~~~~~~~~~~~~~~~~ error: casting `main::E5::A` to `i8` will truncate the value - --> $DIR/cast.rs:179:21 + --> $DIR/cast.rs:180:21 | LL | let _ = Self::A as i8; | ^^^^^^^^^^^^^ error: casting `main::E6` to `i16` may truncate the value - --> $DIR/cast.rs:193:21 + --> $DIR/cast.rs:194:21 | LL | let _ = self as i16; | ^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | let _ = i16::try_from(self); + | ~~~~~~~~~~~~~~~~~~~ error: casting `main::E7` to `usize` may truncate the value on targets with 32-bit wide pointers - --> $DIR/cast.rs:208:21 + --> $DIR/cast.rs:209:21 | LL | let _ = self as usize; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | let _ = usize::try_from(self); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `main::E10` to `u16` may truncate the value - --> $DIR/cast.rs:249:21 + --> $DIR/cast.rs:250:21 | LL | let _ = self as u16; | ^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | let _ = u16::try_from(self); + | ~~~~~~~~~~~~~~~~~~~ error: casting `u32` to `u8` may truncate the value - --> $DIR/cast.rs:257:13 + --> $DIR/cast.rs:258:13 | LL | let c = (q >> 16) as u8; | ^^^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | let c = u8::try_from((q >> 16)); + | ~~~~~~~~~~~~~~~~~~~~~~~ error: casting `u32` to `u8` may truncate the value - --> $DIR/cast.rs:260:13 + --> $DIR/cast.rs:261:13 | LL | let c = (q / 1000) as u8; | ^^^^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | let c = u8::try_from((q / 1000)); + | ~~~~~~~~~~~~~~~~~~~~~~~~ -error: aborting due to 33 previous errors +error: aborting due to 36 previous errors diff --git a/tests/ui/cast_size.stderr b/tests/ui/cast_size.stderr index 95552f2e28539..8acf26049f4d1 100644 --- a/tests/ui/cast_size.stderr +++ b/tests/ui/cast_size.stderr @@ -4,7 +4,12 @@ error: casting `isize` to `i8` may truncate the value LL | 1isize as i8; | ^^^^^^^^^^^^ | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... = note: `-D clippy::cast-possible-truncation` implied by `-D warnings` +help: ... 
or use `try_from` and handle the error accordingly + | +LL | i8::try_from(1isize); + | ~~~~~~~~~~~~~~~~~~~~ error: casting `isize` to `f64` causes a loss of precision on targets with 64-bit wide pointers (`isize` is 64 bits wide, but `f64`'s mantissa is only 52 bits wide) --> $DIR/cast_size.rs:15:5 @@ -37,24 +42,48 @@ error: casting `isize` to `i32` may truncate the value on targets with 64-bit wi | LL | 1isize as i32; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | i32::try_from(1isize); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `isize` to `u32` may truncate the value on targets with 64-bit wide pointers --> $DIR/cast_size.rs:20:5 | LL | 1isize as u32; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | u32::try_from(1isize); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `usize` to `u32` may truncate the value on targets with 64-bit wide pointers --> $DIR/cast_size.rs:21:5 | LL | 1usize as u32; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | u32::try_from(1usize); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `usize` to `i32` may truncate the value on targets with 64-bit wide pointers --> $DIR/cast_size.rs:22:5 | LL | 1usize as i32; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | i32::try_from(1usize); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `usize` to `i32` may wrap around the value on targets with 32-bit wide pointers --> $DIR/cast_size.rs:22:5 @@ -69,18 +98,36 @@ error: casting `i64` to `isize` may truncate the value on targets with 32-bit wi | LL | 1i64 as isize; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | isize::try_from(1i64); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `i64` to `usize` may truncate the value on targets with 32-bit wide pointers --> $DIR/cast_size.rs:25:5 | LL | 1i64 as usize; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | usize::try_from(1i64); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `u64` to `isize` may truncate the value on targets with 32-bit wide pointers --> $DIR/cast_size.rs:26:5 | LL | 1u64 as isize; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... or use `try_from` and handle the error accordingly + | +LL | isize::try_from(1u64); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `u64` to `isize` may wrap around the value on targets with 64-bit wide pointers --> $DIR/cast_size.rs:26:5 @@ -93,6 +140,12 @@ error: casting `u64` to `usize` may truncate the value on targets with 32-bit wi | LL | 1u64 as usize; | ^^^^^^^^^^^^^ + | + = help: if this is intentional allow the lint with `#[allow(clippy::cast_precision_loss)]` ... +help: ... 
or use `try_from` and handle the error accordingly + | +LL | usize::try_from(1u64); + | ~~~~~~~~~~~~~~~~~~~~~ error: casting `u32` to `isize` may wrap around the value on targets with 32-bit wide pointers --> $DIR/cast_size.rs:28:5 diff --git a/tests/ui/module_name_repetitions.stderr b/tests/ui/module_name_repetitions.stderr index 3f343a3e43018..277801194a1d5 100644 --- a/tests/ui/module_name_repetitions.stderr +++ b/tests/ui/module_name_repetitions.stderr @@ -1,34 +1,34 @@ error: item name starts with its containing module's name - --> $DIR/module_name_repetitions.rs:8:5 + --> $DIR/module_name_repetitions.rs:8:12 | LL | pub fn foo_bar() {} - | ^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^ | = note: `-D clippy::module-name-repetitions` implied by `-D warnings` error: item name ends with its containing module's name - --> $DIR/module_name_repetitions.rs:9:5 + --> $DIR/module_name_repetitions.rs:9:12 | LL | pub fn bar_foo() {} - | ^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^ error: item name starts with its containing module's name - --> $DIR/module_name_repetitions.rs:10:5 + --> $DIR/module_name_repetitions.rs:10:16 | LL | pub struct FooCake; - | ^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^ error: item name ends with its containing module's name - --> $DIR/module_name_repetitions.rs:11:5 + --> $DIR/module_name_repetitions.rs:11:14 | LL | pub enum CakeFoo {} - | ^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^ error: item name starts with its containing module's name - --> $DIR/module_name_repetitions.rs:12:5 + --> $DIR/module_name_repetitions.rs:12:16 | LL | pub struct Foo7Bar; - | ^^^^^^^^^^^^^^^^^^^ + | ^^^^^^^ error: aborting due to 5 previous errors diff --git a/tests/ui/multiple_unsafe_ops_per_block.rs b/tests/ui/multiple_unsafe_ops_per_block.rs new file mode 100644 index 0000000000000..41263535df673 --- /dev/null +++ b/tests/ui/multiple_unsafe_ops_per_block.rs @@ -0,0 +1,110 @@ +#![allow(unused)] +#![allow(deref_nullptr)] +#![allow(clippy::unnecessary_operation)] +#![allow(clippy::drop_copy)] +#![warn(clippy::multiple_unsafe_ops_per_block)] + +use core::arch::asm; + +fn raw_ptr() -> *const () { + core::ptr::null() +} + +unsafe fn not_very_safe() {} + +struct Sample; + +impl Sample { + unsafe fn not_very_safe(&self) {} +} + +#[allow(non_upper_case_globals)] +const sample: Sample = Sample; + +union U { + i: i32, + u: u32, +} + +static mut STATIC: i32 = 0; + +fn test1() { + unsafe { + STATIC += 1; + not_very_safe(); + } +} + +fn test2() { + let u = U { i: 0 }; + + unsafe { + drop(u.u); + *raw_ptr(); + } +} + +fn test3() { + unsafe { + asm!("nop"); + sample.not_very_safe(); + STATIC = 0; + } +} + +fn test_all() { + let u = U { i: 0 }; + unsafe { + drop(u.u); + drop(STATIC); + sample.not_very_safe(); + not_very_safe(); + *raw_ptr(); + asm!("nop"); + } +} + +// no lint +fn correct1() { + unsafe { + STATIC += 1; + } +} + +// no lint +fn correct2() { + unsafe { + STATIC += 1; + } + + unsafe { + *raw_ptr(); + } +} + +// no lint +fn correct3() { + let u = U { u: 0 }; + + unsafe { + not_very_safe(); + } + + unsafe { + drop(u.i); + } +} + +// tests from the issue (https://github.com/rust-lang/rust-clippy/issues/10064) + +unsafe fn read_char_bad(ptr: *const u8) -> char { + unsafe { char::from_u32_unchecked(*ptr.cast::()) } +} + +// no lint +unsafe fn read_char_good(ptr: *const u8) -> char { + let int_value = unsafe { *ptr.cast::() }; + unsafe { core::char::from_u32_unchecked(int_value) } +} + +fn main() {} diff --git a/tests/ui/multiple_unsafe_ops_per_block.stderr b/tests/ui/multiple_unsafe_ops_per_block.stderr new file mode 100644 index 
0000000000000..f6b8341795d23 --- /dev/null +++ b/tests/ui/multiple_unsafe_ops_per_block.stderr @@ -0,0 +1,129 @@ +error: this `unsafe` block contains 2 unsafe operations, expected only one + --> $DIR/multiple_unsafe_ops_per_block.rs:32:5 + | +LL | / unsafe { +LL | | STATIC += 1; +LL | | not_very_safe(); +LL | | } + | |_____^ + | +note: modification of a mutable static occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:33:9 + | +LL | STATIC += 1; + | ^^^^^^^^^^^ +note: unsafe function call occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:34:9 + | +LL | not_very_safe(); + | ^^^^^^^^^^^^^^^ + = note: `-D clippy::multiple-unsafe-ops-per-block` implied by `-D warnings` + +error: this `unsafe` block contains 2 unsafe operations, expected only one + --> $DIR/multiple_unsafe_ops_per_block.rs:41:5 + | +LL | / unsafe { +LL | | drop(u.u); +LL | | *raw_ptr(); +LL | | } + | |_____^ + | +note: union field access occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:42:14 + | +LL | drop(u.u); + | ^^^ +note: raw pointer dereference occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:43:9 + | +LL | *raw_ptr(); + | ^^^^^^^^^^ + +error: this `unsafe` block contains 3 unsafe operations, expected only one + --> $DIR/multiple_unsafe_ops_per_block.rs:48:5 + | +LL | / unsafe { +LL | | asm!("nop"); +LL | | sample.not_very_safe(); +LL | | STATIC = 0; +LL | | } + | |_____^ + | +note: inline assembly used here + --> $DIR/multiple_unsafe_ops_per_block.rs:49:9 + | +LL | asm!("nop"); + | ^^^^^^^^^^^ +note: unsafe method call occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:50:9 + | +LL | sample.not_very_safe(); + | ^^^^^^^^^^^^^^^^^^^^^^ +note: modification of a mutable static occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:51:9 + | +LL | STATIC = 0; + | ^^^^^^^^^^ + +error: this `unsafe` block contains 6 unsafe operations, expected only one + --> $DIR/multiple_unsafe_ops_per_block.rs:57:5 + | +LL | / unsafe { +LL | | drop(u.u); +LL | | drop(STATIC); +LL | | sample.not_very_safe(); +... 
| +LL | | asm!("nop"); +LL | | } + | |_____^ + | +note: union field access occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:58:14 + | +LL | drop(u.u); + | ^^^ +note: access of a mutable static occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:59:14 + | +LL | drop(STATIC); + | ^^^^^^ +note: unsafe method call occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:60:9 + | +LL | sample.not_very_safe(); + | ^^^^^^^^^^^^^^^^^^^^^^ +note: unsafe function call occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:61:9 + | +LL | not_very_safe(); + | ^^^^^^^^^^^^^^^ +note: raw pointer dereference occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:62:9 + | +LL | *raw_ptr(); + | ^^^^^^^^^^ +note: inline assembly used here + --> $DIR/multiple_unsafe_ops_per_block.rs:63:9 + | +LL | asm!("nop"); + | ^^^^^^^^^^^ + +error: this `unsafe` block contains 2 unsafe operations, expected only one + --> $DIR/multiple_unsafe_ops_per_block.rs:101:5 + | +LL | unsafe { char::from_u32_unchecked(*ptr.cast::()) } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +note: unsafe function call occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:101:14 + | +LL | unsafe { char::from_u32_unchecked(*ptr.cast::()) } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +note: raw pointer dereference occurs here + --> $DIR/multiple_unsafe_ops_per_block.rs:101:39 + | +LL | unsafe { char::from_u32_unchecked(*ptr.cast::()) } + | ^^^^^^^^^^^^^^^^^^ + +error: aborting due to 5 previous errors + diff --git a/tests/ui/needless_return.fixed b/tests/ui/needless_return.fixed index ab1c0e590bbc7..079e3531def1b 100644 --- a/tests/ui/needless_return.fixed +++ b/tests/ui/needless_return.fixed @@ -31,6 +31,16 @@ fn test_no_semicolon() -> bool { true } +#[rustfmt::skip] +fn test_multiple_semicolon() -> bool { + true +} + +#[rustfmt::skip] +fn test_multiple_semicolon_with_spaces() -> bool { + true +} + fn test_if_block() -> bool { if true { true diff --git a/tests/ui/needless_return.rs b/tests/ui/needless_return.rs index abed338bb9b29..c1c48284f0869 100644 --- a/tests/ui/needless_return.rs +++ b/tests/ui/needless_return.rs @@ -31,6 +31,16 @@ fn test_no_semicolon() -> bool { return true; } +#[rustfmt::skip] +fn test_multiple_semicolon() -> bool { + return true;;; +} + +#[rustfmt::skip] +fn test_multiple_semicolon_with_spaces() -> bool { + return true;; ; ; +} + fn test_if_block() -> bool { if true { return true; diff --git a/tests/ui/needless_return.stderr b/tests/ui/needless_return.stderr index 52eabf6e1370d..08b04bfe9d8bf 100644 --- a/tests/ui/needless_return.stderr +++ b/tests/ui/needless_return.stderr @@ -16,7 +16,23 @@ LL | return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:36:9 + --> $DIR/needless_return.rs:36:5 + | +LL | return true;;; + | ^^^^^^^^^^^ + | + = help: remove `return` + +error: unneeded `return` statement + --> $DIR/needless_return.rs:41:5 + | +LL | return true;; ; ; + | ^^^^^^^^^^^ + | + = help: remove `return` + +error: unneeded `return` statement + --> $DIR/needless_return.rs:46:9 | LL | return true; | ^^^^^^^^^^^ @@ -24,7 +40,7 @@ LL | return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:38:9 + --> $DIR/needless_return.rs:48:9 | LL | return false; | ^^^^^^^^^^^^ @@ -32,7 +48,7 @@ LL | return false; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:44:17 + --> $DIR/needless_return.rs:54:17 | LL | true => return false, | ^^^^^^^^^^^^ @@ -40,7 +56,7 @@ LL 
| true => return false, = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:46:13 + --> $DIR/needless_return.rs:56:13 | LL | return true; | ^^^^^^^^^^^ @@ -48,7 +64,7 @@ LL | return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:53:9 + --> $DIR/needless_return.rs:63:9 | LL | return true; | ^^^^^^^^^^^ @@ -56,7 +72,7 @@ LL | return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:55:16 + --> $DIR/needless_return.rs:65:16 | LL | let _ = || return true; | ^^^^^^^^^^^ @@ -64,7 +80,7 @@ LL | let _ = || return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:59:5 + --> $DIR/needless_return.rs:69:5 | LL | return the_answer!(); | ^^^^^^^^^^^^^^^^^^^^ @@ -72,7 +88,7 @@ LL | return the_answer!(); = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:62:21 + --> $DIR/needless_return.rs:72:21 | LL | fn test_void_fun() { | _____________________^ @@ -82,7 +98,7 @@ LL | | return; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:67:11 + --> $DIR/needless_return.rs:77:11 | LL | if b { | ___________^ @@ -92,7 +108,7 @@ LL | | return; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:69:13 + --> $DIR/needless_return.rs:79:13 | LL | } else { | _____________^ @@ -102,7 +118,7 @@ LL | | return; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:77:14 + --> $DIR/needless_return.rs:87:14 | LL | _ => return, | ^^^^^^ @@ -110,7 +126,7 @@ LL | _ => return, = help: replace `return` with a unit value error: unneeded `return` statement - --> $DIR/needless_return.rs:85:24 + --> $DIR/needless_return.rs:95:24 | LL | let _ = 42; | ________________________^ @@ -120,7 +136,7 @@ LL | | return; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:88:14 + --> $DIR/needless_return.rs:98:14 | LL | _ => return, | ^^^^^^ @@ -128,7 +144,7 @@ LL | _ => return, = help: replace `return` with a unit value error: unneeded `return` statement - --> $DIR/needless_return.rs:101:9 + --> $DIR/needless_return.rs:111:9 | LL | return String::from("test"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -136,7 +152,7 @@ LL | return String::from("test"); = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:103:9 + --> $DIR/needless_return.rs:113:9 | LL | return String::new(); | ^^^^^^^^^^^^^^^^^^^^ @@ -144,7 +160,7 @@ LL | return String::new(); = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:125:32 + --> $DIR/needless_return.rs:135:32 | LL | bar.unwrap_or_else(|_| return) | ^^^^^^ @@ -152,7 +168,7 @@ LL | bar.unwrap_or_else(|_| return) = help: replace `return` with an empty block error: unneeded `return` statement - --> $DIR/needless_return.rs:129:21 + --> $DIR/needless_return.rs:139:21 | LL | let _ = || { | _____________________^ @@ -162,7 +178,7 @@ LL | | return; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:132:20 + --> $DIR/needless_return.rs:142:20 | LL | let _ = || return; | ^^^^^^ @@ -170,7 +186,7 @@ LL | let _ = || return; = help: replace `return` with an empty block error: unneeded `return` statement - --> $DIR/needless_return.rs:138:32 + --> $DIR/needless_return.rs:148:32 | LL | res.unwrap_or_else(|_| return Foo) | ^^^^^^^^^^ @@ -178,7 +194,7 @@ LL | res.unwrap_or_else(|_| 
return Foo) = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:147:5 + --> $DIR/needless_return.rs:157:5 | LL | return true; | ^^^^^^^^^^^ @@ -186,7 +202,7 @@ LL | return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:151:5 + --> $DIR/needless_return.rs:161:5 | LL | return true; | ^^^^^^^^^^^ @@ -194,7 +210,7 @@ LL | return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:156:9 + --> $DIR/needless_return.rs:166:9 | LL | return true; | ^^^^^^^^^^^ @@ -202,7 +218,7 @@ LL | return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:158:9 + --> $DIR/needless_return.rs:168:9 | LL | return false; | ^^^^^^^^^^^^ @@ -210,7 +226,7 @@ LL | return false; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:164:17 + --> $DIR/needless_return.rs:174:17 | LL | true => return false, | ^^^^^^^^^^^^ @@ -218,7 +234,7 @@ LL | true => return false, = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:166:13 + --> $DIR/needless_return.rs:176:13 | LL | return true; | ^^^^^^^^^^^ @@ -226,7 +242,7 @@ LL | return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:173:9 + --> $DIR/needless_return.rs:183:9 | LL | return true; | ^^^^^^^^^^^ @@ -234,7 +250,7 @@ LL | return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:175:16 + --> $DIR/needless_return.rs:185:16 | LL | let _ = || return true; | ^^^^^^^^^^^ @@ -242,7 +258,7 @@ LL | let _ = || return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:179:5 + --> $DIR/needless_return.rs:189:5 | LL | return the_answer!(); | ^^^^^^^^^^^^^^^^^^^^ @@ -250,7 +266,7 @@ LL | return the_answer!(); = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:182:33 + --> $DIR/needless_return.rs:192:33 | LL | async fn async_test_void_fun() { | _________________________________^ @@ -260,7 +276,7 @@ LL | | return; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:187:11 + --> $DIR/needless_return.rs:197:11 | LL | if b { | ___________^ @@ -270,7 +286,7 @@ LL | | return; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:189:13 + --> $DIR/needless_return.rs:199:13 | LL | } else { | _____________^ @@ -280,7 +296,7 @@ LL | | return; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:197:14 + --> $DIR/needless_return.rs:207:14 | LL | _ => return, | ^^^^^^ @@ -288,7 +304,7 @@ LL | _ => return, = help: replace `return` with a unit value error: unneeded `return` statement - --> $DIR/needless_return.rs:210:9 + --> $DIR/needless_return.rs:220:9 | LL | return String::from("test"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -296,7 +312,7 @@ LL | return String::from("test"); = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:212:9 + --> $DIR/needless_return.rs:222:9 | LL | return String::new(); | ^^^^^^^^^^^^^^^^^^^^ @@ -304,7 +320,7 @@ LL | return String::new(); = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:228:5 + --> $DIR/needless_return.rs:238:5 | LL | return format!("Hello {}", "world!"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -312,7 +328,7 @@ LL | return format!("Hello {}", "world!"); = help: remove 
`return` error: unneeded `return` statement - --> $DIR/needless_return.rs:239:9 + --> $DIR/needless_return.rs:249:9 | LL | return true; | ^^^^^^^^^^^ @@ -320,7 +336,7 @@ LL | return true; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:241:9 + --> $DIR/needless_return.rs:251:9 | LL | return false; | ^^^^^^^^^^^^ @@ -328,7 +344,7 @@ LL | return false; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:248:13 + --> $DIR/needless_return.rs:258:13 | LL | return 10; | ^^^^^^^^^ @@ -336,7 +352,7 @@ LL | return 10; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:251:13 + --> $DIR/needless_return.rs:261:13 | LL | return 100; | ^^^^^^^^^^ @@ -344,7 +360,7 @@ LL | return 100; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:259:9 + --> $DIR/needless_return.rs:269:9 | LL | return 0; | ^^^^^^^^ @@ -352,7 +368,7 @@ LL | return 0; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:266:13 + --> $DIR/needless_return.rs:276:13 | LL | return *(x as *const isize); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -360,7 +376,7 @@ LL | return *(x as *const isize); = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:268:13 + --> $DIR/needless_return.rs:278:13 | LL | return !*(x as *const isize); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -368,7 +384,7 @@ LL | return !*(x as *const isize); = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:275:20 + --> $DIR/needless_return.rs:285:20 | LL | let _ = 42; | ____________________^ @@ -379,7 +395,7 @@ LL | | return; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:282:20 + --> $DIR/needless_return.rs:292:20 | LL | let _ = 42; return; | ^^^^^^^ @@ -387,7 +403,7 @@ LL | let _ = 42; return; = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:294:9 + --> $DIR/needless_return.rs:304:9 | LL | return Ok(format!("ok!")); | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -395,12 +411,12 @@ LL | return Ok(format!("ok!")); = help: remove `return` error: unneeded `return` statement - --> $DIR/needless_return.rs:296:9 + --> $DIR/needless_return.rs:306:9 | LL | return Err(format!("err!")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = help: remove `return` -error: aborting due to 48 previous errors +error: aborting due to 50 previous errors diff --git a/tests/ui/transmutes_expressible_as_ptr_casts.fixed b/tests/ui/transmutes_expressible_as_ptr_casts.fixed index 7263abac15dfb..55307506eb3c7 100644 --- a/tests/ui/transmutes_expressible_as_ptr_casts.fixed +++ b/tests/ui/transmutes_expressible_as_ptr_casts.fixed @@ -51,6 +51,8 @@ fn main() { // e is a function pointer type and U is an integer; fptr-addr-cast let _usize_from_fn_ptr_transmute = unsafe { foo as usize }; let _usize_from_fn_ptr = foo as *const usize; + + let _usize_from_ref = unsafe { &1u32 as *const u32 as usize }; } // If a ref-to-ptr cast of this form where the pointer type points to a type other diff --git a/tests/ui/transmutes_expressible_as_ptr_casts.rs b/tests/ui/transmutes_expressible_as_ptr_casts.rs index d8e4421d4c18e..e7360f3f9dcba 100644 --- a/tests/ui/transmutes_expressible_as_ptr_casts.rs +++ b/tests/ui/transmutes_expressible_as_ptr_casts.rs @@ -51,6 +51,8 @@ fn main() { // e is a function pointer type and U is an integer; fptr-addr-cast let _usize_from_fn_ptr_transmute = unsafe { transmute:: u8, usize>(foo) }; let 
_usize_from_fn_ptr = foo as *const usize; + + let _usize_from_ref = unsafe { transmute::<*const u32, usize>(&1u32) }; } // If a ref-to-ptr cast of this form where the pointer type points to a type other diff --git a/tests/ui/transmutes_expressible_as_ptr_casts.stderr b/tests/ui/transmutes_expressible_as_ptr_casts.stderr index de9418c8d1adc..e862fcb67a4a0 100644 --- a/tests/ui/transmutes_expressible_as_ptr_casts.stderr +++ b/tests/ui/transmutes_expressible_as_ptr_casts.stderr @@ -46,11 +46,17 @@ error: transmute from `fn(usize) -> u8` to `usize` which could be expressed as a LL | let _usize_from_fn_ptr_transmute = unsafe { transmute:: u8, usize>(foo) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `foo as usize` +error: transmute from `*const u32` to `usize` which could be expressed as a pointer cast instead + --> $DIR/transmutes_expressible_as_ptr_casts.rs:55:36 + | +LL | let _usize_from_ref = unsafe { transmute::<*const u32, usize>(&1u32) }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&1u32 as *const u32 as usize` + error: transmute from a reference to a pointer - --> $DIR/transmutes_expressible_as_ptr_casts.rs:64:14 + --> $DIR/transmutes_expressible_as_ptr_casts.rs:66:14 | LL | unsafe { transmute::<&[i32; 1], *const u8>(in_param) } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `in_param as *const [i32; 1] as *const u8` -error: aborting due to 8 previous errors +error: aborting due to 9 previous errors diff --git a/tests/ui/unnecessary_safety_comment.rs b/tests/ui/unnecessary_safety_comment.rs index 7fefea7051d69..89fedb145f88b 100644 --- a/tests/ui/unnecessary_safety_comment.rs +++ b/tests/ui/unnecessary_safety_comment.rs @@ -48,4 +48,21 @@ fn unnecessary_on_stmt_and_expr() -> u32 { 24 } +mod issue_10084 { + unsafe fn bar() -> i32 { + 42 + } + + macro_rules! 
foo { + () => { + // SAFETY: This is necessary + unsafe { bar() } + }; + } + + fn main() { + foo!(); + } +} + fn main() {} From e65a7ff0b39f15eb9ff4b3c7c12d719aa140870a Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Fri, 27 Jan 2023 20:02:51 -0800 Subject: [PATCH 175/501] Remove from librustdoc and clippy too --- clippy_lints/src/lib.rs | 1 - clippy_lints/src/methods/collapsible_str_replace.rs | 2 +- clippy_utils/src/lib.rs | 1 - clippy_utils/src/macros.rs | 2 +- clippy_utils/src/mir/possible_borrower.rs | 2 +- 5 files changed, 3 insertions(+), 5 deletions(-) diff --git a/clippy_lints/src/lib.rs b/clippy_lints/src/lib.rs index 5c4b604104417..d06830397761b 100644 --- a/clippy_lints/src/lib.rs +++ b/clippy_lints/src/lib.rs @@ -1,7 +1,6 @@ #![feature(array_windows)] #![feature(binary_heap_into_iter_sorted)] #![feature(box_patterns)] -#![feature(control_flow_enum)] #![feature(drain_filter)] #![feature(iter_intersperse)] #![feature(let_chains)] diff --git a/clippy_lints/src/methods/collapsible_str_replace.rs b/clippy_lints/src/methods/collapsible_str_replace.rs index ac61b4377885b..5e01ed90ff099 100644 --- a/clippy_lints/src/methods/collapsible_str_replace.rs +++ b/clippy_lints/src/methods/collapsible_str_replace.rs @@ -54,7 +54,7 @@ fn collect_replace_calls<'tcx>( from_args.push_front(from); ControlFlow::Continue(()) } else { - ControlFlow::BREAK + ControlFlow::Break(()) } } else { ControlFlow::Continue(()) diff --git a/clippy_utils/src/lib.rs b/clippy_utils/src/lib.rs index e2965146cfe6d..a246291f9a9bd 100644 --- a/clippy_utils/src/lib.rs +++ b/clippy_utils/src/lib.rs @@ -1,6 +1,5 @@ #![feature(array_chunks)] #![feature(box_patterns)] -#![feature(control_flow_enum)] #![feature(let_chains)] #![feature(lint_reasons)] #![feature(never_type)] diff --git a/clippy_utils/src/macros.rs b/clippy_utils/src/macros.rs index 659063b97e74a..d7f466c197636 100644 --- a/clippy_utils/src/macros.rs +++ b/clippy_utils/src/macros.rs @@ -327,7 +327,7 @@ fn is_assert_arg(cx: &LateContext<'_>, expr: &Expr<'_>, assert_expn: ExpnId) -> } else { match cx.tcx.item_name(macro_call.def_id) { // `cfg!(debug_assertions)` in `debug_assert!` - sym::cfg => ControlFlow::CONTINUE, + sym::cfg => ControlFlow::Continue(()), // assert!(other_macro!(..)) _ => ControlFlow::Break(true), } diff --git a/clippy_utils/src/mir/possible_borrower.rs b/clippy_utils/src/mir/possible_borrower.rs index 9adae77338945..5836eb73bd94c 100644 --- a/clippy_utils/src/mir/possible_borrower.rs +++ b/clippy_utils/src/mir/possible_borrower.rs @@ -140,7 +140,7 @@ impl TypeVisitor<'_> for ContainsRegion { type BreakTy = (); fn visit_region(&mut self, _: ty::Region<'_>) -> ControlFlow { - ControlFlow::BREAK + ControlFlow::Break(()) } } From 92c4f1e2d9db43ebc0449fbbc2150eeb9429e65b Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sun, 22 Jan 2023 18:00:33 +0000 Subject: [PATCH 176/501] Take a LocalDefId in hir::Visitor::visit_fn. 
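
For clippy, the visible effect of this change is that `LateLintPass::check_fn` and
`intravisit::Visitor::visit_fn` implementations now receive the function's
`LocalDefId` directly instead of a `HirId`. As a rough sketch only (hypothetical
lint code written for illustration against the rustc_private crates, not taken from
this patch), an implementation under the new signature looks roughly like this;
where a `HirId` is still needed, it can be recovered through the HIR map:

    use rustc_hir::intravisit::FnKind;
    use rustc_hir::{Body, FnDecl};
    use rustc_lint::LateContext;
    use rustc_span::def_id::LocalDefId;
    use rustc_span::Span;

    // Sketch of a `check_fn` body: the last parameter is now the item's
    // `LocalDefId` rather than a `HirId`.
    fn check_fn<'tcx>(
        cx: &LateContext<'tcx>,
        _kind: FnKind<'tcx>,
        _decl: &'tcx FnDecl<'_>,
        _body: &'tcx Body<'_>,
        _span: Span,
        def_id: LocalDefId,
    ) {
        // Convert back only where parent traversal or attribute lookup
        // still expects a `HirId`.
        let _hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id);
    }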
--- clippy_lints/src/booleans.rs | 5 +-- clippy_lints/src/cognitive_complexity.rs | 6 ++-- clippy_lints/src/derive.rs | 5 +-- clippy_lints/src/doc.rs | 18 +++++----- clippy_lints/src/escape.rs | 12 ++++--- clippy_lints/src/excessive_bools.rs | 6 ++-- clippy_lints/src/exhaustive_items.rs | 3 +- .../src/functions/misnamed_getters.rs | 3 +- clippy_lints/src/functions/mod.rs | 8 +++-- clippy_lints/src/functions/must_use.rs | 22 ++++++------- .../src/functions/not_unsafe_ptr_arg_deref.rs | 4 +-- clippy_lints/src/future_not_send.rs | 10 +++--- clippy_lints/src/implicit_return.rs | 3 +- clippy_lints/src/inherent_to_string.rs | 2 +- clippy_lints/src/lifetimes.rs | 11 +++---- clippy_lints/src/manual_async_fn.rs | 5 +-- clippy_lints/src/manual_non_exhaustive.rs | 3 +- clippy_lints/src/methods/mod.rs | 4 +-- clippy_lints/src/methods/suspicious_map.rs | 5 ++- .../src/methods/unnecessary_to_owned.rs | 7 ++-- clippy_lints/src/misc.rs | 7 ++-- clippy_lints/src/missing_const_for_fn.rs | 9 ++--- clippy_lints/src/mut_key.rs | 10 +++--- clippy_lints/src/needless_pass_by_value.rs | 7 ++-- clippy_lints/src/new_without_default.rs | 6 ++-- .../src/operators/arithmetic_side_effects.rs | 3 +- .../src/operators/numeric_arithmetic.rs | 2 +- clippy_lints/src/panic_in_result_fn.rs | 9 +++-- clippy_lints/src/pass_by_ref_or_value.rs | 7 ++-- clippy_lints/src/pattern_type_mismatch.rs | 7 ++-- clippy_lints/src/redundant_clone.rs | 9 +++-- clippy_lints/src/return_self_not_must_use.rs | 18 +++++----- clippy_lints/src/returns.rs | 5 +-- clippy_lints/src/self_named_constructors.rs | 2 +- clippy_lints/src/trailing_empty_array.rs | 3 +- clippy_lints/src/types/mod.rs | 33 +++++++++++++------ clippy_lints/src/unnecessary_wraps.rs | 26 ++++++++------- clippy_lints/src/unused_async.rs | 7 ++-- clippy_lints/src/unwrap.rs | 3 +- clippy_lints/src/unwrap_in_result.rs | 4 +-- clippy_utils/src/lib.rs | 3 +- 41 files changed, 175 insertions(+), 147 deletions(-) diff --git a/clippy_lints/src/booleans.rs b/clippy_lints/src/booleans.rs index 939bdbcdc7cd7..e8106beec3742 100644 --- a/clippy_lints/src/booleans.rs +++ b/clippy_lints/src/booleans.rs @@ -6,9 +6,10 @@ use if_chain::if_chain; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; use rustc_hir::intravisit::{walk_expr, FnKind, Visitor}; -use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, HirId, UnOp}; +use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, UnOp}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; use rustc_span::sym; @@ -82,7 +83,7 @@ impl<'tcx> LateLintPass<'tcx> for NonminimalBool { _: &'tcx FnDecl<'_>, body: &'tcx Body<'_>, _: Span, - _: HirId, + _: LocalDefId, ) { NonminimalBoolVisitor { cx }.visit_body(body); } diff --git a/clippy_lints/src/cognitive_complexity.rs b/clippy_lints/src/cognitive_complexity.rs index 1c3a89a97824c..e8531157e0f7a 100644 --- a/clippy_lints/src/cognitive_complexity.rs +++ b/clippy_lints/src/cognitive_complexity.rs @@ -8,9 +8,10 @@ use clippy_utils::{get_async_fn_body, is_async_fn, LimitStack}; use core::ops::ControlFlow; use rustc_ast::ast::Attribute; use rustc_hir::intravisit::FnKind; -use rustc_hir::{Body, Expr, ExprKind, FnDecl, HirId}; +use rustc_hir::{Body, Expr, ExprKind, FnDecl}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; use rustc_span::{sym, BytePos}; 
@@ -140,9 +141,8 @@ impl<'tcx> LateLintPass<'tcx> for CognitiveComplexity { decl: &'tcx FnDecl<'_>, body: &'tcx Body<'_>, span: Span, - hir_id: HirId, + def_id: LocalDefId, ) { - let def_id = cx.tcx.hir().local_def_id(hir_id); if !cx.tcx.has_attr(def_id.to_def_id(), sym::test) { let expr = if is_async_fn(kind) { match get_async_fn_body(cx.tcx, body) { diff --git a/clippy_lints/src/derive.rs b/clippy_lints/src/derive.rs index 248d738841067..f8fc726d603f8 100644 --- a/clippy_lints/src/derive.rs +++ b/clippy_lints/src/derive.rs @@ -7,7 +7,7 @@ use rustc_errors::Applicability; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::{walk_expr, walk_fn, walk_item, FnKind, Visitor}; use rustc_hir::{ - self as hir, BlockCheckMode, BodyId, Constness, Expr, ExprKind, FnDecl, HirId, Impl, Item, ItemKind, UnsafeSource, + self as hir, BlockCheckMode, BodyId, Constness, Expr, ExprKind, FnDecl, Impl, Item, ItemKind, UnsafeSource, Unsafety, }; use rustc_lint::{LateContext, LateLintPass}; @@ -18,6 +18,7 @@ use rustc_middle::ty::{ TraitPredicate, Ty, TyCtxt, }; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; use rustc_span::sym; @@ -425,7 +426,7 @@ struct UnsafeVisitor<'a, 'tcx> { impl<'tcx> Visitor<'tcx> for UnsafeVisitor<'_, 'tcx> { type NestedFilter = nested_filter::All; - fn visit_fn(&mut self, kind: FnKind<'tcx>, decl: &'tcx FnDecl<'_>, body_id: BodyId, _: Span, id: HirId) { + fn visit_fn(&mut self, kind: FnKind<'tcx>, decl: &'tcx FnDecl<'_>, body_id: BodyId, _: Span, id: LocalDefId) { if self.has_unsafe { return; } diff --git a/clippy_lints/src/doc.rs b/clippy_lints/src/doc.rs index f7a3d6d53f714..127201b72e275 100644 --- a/clippy_lints/src/doc.rs +++ b/clippy_lints/src/doc.rs @@ -23,7 +23,6 @@ use rustc_parse::maybe_new_parser_from_source_str; use rustc_parse::parser::ForceCollect; use rustc_session::parse::ParseSess; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::def_id::LocalDefId; use rustc_span::edition::Edition; use rustc_span::source_map::{BytePos, FilePathMapping, SourceMap, Span}; use rustc_span::{sym, FileName, Pos}; @@ -302,7 +301,7 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown { panic_span: None, }; fpu.visit_expr(body.value); - lint_for_missing_headers(cx, item.owner_id.def_id, sig, headers, Some(body_id), fpu.panic_span); + lint_for_missing_headers(cx, item.owner_id, sig, headers, Some(body_id), fpu.panic_span); } }, hir::ItemKind::Impl(impl_) => { @@ -338,7 +337,7 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown { let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else { return }; if let hir::TraitItemKind::Fn(ref sig, ..) 
= item.kind { if !in_external_macro(cx.tcx.sess, item.span) { - lint_for_missing_headers(cx, item.owner_id.def_id, sig, headers, None, None); + lint_for_missing_headers(cx, item.owner_id, sig, headers, None, None); } } } @@ -357,20 +356,20 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown { panic_span: None, }; fpu.visit_expr(body.value); - lint_for_missing_headers(cx, item.owner_id.def_id, sig, headers, Some(body_id), fpu.panic_span); + lint_for_missing_headers(cx, item.owner_id, sig, headers, Some(body_id), fpu.panic_span); } } } fn lint_for_missing_headers( cx: &LateContext<'_>, - def_id: LocalDefId, + owner_id: hir::OwnerId, sig: &hir::FnSig<'_>, headers: DocHeaders, body_id: Option, panic_span: Option, ) { - if !cx.effective_visibilities.is_exported(def_id) { + if !cx.effective_visibilities.is_exported(owner_id.def_id) { return; // Private functions do not require doc comments } @@ -378,13 +377,13 @@ fn lint_for_missing_headers( if cx .tcx .hir() - .parent_iter(cx.tcx.hir().local_def_id_to_hir_id(def_id)) + .parent_iter(owner_id.into()) .any(|(id, _node)| is_doc_hidden(cx.tcx.hir().attrs(id))) { return; } - let span = cx.tcx.def_span(def_id); + let span = cx.tcx.def_span(owner_id); match (headers.safety, sig.header.unsafety) { (false, hir::Unsafety::Unsafe) => span_lint( cx, @@ -411,8 +410,7 @@ fn lint_for_missing_headers( ); } if !headers.errors { - let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id); - if is_type_diagnostic_item(cx, return_ty(cx, hir_id), sym::Result) { + if is_type_diagnostic_item(cx, return_ty(cx, owner_id), sym::Result) { span_lint( cx, MISSING_ERRORS_DOC, diff --git a/clippy_lints/src/escape.rs b/clippy_lints/src/escape.rs index dfb43893326eb..d6ab4c25e83ef 100644 --- a/clippy_lints/src/escape.rs +++ b/clippy_lints/src/escape.rs @@ -8,6 +8,7 @@ use rustc_middle::mir::FakeReadCause; use rustc_middle::ty::layout::LayoutOf; use rustc_middle::ty::{self, TraitRef, Ty}; use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; use rustc_span::symbol::kw; use rustc_target::spec::abi::Abi; @@ -63,7 +64,7 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal { _: &'tcx FnDecl<'_>, body: &'tcx Body<'_>, _: Span, - hir_id: HirId, + fn_def_id: LocalDefId, ) { if let Some(header) = fn_kind.header() { if header.abi != Abi::Rust { @@ -71,7 +72,11 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal { } } - let parent_id = cx.tcx.hir().get_parent_item(hir_id).def_id; + let parent_id = cx + .tcx + .hir() + .get_parent_item(cx.tcx.hir().local_def_id_to_hir_id(fn_def_id)) + .def_id; let parent_node = cx.tcx.hir().find_by_def_id(parent_id); let mut trait_self_ty = None; @@ -84,7 +89,7 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal { // find `self` ty for this trait if relevant if let ItemKind::Trait(_, _, _, _, items) = item.kind { for trait_item in items { - if trait_item.id.hir_id() == hir_id { + if trait_item.id.owner_id.def_id == fn_def_id { // be sure we have `self` parameter in this function if trait_item.kind == (AssocItemKind::Fn { has_self: true }) { trait_self_ty = Some( @@ -105,7 +110,6 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal { too_large_for_stack: self.too_large_for_stack, }; - let fn_def_id = cx.tcx.hir().local_def_id(hir_id); let infcx = cx.tcx.infer_ctxt().build(); ExprUseVisitor::new(&mut v, &infcx, fn_def_id, cx.param_env, cx.typeck_results()).consume_body(body); diff --git a/clippy_lints/src/excessive_bools.rs b/clippy_lints/src/excessive_bools.rs index fc2912f696e03..9d089fcad70e6 100644 --- 
a/clippy_lints/src/excessive_bools.rs +++ b/clippy_lints/src/excessive_bools.rs @@ -1,10 +1,11 @@ use clippy_utils::diagnostics::span_lint_and_help; use clippy_utils::{get_parent_as_impl, has_repr_attr, is_bool}; use rustc_hir::intravisit::FnKind; -use rustc_hir::{Body, FnDecl, HirId, Item, ItemKind, TraitFn, TraitItem, TraitItemKind, Ty}; +use rustc_hir::{Body, FnDecl, Item, ItemKind, TraitFn, TraitItem, TraitItemKind, Ty}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::Span; +use rustc_span::def_id::LocalDefId; use rustc_target::spec::abi::Abi; declare_clippy_lint! { @@ -168,8 +169,9 @@ impl<'tcx> LateLintPass<'tcx> for ExcessiveBools { fn_decl: &'tcx FnDecl<'tcx>, _: &'tcx Body<'tcx>, span: Span, - hir_id: HirId, + def_id: LocalDefId, ) { + let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id); if let Some(fn_header) = fn_kind.header() && fn_header.abi == Abi::Rust && get_parent_as_impl(cx.tcx, hir_id) diff --git a/clippy_lints/src/exhaustive_items.rs b/clippy_lints/src/exhaustive_items.rs index 1fece5d1c4809..9fd13084dc9e8 100644 --- a/clippy_lints/src/exhaustive_items.rs +++ b/clippy_lints/src/exhaustive_items.rs @@ -79,8 +79,7 @@ impl LateLintPass<'_> for ExhaustiveItems { then { let (lint, msg) = if let ItemKind::Struct(ref v, ..) = item.kind { if v.fields().iter().any(|f| { - let def_id = cx.tcx.hir().local_def_id(f.hir_id); - !cx.tcx.visibility(def_id).is_public() + !cx.tcx.visibility(f.def_id).is_public() }) { // skip structs with private fields return; diff --git a/clippy_lints/src/functions/misnamed_getters.rs b/clippy_lints/src/functions/misnamed_getters.rs index 27acad45ccf72..d6b50537c2e1d 100644 --- a/clippy_lints/src/functions/misnamed_getters.rs +++ b/clippy_lints/src/functions/misnamed_getters.rs @@ -1,7 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::snippet; use rustc_errors::Applicability; -use rustc_hir::{intravisit::FnKind, Body, ExprKind, FnDecl, HirId, ImplicitSelfKind, Unsafety}; +use rustc_hir::{intravisit::FnKind, Body, ExprKind, FnDecl, ImplicitSelfKind, Unsafety}; use rustc_lint::LateContext; use rustc_middle::ty; use rustc_span::Span; @@ -16,7 +16,6 @@ pub fn check_fn( decl: &FnDecl<'_>, body: &Body<'_>, span: Span, - _hir_id: HirId, ) { let FnKind::Method(ref ident, sig) = kind else { return; diff --git a/clippy_lints/src/functions/mod.rs b/clippy_lints/src/functions/mod.rs index 9dbce3f889bef..4399c68e130f7 100644 --- a/clippy_lints/src/functions/mod.rs +++ b/clippy_lints/src/functions/mod.rs @@ -9,6 +9,7 @@ use rustc_hir as hir; use rustc_hir::intravisit; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::def_id::LocalDefId; use rustc_span::Span; declare_clippy_lint! 
{ @@ -363,12 +364,13 @@ impl<'tcx> LateLintPass<'tcx> for Functions { decl: &'tcx hir::FnDecl<'_>, body: &'tcx hir::Body<'_>, span: Span, - hir_id: hir::HirId, + def_id: LocalDefId, ) { + let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id); too_many_arguments::check_fn(cx, kind, decl, span, hir_id, self.too_many_arguments_threshold); too_many_lines::check_fn(cx, kind, span, body, self.too_many_lines_threshold); - not_unsafe_ptr_arg_deref::check_fn(cx, kind, decl, body, hir_id); - misnamed_getters::check_fn(cx, kind, decl, body, span, hir_id); + not_unsafe_ptr_arg_deref::check_fn(cx, kind, decl, body, def_id); + misnamed_getters::check_fn(cx, kind, decl, body, span); } fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) { diff --git a/clippy_lints/src/functions/must_use.rs b/clippy_lints/src/functions/must_use.rs index d22bede36b419..29bdc46b647d5 100644 --- a/clippy_lints/src/functions/must_use.rs +++ b/clippy_lints/src/functions/must_use.rs @@ -1,6 +1,6 @@ use rustc_ast::ast::Attribute; use rustc_errors::Applicability; -use rustc_hir::def_id::{DefIdSet, LocalDefId}; +use rustc_hir::def_id::DefIdSet; use rustc_hir::{self as hir, def::Res, QPath}; use rustc_lint::{LateContext, LintContext}; use rustc_middle::{ @@ -27,14 +27,14 @@ pub(super) fn check_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_> let is_public = cx.effective_visibilities.is_exported(item.owner_id.def_id); let fn_header_span = item.span.with_hi(sig.decl.output.span().hi()); if let Some(attr) = attr { - check_needless_must_use(cx, sig.decl, item.hir_id(), item.span, fn_header_span, attr); + check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr); } else if is_public && !is_proc_macro(cx.sess(), attrs) && !attrs.iter().any(|a| a.has_name(sym::no_mangle)) { check_must_use_candidate( cx, sig.decl, cx.tcx.hir().body(*body_id), item.span, - item.owner_id.def_id, + item.owner_id, item.span.with_hi(sig.decl.output.span().hi()), "this function could have a `#[must_use]` attribute", ); @@ -49,7 +49,7 @@ pub(super) fn check_impl_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Imp let attrs = cx.tcx.hir().attrs(item.hir_id()); let attr = cx.tcx.get_attr(item.owner_id.to_def_id(), sym::must_use); if let Some(attr) = attr { - check_needless_must_use(cx, sig.decl, item.hir_id(), item.span, fn_header_span, attr); + check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr); } else if is_public && !is_proc_macro(cx.sess(), attrs) && trait_ref_of_method(cx, item.owner_id.def_id).is_none() @@ -59,7 +59,7 @@ pub(super) fn check_impl_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Imp sig.decl, cx.tcx.hir().body(*body_id), item.span, - item.owner_id.def_id, + item.owner_id, item.span.with_hi(sig.decl.output.span().hi()), "this method could have a `#[must_use]` attribute", ); @@ -75,7 +75,7 @@ pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Tr let attrs = cx.tcx.hir().attrs(item.hir_id()); let attr = cx.tcx.get_attr(item.owner_id.to_def_id(), sym::must_use); if let Some(attr) = attr { - check_needless_must_use(cx, sig.decl, item.hir_id(), item.span, fn_header_span, attr); + check_needless_must_use(cx, sig.decl, item.owner_id, item.span, fn_header_span, attr); } else if let hir::TraitFn::Provided(eid) = *eid { let body = cx.tcx.hir().body(eid); if attr.is_none() && is_public && !is_proc_macro(cx.sess(), attrs) { @@ -84,7 +84,7 @@ pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Tr 
sig.decl, body, item.span, - item.owner_id.def_id, + item.owner_id, item.span.with_hi(sig.decl.output.span().hi()), "this method could have a `#[must_use]` attribute", ); @@ -96,7 +96,7 @@ pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Tr fn check_needless_must_use( cx: &LateContext<'_>, decl: &hir::FnDecl<'_>, - item_id: hir::HirId, + item_id: hir::OwnerId, item_span: Span, fn_header_span: Span, attr: &Attribute, @@ -131,7 +131,7 @@ fn check_must_use_candidate<'tcx>( decl: &'tcx hir::FnDecl<'_>, body: &'tcx hir::Body<'_>, item_span: Span, - item_id: LocalDefId, + item_id: hir::OwnerId, fn_span: Span, msg: &str, ) { @@ -139,8 +139,8 @@ fn check_must_use_candidate<'tcx>( || mutates_static(cx, body) || in_external_macro(cx.sess(), item_span) || returns_unit(decl) - || !cx.effective_visibilities.is_exported(item_id) - || is_must_use_ty(cx, return_ty(cx, cx.tcx.hir().local_def_id_to_hir_id(item_id))) + || !cx.effective_visibilities.is_exported(item_id.def_id) + || is_must_use_ty(cx, return_ty(cx, item_id)) { return; } diff --git a/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs b/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs index cdb5e22e75982..a13909a2cdb8f 100644 --- a/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs +++ b/clippy_lints/src/functions/not_unsafe_ptr_arg_deref.rs @@ -17,7 +17,7 @@ pub(super) fn check_fn<'tcx>( kind: intravisit::FnKind<'tcx>, decl: &'tcx hir::FnDecl<'tcx>, body: &'tcx hir::Body<'tcx>, - hir_id: hir::HirId, + def_id: LocalDefId, ) { let unsafety = match kind { intravisit::FnKind::ItemFn(_, _, hir::FnHeader { unsafety, .. }) => unsafety, @@ -25,7 +25,7 @@ pub(super) fn check_fn<'tcx>( intravisit::FnKind::Closure => return, }; - check_raw_ptr(cx, unsafety, decl, body, cx.tcx.hir().local_def_id(hir_id)); + check_raw_ptr(cx, unsafety, decl, body, def_id) } pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) { diff --git a/clippy_lints/src/future_not_send.rs b/clippy_lints/src/future_not_send.rs index 2a79b18b82994..9fb73a371b8f4 100644 --- a/clippy_lints/src/future_not_send.rs +++ b/clippy_lints/src/future_not_send.rs @@ -1,11 +1,12 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::return_ty; use rustc_hir::intravisit::FnKind; -use rustc_hir::{Body, FnDecl, HirId}; +use rustc_hir::{Body, FnDecl}; use rustc_infer::infer::TyCtxtInferExt; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, AliasTy, Clause, EarlyBinder, PredicateKind}; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::{sym, Span}; use rustc_trait_selection::traits::error_reporting::suggestions::TypeErrCtxtExt; use rustc_trait_selection::traits::{self, FulfillmentError}; @@ -56,12 +57,12 @@ impl<'tcx> LateLintPass<'tcx> for FutureNotSend { decl: &'tcx FnDecl<'tcx>, _: &'tcx Body<'tcx>, _: Span, - hir_id: HirId, + fn_def_id: LocalDefId, ) { if let FnKind::Closure = kind { return; } - let ret_ty = return_ty(cx, hir_id); + let ret_ty = return_ty(cx, cx.tcx.hir().local_def_id_to_hir_id(fn_def_id).expect_owner()); if let ty::Alias(ty::Opaque, AliasTy { def_id, substs, .. 
}) = *ret_ty.kind() { let preds = cx.tcx.explicit_item_bounds(def_id); let mut is_future = false; @@ -78,8 +79,7 @@ impl<'tcx> LateLintPass<'tcx> for FutureNotSend { let send_trait = cx.tcx.get_diagnostic_item(sym::Send).unwrap(); let span = decl.output.span(); let infcx = cx.tcx.infer_ctxt().build(); - let def_id = cx.tcx.hir().local_def_id(hir_id); - let cause = traits::ObligationCause::misc(span, def_id); + let cause = traits::ObligationCause::misc(span, fn_def_id); let send_errors = traits::fully_solve_bound(&infcx, cause, cx.param_env, ret_ty, send_trait); if !send_errors.is_empty() { span_lint_and_then( diff --git a/clippy_lints/src/implicit_return.rs b/clippy_lints/src/implicit_return.rs index 946d04eff6f98..372b6ead3fe46 100644 --- a/clippy_lints/src/implicit_return.rs +++ b/clippy_lints/src/implicit_return.rs @@ -11,6 +11,7 @@ use rustc_hir::{Block, Body, Expr, ExprKind, FnDecl, FnRetTy, HirId}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::{Span, SyntaxContext}; declare_clippy_lint! { @@ -223,7 +224,7 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitReturn { decl: &'tcx FnDecl<'_>, body: &'tcx Body<'_>, span: Span, - _: HirId, + _: LocalDefId, ) { if (!matches!(kind, FnKind::Closure) && matches!(decl.output, FnRetTy::DefaultReturn(_))) || span.ctxt() != body.value.span.ctxt() diff --git a/clippy_lints/src/inherent_to_string.rs b/clippy_lints/src/inherent_to_string.rs index 612c3ea8fdfd8..d43e5cc9b2c3d 100644 --- a/clippy_lints/src/inherent_to_string.rs +++ b/clippy_lints/src/inherent_to_string.rs @@ -105,7 +105,7 @@ impl<'tcx> LateLintPass<'tcx> for InherentToString { if impl_item.generics.params.iter().all(|p| matches!(p.kind, GenericParamKind::Lifetime { .. })); // Check if return type is String - if is_type_lang_item(cx, return_ty(cx, impl_item.hir_id()), LangItem::String); + if is_type_lang_item(cx, return_ty(cx, impl_item.owner_id), LangItem::String); // Filters instances of to_string which are required by a trait if trait_ref_of_method(cx, impl_item.owner_id.def_id).is_none(); diff --git a/clippy_lints/src/lifetimes.rs b/clippy_lints/src/lifetimes.rs index 7cf1a6b8084a6..747a94ba5a6ed 100644 --- a/clippy_lints/src/lifetimes.rs +++ b/clippy_lints/src/lifetimes.rs @@ -15,7 +15,6 @@ use rustc_hir::{ }; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter as middle_nested_filter; -use rustc_middle::ty::TyCtxt; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; @@ -154,7 +153,7 @@ fn check_fn_inner<'tcx>( .filter(|param| matches!(param.kind, GenericParamKind::Type { .. })); for typ in types { - for pred in generics.bounds_for_param(cx.tcx.hir().local_def_id(typ.hir_id)) { + for pred in generics.bounds_for_param(typ.def_id) { if pred.origin == PredicateOrigin::WhereClause { // has_where_lifetimes checked that this predicate contains no lifetime. continue; @@ -251,7 +250,7 @@ fn could_use_elision<'tcx>( // level of the current item. 
// check named LTs - let allowed_lts = allowed_lts_from(cx.tcx, named_generics); + let allowed_lts = allowed_lts_from(named_generics); // these will collect all the lifetimes for references in arg/return types let mut input_visitor = RefVisitor::new(cx); @@ -360,11 +359,11 @@ fn could_use_elision<'tcx>( } } -fn allowed_lts_from(tcx: TyCtxt<'_>, named_generics: &[GenericParam<'_>]) -> FxHashSet { +fn allowed_lts_from(named_generics: &[GenericParam<'_>]) -> FxHashSet { let mut allowed_lts = FxHashSet::default(); for par in named_generics.iter() { if let GenericParamKind::Lifetime { .. } = par.kind { - allowed_lts.insert(RefLt::Named(tcx.hir().local_def_id(par.hir_id))); + allowed_lts.insert(RefLt::Named(par.def_id)); } } allowed_lts.insert(RefLt::Unnamed); @@ -516,7 +515,7 @@ fn has_where_lifetimes<'tcx>(cx: &LateContext<'tcx>, generics: &'tcx Generics<'_ return true; } // if the bounds define new lifetimes, they are fine to occur - let allowed_lts = allowed_lts_from(cx.tcx, pred.bound_generic_params); + let allowed_lts = allowed_lts_from(pred.bound_generic_params); // now walk the bounds for bound in pred.bounds.iter() { walk_param_bound(&mut visitor, bound); diff --git a/clippy_lints/src/manual_async_fn.rs b/clippy_lints/src/manual_async_fn.rs index 63212beaa63dd..3778eb4c732d7 100644 --- a/clippy_lints/src/manual_async_fn.rs +++ b/clippy_lints/src/manual_async_fn.rs @@ -6,10 +6,11 @@ use rustc_errors::Applicability; use rustc_hir::intravisit::FnKind; use rustc_hir::{ AsyncGeneratorKind, Block, Body, Closure, Expr, ExprKind, FnDecl, FnRetTy, GeneratorKind, GenericArg, GenericBound, - HirId, ItemKind, LifetimeName, Term, TraitRef, Ty, TyKind, TypeBindingKind, + ItemKind, LifetimeName, Term, TraitRef, Ty, TyKind, TypeBindingKind, }; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::{sym, Span}; declare_clippy_lint! { @@ -45,7 +46,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualAsyncFn { decl: &'tcx FnDecl<'_>, body: &'tcx Body<'_>, span: Span, - _: HirId, + _: LocalDefId, ) { if_chain! { if let Some(header) = kind.header(); diff --git a/clippy_lints/src/manual_non_exhaustive.rs b/clippy_lints/src/manual_non_exhaustive.rs index bca193be9e711..7dfa155c5c35f 100644 --- a/clippy_lints/src/manual_non_exhaustive.rs +++ b/clippy_lints/src/manual_non_exhaustive.rs @@ -157,11 +157,10 @@ impl<'tcx> LateLintPass<'tcx> for ManualNonExhaustiveEnum { && def.variants.len() > 1 { let mut iter = def.variants.iter().filter_map(|v| { - let id = cx.tcx.hir().local_def_id(v.hir_id); (matches!(v.data, hir::VariantData::Unit(..)) && v.ident.as_str().starts_with('_') && is_doc_hidden(cx.tcx.hir().attrs(v.hir_id))) - .then_some((id, v.span)) + .then_some((v.def_id, v.span)) }); if let Some((id, span)) = iter.next() && iter.next().is_none() diff --git a/clippy_lints/src/methods/mod.rs b/clippy_lints/src/methods/mod.rs index 0c465e5daf9fd..fb94dfa5980b7 100644 --- a/clippy_lints/src/methods/mod.rs +++ b/clippy_lints/src/methods/mod.rs @@ -3412,7 +3412,7 @@ impl<'tcx> LateLintPass<'tcx> for Methods { } if let hir::ImplItemKind::Fn(_, _) = impl_item.kind { - let ret_ty = return_ty(cx, impl_item.hir_id()); + let ret_ty = return_ty(cx, impl_item.owner_id); if contains_ty_adt_constructor_opaque(cx, ret_ty, self_ty) { return; @@ -3460,7 +3460,7 @@ impl<'tcx> LateLintPass<'tcx> for Methods { if_chain! 
{ if item.ident.name == sym::new; if let TraitItemKind::Fn(_, _) = item.kind; - let ret_ty = return_ty(cx, item.hir_id()); + let ret_ty = return_ty(cx, item.owner_id); let self_ty = TraitRef::identity(cx.tcx, item.owner_id.to_def_id()) .self_ty() .skip_binder(); diff --git a/clippy_lints/src/methods/suspicious_map.rs b/clippy_lints/src/methods/suspicious_map.rs index 2ac0786b37b1e..6050226434f27 100644 --- a/clippy_lints/src/methods/suspicious_map.rs +++ b/clippy_lints/src/methods/suspicious_map.rs @@ -12,9 +12,8 @@ pub fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, count_recv: &hir::Expr< if_chain! { if is_trait_method(cx, count_recv, sym::Iterator); let closure = expr_or_init(cx, map_arg); - if let Some(def_id) = cx.tcx.hir().opt_local_def_id(closure.hir_id); - if let Some(body_id) = cx.tcx.hir().maybe_body_owned_by(def_id); - let closure_body = cx.tcx.hir().body(body_id); + if let hir::ExprKind::Closure(closure) = closure.kind; + let closure_body = cx.tcx.hir().body(closure.body); if !cx.typeck_results().expr_ty(closure_body.value).is_unit(); then { if let Some(map_mutated_vars) = mutated_variables(closure_body.value, cx) { diff --git a/clippy_lints/src/methods/unnecessary_to_owned.rs b/clippy_lints/src/methods/unnecessary_to_owned.rs index 12e053cb2134d..4e5af1c7c7124 100644 --- a/clippy_lints/src/methods/unnecessary_to_owned.rs +++ b/clippy_lints/src/methods/unnecessary_to_owned.rs @@ -368,10 +368,9 @@ fn can_change_type<'a>(cx: &LateContext<'a>, mut expr: &'a Expr<'a>, mut ty: Ty< Node::Block(..) => continue, Node::Item(item) => { if let ItemKind::Fn(_, _, body_id) = &item.kind - && let output_ty = return_ty(cx, item.hir_id()) - && let local_def_id = cx.tcx.hir().local_def_id(item.hir_id()) - && Inherited::build(cx.tcx, local_def_id).enter(|inherited| { - let fn_ctxt = FnCtxt::new(inherited, cx.param_env, local_def_id); + && let output_ty = return_ty(cx, item.owner_id) + && Inherited::build(cx.tcx, item.owner_id.def_id).enter(|inherited| { + let fn_ctxt = FnCtxt::new(inherited, cx.param_env, item.owner_id.def_id); fn_ctxt.can_coerce(ty, output_ty) }) { if has_lifetime(output_ty) && has_lifetime(ty) { diff --git a/clippy_lints/src/misc.rs b/clippy_lints/src/misc.rs index 9f4beb92b9d2e..0705029a613bb 100644 --- a/clippy_lints/src/misc.rs +++ b/clippy_lints/src/misc.rs @@ -4,12 +4,13 @@ use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::intravisit::FnKind; use rustc_hir::{ - self as hir, def, BinOpKind, BindingAnnotation, Body, ByRef, Expr, ExprKind, FnDecl, HirId, Mutability, PatKind, - Stmt, StmtKind, TyKind, + self as hir, def, BinOpKind, BindingAnnotation, Body, ByRef, Expr, ExprKind, FnDecl, Mutability, PatKind, Stmt, + StmtKind, TyKind, }; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::lint::in_external_macro; use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::def_id::LocalDefId; use rustc_span::hygiene::DesugaringKind; use rustc_span::source_map::{ExpnKind, Span}; @@ -151,7 +152,7 @@ impl<'tcx> LateLintPass<'tcx> for LintPass { decl: &'tcx FnDecl<'_>, body: &'tcx Body<'_>, span: Span, - _: HirId, + _: LocalDefId, ) { if let FnKind::Closure = k { // Does not apply to closures diff --git a/clippy_lints/src/missing_const_for_fn.rs b/clippy_lints/src/missing_const_for_fn.rs index 5bc04bc17fb4f..87bd007a26a24 100644 --- a/clippy_lints/src/missing_const_for_fn.rs +++ b/clippy_lints/src/missing_const_for_fn.rs @@ -6,11 +6,12 @@ use clippy_utils::{fn_has_unsatisfiable_preds, is_entrypoint_fn, is_from_proc_ma use rustc_hir 
as hir; use rustc_hir::def_id::CRATE_DEF_ID; use rustc_hir::intravisit::FnKind; -use rustc_hir::{Body, Constness, FnDecl, GenericParamKind, HirId}; +use rustc_hir::{Body, Constness, FnDecl, GenericParamKind}; use rustc_hir_analysis::hir_ty_to_ty; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::lint::in_external_macro; use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::def_id::LocalDefId; use rustc_span::Span; declare_clippy_lint! { @@ -91,14 +92,12 @@ impl<'tcx> LateLintPass<'tcx> for MissingConstForFn { _: &FnDecl<'_>, body: &Body<'tcx>, span: Span, - hir_id: HirId, + def_id: LocalDefId, ) { if !self.msrv.meets(msrvs::CONST_IF_MATCH) { return; } - let def_id = cx.tcx.hir().local_def_id(hir_id); - if in_external_macro(cx.tcx.sess, span) || is_entrypoint_fn(cx, def_id.to_def_id()) { return; } @@ -132,6 +131,8 @@ impl<'tcx> LateLintPass<'tcx> for MissingConstForFn { FnKind::Closure => return, } + let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id); + // Const fns are not allowed as methods in a trait. { let parent = cx.tcx.hir().get_parent_item(hir_id).def_id; diff --git a/clippy_lints/src/mut_key.rs b/clippy_lints/src/mut_key.rs index 16947cd5e3548..5f7aac21e6eb0 100644 --- a/clippy_lints/src/mut_key.rs +++ b/clippy_lints/src/mut_key.rs @@ -6,6 +6,7 @@ use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::TypeVisitable; use rustc_middle::ty::{Adt, Array, Ref, Slice, Tuple, Ty}; use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; use rustc_span::symbol::sym; use std::iter; @@ -102,21 +103,21 @@ impl<'tcx> LateLintPass<'tcx> for MutableKeyType { fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) { if let hir::ItemKind::Fn(ref sig, ..) = item.kind { - self.check_sig(cx, item.hir_id(), sig.decl); + self.check_sig(cx, item.owner_id.def_id, sig.decl); } } fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'tcx>) { if let hir::ImplItemKind::Fn(ref sig, ..) = item.kind { if trait_ref_of_method(cx, item.owner_id.def_id).is_none() { - self.check_sig(cx, item.hir_id(), sig.decl); + self.check_sig(cx, item.owner_id.def_id, sig.decl); } } } fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'tcx>) { if let hir::TraitItemKind::Fn(ref sig, ..) 
= item.kind { - self.check_sig(cx, item.hir_id(), sig.decl); + self.check_sig(cx, item.owner_id.def_id, sig.decl); } } @@ -136,8 +137,7 @@ impl MutableKeyType { } } - fn check_sig(&self, cx: &LateContext<'_>, item_hir_id: hir::HirId, decl: &hir::FnDecl<'_>) { - let fn_def_id = cx.tcx.hir().local_def_id(item_hir_id); + fn check_sig(&self, cx: &LateContext<'_>, fn_def_id: LocalDefId, decl: &hir::FnDecl<'_>) { let fn_sig = cx.tcx.fn_sig(fn_def_id).subst_identity(); for (hir_ty, ty) in iter::zip(decl.inputs, fn_sig.inputs().skip_binder()) { self.check_ty_(cx, hir_ty.span, *ty); diff --git a/clippy_lints/src/needless_pass_by_value.rs b/clippy_lints/src/needless_pass_by_value.rs index 25ec9082c7076..996ea6ed72316 100644 --- a/clippy_lints/src/needless_pass_by_value.rs +++ b/clippy_lints/src/needless_pass_by_value.rs @@ -20,6 +20,7 @@ use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::mir::FakeReadCause; use rustc_middle::ty::{self, TypeVisitable}; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::symbol::kw; use rustc_span::{sym, Span}; use rustc_target::spec::abi::Abi; @@ -82,12 +83,14 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { decl: &'tcx FnDecl<'_>, body: &'tcx Body<'_>, span: Span, - hir_id: HirId, + fn_def_id: LocalDefId, ) { if span.from_expansion() { return; } + let hir_id = cx.tcx.hir().local_def_id_to_hir_id(fn_def_id); + match kind { FnKind::ItemFn(.., header) => { let attrs = cx.tcx.hir().attrs(hir_id); @@ -119,8 +122,6 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { let sized_trait = need!(cx.tcx.lang_items().sized_trait()); - let fn_def_id = cx.tcx.hir().local_def_id(hir_id); - let preds = traits::elaborate_predicates(cx.tcx, cx.param_env.caller_bounds().iter()) .filter(|p| !p.is_global()) .filter_map(|obligation| { diff --git a/clippy_lints/src/new_without_default.rs b/clippy_lints/src/new_without_default.rs index 54a3c82b713da..faf9ec61ec504 100644 --- a/clippy_lints/src/new_without_default.rs +++ b/clippy_lints/src/new_without_default.rs @@ -75,7 +75,7 @@ impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault { } if let hir::ImplItemKind::Fn(ref sig, _) = impl_item.kind { let name = impl_item.ident.name; - let id = impl_item.hir_id(); + let id = impl_item.owner_id; if sig.header.constness == hir::Constness::Const { // can't be implemented by default return; @@ -97,7 +97,7 @@ impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault { if sig.decl.inputs.is_empty(); if name == sym::new; if cx.effective_visibilities.is_reachable(impl_item.owner_id.def_id); - let self_def_id = cx.tcx.hir().get_parent_item(id); + let self_def_id = cx.tcx.hir().get_parent_item(id.into()); let self_ty = cx.tcx.type_of(self_def_id); if self_ty == return_ty(cx, id); if let Some(default_trait_id) = cx.tcx.get_diagnostic_item(sym::Default); @@ -133,7 +133,7 @@ impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault { span_lint_hir_and_then( cx, NEW_WITHOUT_DEFAULT, - id, + id.into(), impl_item.span, &format!( "you should consider adding a `Default` implementation for `{self_type_snip}`" diff --git a/clippy_lints/src/operators/arithmetic_side_effects.rs b/clippy_lints/src/operators/arithmetic_side_effects.rs index cff82b875f11a..d592f6e814c1d 100644 --- a/clippy_lints/src/operators/arithmetic_side_effects.rs +++ b/clippy_lints/src/operators/arithmetic_side_effects.rs @@ -209,7 +209,8 @@ impl<'tcx> LateLintPass<'tcx> for ArithmeticSideEffects { fn check_body(&mut self, cx: &LateContext<'_>, body: &hir::Body<'_>) { let 
body_owner = cx.tcx.hir().body_owner(body.id()); - let body_owner_def_id = cx.tcx.hir().local_def_id(body_owner); + let body_owner_def_id = cx.tcx.hir().body_owner_def_id(body.id()); + let body_owner_kind = cx.tcx.hir().body_owner_kind(body_owner_def_id); if let hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) = body_owner_kind { let body_span = cx.tcx.hir().span_with_body(body_owner); diff --git a/clippy_lints/src/operators/numeric_arithmetic.rs b/clippy_lints/src/operators/numeric_arithmetic.rs index 0830a106f5568..777395f452c92 100644 --- a/clippy_lints/src/operators/numeric_arithmetic.rs +++ b/clippy_lints/src/operators/numeric_arithmetic.rs @@ -96,7 +96,7 @@ impl Context { pub fn enter_body(&mut self, cx: &LateContext<'_>, body: &hir::Body<'_>) { let body_owner = cx.tcx.hir().body_owner(body.id()); - let body_owner_def_id = cx.tcx.hir().local_def_id(body_owner); + let body_owner_def_id = cx.tcx.hir().body_owner_def_id(body.id()); match cx.tcx.hir().body_owner_kind(body_owner_def_id) { hir::BodyOwnerKind::Static(_) | hir::BodyOwnerKind::Const => { diff --git a/clippy_lints/src/panic_in_result_fn.rs b/clippy_lints/src/panic_in_result_fn.rs index efec12489a9ba..849cd03dd7bf6 100644 --- a/clippy_lints/src/panic_in_result_fn.rs +++ b/clippy_lints/src/panic_in_result_fn.rs @@ -8,6 +8,7 @@ use rustc_hir as hir; use rustc_hir::intravisit::FnKind; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::{sym, Span}; declare_clippy_lint! { @@ -49,9 +50,13 @@ impl<'tcx> LateLintPass<'tcx> for PanicInResultFn { _: &'tcx hir::FnDecl<'tcx>, body: &'tcx hir::Body<'tcx>, span: Span, - hir_id: hir::HirId, + def_id: LocalDefId, ) { - if !matches!(fn_kind, FnKind::Closure) && is_type_diagnostic_item(cx, return_ty(cx, hir_id), sym::Result) { + if matches!(fn_kind, FnKind::Closure) { + return; + } + let owner = cx.tcx.hir().local_def_id_to_hir_id(def_id).expect_owner(); + if is_type_diagnostic_item(cx, return_ty(cx, owner), sym::Result) { lint_impl_body(cx, span, body); } } diff --git a/clippy_lints/src/pass_by_ref_or_value.rs b/clippy_lints/src/pass_by_ref_or_value.rs index 954eeba751ffa..0d78c3048ba12 100644 --- a/clippy_lints/src/pass_by_ref_or_value.rs +++ b/clippy_lints/src/pass_by_ref_or_value.rs @@ -12,7 +12,7 @@ use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_hir::intravisit::FnKind; -use rustc_hir::{BindingAnnotation, Body, FnDecl, HirId, Impl, ItemKind, MutTy, Mutability, Node, PatKind}; +use rustc_hir::{BindingAnnotation, Body, FnDecl, Impl, ItemKind, MutTy, Mutability, Node, PatKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::adjustment::{Adjust, PointerCast}; use rustc_middle::ty::layout::LayoutOf; @@ -272,12 +272,13 @@ impl<'tcx> LateLintPass<'tcx> for PassByRefOrValue { decl: &'tcx FnDecl<'_>, _body: &'tcx Body<'_>, span: Span, - hir_id: HirId, + def_id: LocalDefId, ) { if span.from_expansion() { return; } + let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id); match kind { FnKind::ItemFn(.., header) => { if header.abi != Abi::Rust { @@ -308,6 +309,6 @@ impl<'tcx> LateLintPass<'tcx> for PassByRefOrValue { } } - self.check_poly_fn(cx, cx.tcx.hir().local_def_id(hir_id), decl, Some(span)); + self.check_poly_fn(cx, def_id, decl, Some(span)); } } diff --git a/clippy_lints/src/pattern_type_mismatch.rs b/clippy_lints/src/pattern_type_mismatch.rs index 97b5a4ce36413..9f98195d311fe 100644 --- 
a/clippy_lints/src/pattern_type_mismatch.rs +++ b/clippy_lints/src/pattern_type_mismatch.rs @@ -1,11 +1,10 @@ use clippy_utils::diagnostics::span_lint_and_help; -use rustc_hir::{ - intravisit, Body, Expr, ExprKind, FnDecl, HirId, Let, LocalSource, Mutability, Pat, PatKind, Stmt, StmtKind, -}; +use rustc_hir::{intravisit, Body, Expr, ExprKind, FnDecl, Let, LocalSource, Mutability, Pat, PatKind, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_middle::ty; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; declare_clippy_lint! { @@ -116,7 +115,7 @@ impl<'tcx> LateLintPass<'tcx> for PatternTypeMismatch { _: &'tcx FnDecl<'_>, body: &'tcx Body<'_>, _: Span, - _: HirId, + _: LocalDefId, ) { for param in body.params { apply_lint(cx, param.pat, DerefPossible::Impossible); diff --git a/clippy_lints/src/redundant_clone.rs b/clippy_lints/src/redundant_clone.rs index c1677fb3da1c4..944a33cc3e53f 100644 --- a/clippy_lints/src/redundant_clone.rs +++ b/clippy_lints/src/redundant_clone.rs @@ -6,11 +6,12 @@ use clippy_utils::{fn_has_unsatisfiable_preds, match_def_path, paths}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::intravisit::FnKind; -use rustc_hir::{def_id, Body, FnDecl, HirId, LangItem}; +use rustc_hir::{def_id, Body, FnDecl, LangItem}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::mir; use rustc_middle::ty::{self, Ty}; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::source_map::{BytePos, Span}; use rustc_span::sym; @@ -69,12 +70,10 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClone { cx: &LateContext<'tcx>, _: FnKind<'tcx>, _: &'tcx FnDecl<'_>, - body: &'tcx Body<'_>, + _: &'tcx Body<'_>, _: Span, - _: HirId, + def_id: LocalDefId, ) { - let def_id = cx.tcx.hir().body_owner_def_id(body.id()); - // Building MIR for `fn`s with unsatisfiable preds results in ICE. if fn_has_unsatisfiable_preds(cx, def_id.to_def_id()) { return; diff --git a/clippy_lints/src/return_self_not_must_use.rs b/clippy_lints/src/return_self_not_must_use.rs index b77faf7322bd0..8c39b4fc5691b 100644 --- a/clippy_lints/src/return_self_not_must_use.rs +++ b/clippy_lints/src/return_self_not_must_use.rs @@ -3,7 +3,7 @@ use clippy_utils::ty::is_must_use_ty; use clippy_utils::{nth_arg, return_ty}; use rustc_hir::def_id::LocalDefId; use rustc_hir::intravisit::FnKind; -use rustc_hir::{Body, FnDecl, HirId, TraitItem, TraitItemKind}; +use rustc_hir::{Body, FnDecl, OwnerId, TraitItem, TraitItemKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_session::{declare_lint_pass, declare_tool_lint}; @@ -68,7 +68,7 @@ declare_clippy_lint! { declare_lint_pass!(ReturnSelfNotMustUse => [RETURN_SELF_NOT_MUST_USE]); -fn check_method(cx: &LateContext<'_>, decl: &FnDecl<'_>, fn_def: LocalDefId, span: Span, hir_id: HirId) { +fn check_method(cx: &LateContext<'_>, decl: &FnDecl<'_>, fn_def: LocalDefId, span: Span, owner_id: OwnerId) { if_chain! { // If it comes from an external macro, better ignore it. if !in_external_macro(cx.sess(), span); @@ -76,10 +76,10 @@ fn check_method(cx: &LateContext<'_>, decl: &FnDecl<'_>, fn_def: LocalDefId, spa // We only show this warning for public exported methods. if cx.effective_visibilities.is_exported(fn_def); // We don't want to emit this lint if the `#[must_use]` attribute is already there. 
- if !cx.tcx.hir().attrs(hir_id).iter().any(|attr| attr.has_name(sym::must_use)); + if !cx.tcx.hir().attrs(owner_id.into()).iter().any(|attr| attr.has_name(sym::must_use)); if cx.tcx.visibility(fn_def.to_def_id()).is_public(); - let ret_ty = return_ty(cx, hir_id); - let self_arg = nth_arg(cx, hir_id, 0); + let ret_ty = return_ty(cx, owner_id.into()); + let self_arg = nth_arg(cx, owner_id.into(), 0); // If `Self` has the same type as the returned type, then we want to warn. // // For this check, we don't want to remove the reference on the returned type because if @@ -109,26 +109,26 @@ impl<'tcx> LateLintPass<'tcx> for ReturnSelfNotMustUse { decl: &'tcx FnDecl<'tcx>, _: &'tcx Body<'tcx>, span: Span, - hir_id: HirId, + fn_def: LocalDefId, ) { if_chain! { // We are only interested in methods, not in functions or associated functions. if matches!(kind, FnKind::Method(_, _)); - if let Some(fn_def) = cx.tcx.hir().opt_local_def_id(hir_id); if let Some(impl_def) = cx.tcx.impl_of_method(fn_def.to_def_id()); // We don't want this method to be te implementation of a trait because the // `#[must_use]` should be put on the trait definition directly. if cx.tcx.trait_id_of_impl(impl_def).is_none(); then { - check_method(cx, decl, fn_def, span, hir_id); + let hir_id = cx.tcx.hir().local_def_id_to_hir_id(fn_def); + check_method(cx, decl, fn_def, span, hir_id.expect_owner()); } } } fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx TraitItem<'tcx>) { if let TraitItemKind::Fn(ref sig, _) = item.kind { - check_method(cx, sig.decl, item.owner_id.def_id, item.span, item.hir_id()); + check_method(cx, sig.decl, item.owner_id.def_id, item.span, item.owner_id); } } } diff --git a/clippy_lints/src/returns.rs b/clippy_lints/src/returns.rs index a3e0811700bee..84a0c6b955853 100644 --- a/clippy_lints/src/returns.rs +++ b/clippy_lints/src/returns.rs @@ -6,11 +6,12 @@ use core::ops::ControlFlow; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::intravisit::FnKind; -use rustc_hir::{Block, Body, Expr, ExprKind, FnDecl, HirId, LangItem, MatchSource, PatKind, QPath, StmtKind}; +use rustc_hir::{Block, Body, Expr, ExprKind, FnDecl, LangItem, MatchSource, PatKind, QPath, StmtKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_middle::ty::subst::GenericArgKind; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; use rustc_span::{BytePos, Pos}; @@ -152,7 +153,7 @@ impl<'tcx> LateLintPass<'tcx> for Return { _: &'tcx FnDecl<'tcx>, body: &'tcx Body<'tcx>, sp: Span, - _: HirId, + _: LocalDefId, ) { match kind { FnKind::Closure => { diff --git a/clippy_lints/src/self_named_constructors.rs b/clippy_lints/src/self_named_constructors.rs index 71b387c66a330..3ce030cd721a7 100644 --- a/clippy_lints/src/self_named_constructors.rs +++ b/clippy_lints/src/self_named_constructors.rs @@ -54,7 +54,7 @@ impl<'tcx> LateLintPass<'tcx> for SelfNamedConstructors { let parent = cx.tcx.hir().get_parent_item(impl_item.hir_id()).def_id; let item = cx.tcx.hir().expect_item(parent); let self_ty = cx.tcx.type_of(item.owner_id); - let ret_ty = return_ty(cx, impl_item.hir_id()); + let ret_ty = return_ty(cx, impl_item.owner_id); // Do not check trait impls if matches!(item.kind, ItemKind::Impl(Impl { of_trait: Some(_), .. 
})) { diff --git a/clippy_lints/src/trailing_empty_array.rs b/clippy_lints/src/trailing_empty_array.rs index 63b326048a48f..de0c5d56e4156 100644 --- a/clippy_lints/src/trailing_empty_array.rs +++ b/clippy_lints/src/trailing_empty_array.rs @@ -61,8 +61,7 @@ fn is_struct_with_trailing_zero_sized_array(cx: &LateContext<'_>, item: &Item<'_ if let rustc_hir::TyKind::Array(_, rustc_hir::ArrayLen::Body(length)) = last_field.ty.kind; // Then check if that that array zero-sized - let length_ldid = cx.tcx.hir().local_def_id(length.hir_id); - let length = Const::from_anon_const(cx.tcx, length_ldid); + let length = Const::from_anon_const(cx.tcx, length.def_id); let length = length.try_eval_usize(cx.tcx, cx.param_env); if let Some(length) = length; then { diff --git a/clippy_lints/src/types/mod.rs b/clippy_lints/src/types/mod.rs index 229478b7ce3c9..585e2075fa904 100644 --- a/clippy_lints/src/types/mod.rs +++ b/clippy_lints/src/types/mod.rs @@ -12,11 +12,12 @@ mod vec_box; use rustc_hir as hir; use rustc_hir::intravisit::FnKind; use rustc_hir::{ - Body, FnDecl, FnRetTy, GenericArg, HirId, ImplItem, ImplItemKind, Item, ItemKind, Local, MutTy, QPath, TraitItem, + Body, FnDecl, FnRetTy, GenericArg, ImplItem, ImplItemKind, Item, ItemKind, Local, MutTy, QPath, TraitItem, TraitItemKind, TyKind, }; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; declare_clippy_lint! { @@ -311,15 +312,27 @@ pub struct Types { impl_lint_pass!(Types => [BOX_COLLECTION, VEC_BOX, OPTION_OPTION, LINKEDLIST, BORROWED_BOX, REDUNDANT_ALLOCATION, RC_BUFFER, RC_MUTEX, TYPE_COMPLEXITY]); impl<'tcx> LateLintPass<'tcx> for Types { - fn check_fn(&mut self, cx: &LateContext<'_>, _: FnKind<'_>, decl: &FnDecl<'_>, _: &Body<'_>, _: Span, id: HirId) { - let is_in_trait_impl = - if let Some(hir::Node::Item(item)) = cx.tcx.hir().find_by_def_id(cx.tcx.hir().get_parent_item(id).def_id) { - matches!(item.kind, ItemKind::Impl(hir::Impl { of_trait: Some(_), .. })) - } else { - false - }; + fn check_fn( + &mut self, + cx: &LateContext<'_>, + _: FnKind<'_>, + decl: &FnDecl<'_>, + _: &Body<'_>, + _: Span, + def_id: LocalDefId, + ) { + let is_in_trait_impl = if let Some(hir::Node::Item(item)) = cx.tcx.hir().find_by_def_id( + cx.tcx + .hir() + .get_parent_item(cx.tcx.hir().local_def_id_to_hir_id(def_id)) + .def_id, + ) { + matches!(item.kind, ItemKind::Impl(hir::Impl { of_trait: Some(_), .. 
})) + } else { + false + }; - let is_exported = cx.effective_visibilities.is_exported(cx.tcx.hir().local_def_id(id)); + let is_exported = cx.effective_visibilities.is_exported(def_id); self.check_fn_decl( cx, @@ -381,7 +394,7 @@ impl<'tcx> LateLintPass<'tcx> for Types { fn check_field_def(&mut self, cx: &LateContext<'_>, field: &hir::FieldDef<'_>) { let is_exported = cx .effective_visibilities - .is_exported(cx.tcx.hir().local_def_id(field.hir_id)); + .is_exported(field.def_id); self.check_ty( cx, diff --git a/clippy_lints/src/unnecessary_wraps.rs b/clippy_lints/src/unnecessary_wraps.rs index 84ec0d0fb1cf4..8b0e0ce5a3001 100644 --- a/clippy_lints/src/unnecessary_wraps.rs +++ b/clippy_lints/src/unnecessary_wraps.rs @@ -5,10 +5,11 @@ use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::intravisit::FnKind; use rustc_hir::LangItem::{OptionSome, ResultOk}; -use rustc_hir::{Body, ExprKind, FnDecl, HirId, Impl, ItemKind, Node}; +use rustc_hir::{Body, ExprKind, FnDecl, Impl, ItemKind, Node}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty; use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::def_id::LocalDefId; use rustc_span::symbol::sym; use rustc_span::Span; @@ -77,12 +78,11 @@ impl<'tcx> LateLintPass<'tcx> for UnnecessaryWraps { fn_decl: &FnDecl<'tcx>, body: &Body<'tcx>, span: Span, - hir_id: HirId, + def_id: LocalDefId, ) { // Abort if public function/method or closure. match fn_kind { FnKind::ItemFn(..) | FnKind::Method(..) => { - let def_id = cx.tcx.hir().local_def_id(hir_id); if self.avoid_breaking_exported_api && cx.effective_visibilities.is_exported(def_id) { return; } @@ -91,6 +91,7 @@ impl<'tcx> LateLintPass<'tcx> for UnnecessaryWraps { } // Abort if the method is implementing a trait or of it a trait method. + let hir_id = cx.tcx.hir().local_def_id_to_hir_id(def_id); if let Some(Node::Item(item)) = cx.tcx.hir().find_parent(hir_id) { if matches!( item.kind, @@ -101,17 +102,18 @@ impl<'tcx> LateLintPass<'tcx> for UnnecessaryWraps { } // Get the wrapper and inner types, if can't, abort. - let (return_type_label, lang_item, inner_type) = if let ty::Adt(adt_def, subst) = return_ty(cx, hir_id).kind() { - if cx.tcx.is_diagnostic_item(sym::Option, adt_def.did()) { - ("Option", OptionSome, subst.type_at(0)) - } else if cx.tcx.is_diagnostic_item(sym::Result, adt_def.did()) { - ("Result", ResultOk, subst.type_at(0)) + let (return_type_label, lang_item, inner_type) = + if let ty::Adt(adt_def, subst) = return_ty(cx, hir_id.expect_owner()).kind() { + if cx.tcx.is_diagnostic_item(sym::Option, adt_def.did()) { + ("Option", OptionSome, subst.type_at(0)) + } else if cx.tcx.is_diagnostic_item(sym::Result, adt_def.did()) { + ("Result", ResultOk, subst.type_at(0)) + } else { + return; + } } else { return; - } - } else { - return; - }; + }; // Check if all return expression respect the following condition and collect them. 
let mut suggs = Vec::new(); diff --git a/clippy_lints/src/unused_async.rs b/clippy_lints/src/unused_async.rs index 3538bef6e0618..55651a28be924 100644 --- a/clippy_lints/src/unused_async.rs +++ b/clippy_lints/src/unused_async.rs @@ -1,9 +1,10 @@ use clippy_utils::diagnostics::span_lint_and_help; use rustc_hir::intravisit::{walk_expr, walk_fn, FnKind, Visitor}; -use rustc_hir::{Body, Expr, ExprKind, FnDecl, HirId, YieldSource}; +use rustc_hir::{Body, Expr, ExprKind, FnDecl, YieldSource}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::Span; declare_clippy_lint! { @@ -66,11 +67,11 @@ impl<'tcx> LateLintPass<'tcx> for UnusedAsync { fn_decl: &'tcx FnDecl<'tcx>, body: &Body<'tcx>, span: Span, - hir_id: HirId, + def_id: LocalDefId, ) { if !span.from_expansion() && fn_kind.asyncness().is_async() { let mut visitor = AsyncFnVisitor { cx, found_await: false }; - walk_fn(&mut visitor, fn_kind, fn_decl, body.id(), hir_id); + walk_fn(&mut visitor, fn_kind, fn_decl, body.id(), def_id); if !visitor.found_await { span_lint_and_help( cx, diff --git a/clippy_lints/src/unwrap.rs b/clippy_lints/src/unwrap.rs index ea878043c04e3..377d3fb6f4e1c 100644 --- a/clippy_lints/src/unwrap.rs +++ b/clippy_lints/src/unwrap.rs @@ -11,6 +11,7 @@ use rustc_middle::hir::nested_filter; use rustc_middle::lint::in_external_macro; use rustc_middle::ty::Ty; use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; use rustc_span::sym; @@ -312,7 +313,7 @@ impl<'tcx> LateLintPass<'tcx> for Unwrap { decl: &'tcx FnDecl<'_>, body: &'tcx Body<'_>, span: Span, - fn_id: HirId, + fn_id: LocalDefId, ) { if span.from_expansion() { return; diff --git a/clippy_lints/src/unwrap_in_result.rs b/clippy_lints/src/unwrap_in_result.rs index f3611d1743404..3a1845425a251 100644 --- a/clippy_lints/src/unwrap_in_result.rs +++ b/clippy_lints/src/unwrap_in_result.rs @@ -64,8 +64,8 @@ impl<'tcx> LateLintPass<'tcx> for UnwrapInResult { // first check if it's a method or function if let hir::ImplItemKind::Fn(ref _signature, _) = impl_item.kind; // checking if its return type is `result` or `option` - if is_type_diagnostic_item(cx, return_ty(cx, impl_item.hir_id()), sym::Result) - || is_type_diagnostic_item(cx, return_ty(cx, impl_item.hir_id()), sym::Option); + if is_type_diagnostic_item(cx, return_ty(cx, impl_item.owner_id), sym::Result) + || is_type_diagnostic_item(cx, return_ty(cx, impl_item.owner_id), sym::Option); then { lint_impl_body(cx, impl_item.span, impl_item); } diff --git a/clippy_utils/src/lib.rs b/clippy_utils/src/lib.rs index e2965146cfe6d..81c1a052b5869 100644 --- a/clippy_utils/src/lib.rs +++ b/clippy_utils/src/lib.rs @@ -1578,8 +1578,7 @@ pub fn is_direct_expn_of(span: Span, name: &str) -> Option { } /// Convenience function to get the return type of a function. -pub fn return_ty<'tcx>(cx: &LateContext<'tcx>, fn_item: hir::HirId) -> Ty<'tcx> { - let fn_def_id = cx.tcx.hir().local_def_id(fn_item); +pub fn return_ty<'tcx>(cx: &LateContext<'tcx>, fn_def_id: hir::OwnerId) -> Ty<'tcx> { let ret_ty = cx.tcx.fn_sig(fn_def_id).subst_identity().output(); cx.tcx.erase_late_bound_regions(ret_ty) } From 3e32533cc274348145db7e5c740acf751be9e5c5 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 5 Nov 2022 15:33:58 +0000 Subject: [PATCH 177/501] Remove `HirId -> LocalDefId` map from HIR. 
--- clippy_lints/src/dereference.rs | 6 +++--- clippy_lints/src/manual_non_exhaustive.rs | 2 +- clippy_lints/src/methods/suspicious_map.rs | 3 +-- clippy_utils/src/lib.rs | 8 +++----- clippy_utils/src/sugg.rs | 6 ++---- 5 files changed, 10 insertions(+), 15 deletions(-) diff --git a/clippy_lints/src/dereference.rs b/clippy_lints/src/dereference.rs index 8e921839e8b2f..6c333afacc648 100644 --- a/clippy_lints/src/dereference.rs +++ b/clippy_lints/src/dereference.rs @@ -778,20 +778,20 @@ fn walk_parents<'tcx>( Node::Expr(parent) if parent.span.ctxt() == ctxt => match parent.kind { ExprKind::Ret(_) => { - let owner_id = cx.tcx.hir().body_owner(cx.enclosing_body.unwrap()); + let owner_id = cx.tcx.hir().body_owner_def_id(cx.enclosing_body.unwrap()); Some( if let Node::Expr( closure_expr @ Expr { kind: ExprKind::Closure(closure), .. }, - ) = cx.tcx.hir().get(owner_id) + ) = cx.tcx.hir().get_by_def_id(owner_id) { closure_result_position(cx, closure, cx.typeck_results().expr_ty(closure_expr), precedence) } else { let output = cx .tcx - .erase_late_bound_regions(cx.tcx.fn_sig(cx.tcx.hir().local_def_id(owner_id)).subst_identity().output()); + .erase_late_bound_regions(cx.tcx.fn_sig(owner_id).subst_identity().output()); ty_auto_deref_stability(cx, output, precedence).position_for_result(cx) }, ) diff --git a/clippy_lints/src/manual_non_exhaustive.rs b/clippy_lints/src/manual_non_exhaustive.rs index 7dfa155c5c35f..9a84068d4487b 100644 --- a/clippy_lints/src/manual_non_exhaustive.rs +++ b/clippy_lints/src/manual_non_exhaustive.rs @@ -157,7 +157,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualNonExhaustiveEnum { && def.variants.len() > 1 { let mut iter = def.variants.iter().filter_map(|v| { - (matches!(v.data, hir::VariantData::Unit(..)) + (matches!(v.data, hir::VariantData::Unit(_, _)) && v.ident.as_str().starts_with('_') && is_doc_hidden(cx.tcx.hir().attrs(v.hir_id))) .then_some((v.def_id, v.span)) diff --git a/clippy_lints/src/methods/suspicious_map.rs b/clippy_lints/src/methods/suspicious_map.rs index 6050226434f27..0dc7fe2a2c5a3 100644 --- a/clippy_lints/src/methods/suspicious_map.rs +++ b/clippy_lints/src/methods/suspicious_map.rs @@ -11,8 +11,7 @@ use super::SUSPICIOUS_MAP; pub fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, count_recv: &hir::Expr<'_>, map_arg: &hir::Expr<'_>) { if_chain! { if is_trait_method(cx, count_recv, sym::Iterator); - let closure = expr_or_init(cx, map_arg); - if let hir::ExprKind::Closure(closure) = closure.kind; + if let hir::ExprKind::Closure(closure) = expr_or_init(cx, map_arg).kind; let closure_body = cx.tcx.hir().body(closure.body); if !cx.typeck_results().expr_ty(closure_body.value).is_unit(); then { diff --git a/clippy_utils/src/lib.rs b/clippy_utils/src/lib.rs index 81c1a052b5869..0db3b93e7cc9f 100644 --- a/clippy_utils/src/lib.rs +++ b/clippy_utils/src/lib.rs @@ -1119,9 +1119,8 @@ pub fn can_move_expr_to_closure<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<' self.captures.entry(l).and_modify(|e| *e |= cap).or_insert(cap); } }, - ExprKind::Closure { .. 
} => { - let closure_id = self.cx.tcx.hir().local_def_id(e.hir_id); - for capture in self.cx.typeck_results().closure_min_captures_flattened(closure_id) { + ExprKind::Closure(closure) => { + for capture in self.cx.typeck_results().closure_min_captures_flattened(closure.def_id) { let local_id = match capture.place.base { PlaceBase::Local(id) => id, PlaceBase::Upvar(var) => var.var_path.hir_id, @@ -1584,8 +1583,7 @@ pub fn return_ty<'tcx>(cx: &LateContext<'tcx>, fn_def_id: hir::OwnerId) -> Ty<'t } /// Convenience function to get the nth argument type of a function. -pub fn nth_arg<'tcx>(cx: &LateContext<'tcx>, fn_item: hir::HirId, nth: usize) -> Ty<'tcx> { - let fn_def_id = cx.tcx.hir().local_def_id(fn_item); +pub fn nth_arg<'tcx>(cx: &LateContext<'tcx>, fn_def_id: hir::OwnerId, nth: usize) -> Ty<'tcx> { let arg = cx.tcx.fn_sig(fn_def_id).subst_identity().input(nth); cx.tcx.erase_late_bound_regions(arg) } diff --git a/clippy_utils/src/sugg.rs b/clippy_utils/src/sugg.rs index 8d767f9d44d3a..b8c87aa5e1e42 100644 --- a/clippy_utils/src/sugg.rs +++ b/clippy_utils/src/sugg.rs @@ -809,7 +809,7 @@ pub struct DerefClosure { /// /// note: this only works on single line immutable closures with exactly one input parameter. pub fn deref_closure_args(cx: &LateContext<'_>, closure: &hir::Expr<'_>) -> Option { - if let hir::ExprKind::Closure(&Closure { fn_decl, body, .. }) = closure.kind { + if let hir::ExprKind::Closure(&Closure { fn_decl, def_id, body, .. }) = closure.kind { let closure_body = cx.tcx.hir().body(body); // is closure arg a type annotated double reference (i.e.: `|x: &&i32| ...`) // a type annotation is present if param `kind` is different from `TyKind::Infer` @@ -829,10 +829,8 @@ pub fn deref_closure_args(cx: &LateContext<'_>, closure: &hir::Expr<'_>) -> Opti applicability: Applicability::MachineApplicable, }; - let fn_def_id = cx.tcx.hir().local_def_id(closure.hir_id); let infcx = cx.tcx.infer_ctxt().build(); - ExprUseVisitor::new(&mut visitor, &infcx, fn_def_id, cx.param_env, cx.typeck_results()) - .consume_body(closure_body); + ExprUseVisitor::new(&mut visitor, &infcx, def_id, cx.param_env, cx.typeck_results()).consume_body(closure_body); if !visitor.suggestion_start.is_empty() { return Some(DerefClosure { From a0460cf37d010643c1c7fa1c26923c3d61075e3f Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 01:49:10 +0900 Subject: [PATCH 178/501] add COLLAPSIBLE_STR_REPLACE in msrv COLLAPSIBLE_STR_REPLACE uses msrvs::PATTERN_TRAIT_CHAR_ARRAY --- book/src/lint_configuration.md | 1 + clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index f79dbb50ff490..6068b4aa95047 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -167,6 +167,7 @@ The minimum rust version that the project supports * [manual_clamp](https://rust-lang.github.io/rust-clippy/master/index.html#manual_clamp) * [manual_let_else](https://rust-lang.github.io/rust-clippy/master/index.html#manual_let_else) * [unchecked_duration_subtraction](https://rust-lang.github.io/rust-clippy/master/index.html#unchecked_duration_subtraction) +* [collapsible_str_replace](https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_str_replace) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index f48be27592b7e..e81e983e3c1fd 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ 
define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. (avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE. /// /// The minimum rust version that the project supports (msrv: Option = None), From 5a9c4a009052ad5e5baf6f9ee8450136e1bde939 Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 01:52:05 +0900 Subject: [PATCH 179/501] add SEEK_FROM_CURRENT in msrv SEEK_FROM_CURRENT uses msrvs::SEEK_FROM_CURRENT --- book/src/lint_configuration.md | 1 + clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 6068b4aa95047..7c8a3291f9427 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -168,6 +168,7 @@ The minimum rust version that the project supports * [manual_let_else](https://rust-lang.github.io/rust-clippy/master/index.html#manual_let_else) * [unchecked_duration_subtraction](https://rust-lang.github.io/rust-clippy/master/index.html#unchecked_duration_subtraction) * [collapsible_str_replace](https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_str_replace) +* [seek_from_current](https://rust-lang.github.io/rust-clippy/master/index.html#seek_from_current) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index e81e983e3c1fd..36ca69c23f742 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. 
(avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT. /// /// The minimum rust version that the project supports (msrv: Option = None), From a05e86f5ddcc976ac1d44692bafd92dfd65b62a6 Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 01:54:31 +0900 Subject: [PATCH 180/501] add SEEK_REWIND in msrv SEEK_REWIND uses msrvs::SEEK_REWIND --- book/src/lint_configuration.md | 1 + clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 7c8a3291f9427..4c204d93e0aff 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -169,6 +169,7 @@ The minimum rust version that the project supports * [unchecked_duration_subtraction](https://rust-lang.github.io/rust-clippy/master/index.html#unchecked_duration_subtraction) * [collapsible_str_replace](https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_str_replace) * [seek_from_current](https://rust-lang.github.io/rust-clippy/master/index.html#seek_from_current) +* [seek_rewind](https://rust-lang.github.io/rust-clippy/master/index.html#seek_rewind) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index 36ca69c23f742..33ca3c16a24c2 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. 
(avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND. /// /// The minimum rust version that the project supports (msrv: Option = None), From 532841fcae289ff328ddbb10587695b2c6965771 Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 02:02:46 +0900 Subject: [PATCH 181/501] add UNNECESSARY_LAZY_EVALUATIONS to msrv UNNECESSARY_LAZY_EVALUATIONS uses msrvs::BOOL_THEN_SOME for `then` to `then_some` --- book/src/lint_configuration.md | 1 + clippy_lints/src/methods/mod.rs | 1 + clippy_lints/src/utils/conf.rs | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 4c204d93e0aff..d24b5259a0b8f 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -170,6 +170,7 @@ The minimum rust version that the project supports * [collapsible_str_replace](https://rust-lang.github.io/rust-clippy/master/index.html#collapsible_str_replace) * [seek_from_current](https://rust-lang.github.io/rust-clippy/master/index.html#seek_from_current) * [seek_rewind](https://rust-lang.github.io/rust-clippy/master/index.html#seek_rewind) +* [unnecessary_lazy_evaluations](https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_lazy_evaluations) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/methods/mod.rs b/clippy_lints/src/methods/mod.rs index 42377a3d138c2..6a1575e182f0f 100644 --- a/clippy_lints/src/methods/mod.rs +++ b/clippy_lints/src/methods/mod.rs @@ -1818,6 +1818,7 @@ declare_clippy_lint! { /// - `or_else` to `or` /// - `get_or_insert_with` to `get_or_insert` /// - `ok_or_else` to `ok_or` + /// - `then` to `then_some` (for msrv >= 1.62.0) /// /// ### Why is this bad? /// Using eager evaluation is shorter and simpler in some cases. diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index 33ca3c16a24c2..21301e2e31b08 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. 
(avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS. /// /// The minimum rust version that the project supports (msrv: Option = None), From 07a8bf15ff4fa044670bd9e6d5b823bfff84f1a9 Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 02:14:38 +0900 Subject: [PATCH 182/501] add TRANSMUTE_PTR_TO_REF to msrv TRANSMUTE_PTR_TO_REF uses msrvs::POINTER_CAST --- book/src/lint_configuration.md | 1 + clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index d24b5259a0b8f..c621dec1d607a 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -171,6 +171,7 @@ The minimum rust version that the project supports * [seek_from_current](https://rust-lang.github.io/rust-clippy/master/index.html#seek_from_current) * [seek_rewind](https://rust-lang.github.io/rust-clippy/master/index.html#seek_rewind) * [unnecessary_lazy_evaluations](https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_lazy_evaluations) +* [transmute_ptr_to_ref](https://rust-lang.github.io/rust-clippy/master/index.html#transmute_ptr_to_ref) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index 21301e2e31b08..5e7ae71ea3626 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. 
(avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF. /// /// The minimum rust version that the project supports (msrv: Option = None), From e65f9f9d32e53ce4edbd1bee96f4e5cdf67e1624 Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 02:22:10 +0900 Subject: [PATCH 183/501] add ALMOST_COMPLETE_RANGE to msrv ALMOST_COMPLETE_RANGE uses msrvs::RANGE_INCLUSIVE --- book/src/lint_configuration.md | 1 + clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index c621dec1d607a..9b4460fc6556d 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -172,6 +172,7 @@ The minimum rust version that the project supports * [seek_rewind](https://rust-lang.github.io/rust-clippy/master/index.html#seek_rewind) * [unnecessary_lazy_evaluations](https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_lazy_evaluations) * [transmute_ptr_to_ref](https://rust-lang.github.io/rust-clippy/master/index.html#transmute_ptr_to_ref) +* [almost_complete_range](https://rust-lang.github.io/rust-clippy/master/index.html#almost_complete_range) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index 5e7ae71ea3626..c7aec62c1eec8 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. 
(avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE. /// /// The minimum rust version that the project supports (msrv: Option = None), From 7716d69757ec65e11332ea222e1c83aafd1f53db Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 02:27:43 +0900 Subject: [PATCH 184/501] fix: add missing dot to AWAIT_HOLDING_INVALID_TYPE --- book/src/lint_configuration.md | 9 +++++++++ clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 9b4460fc6556d..d425b46be51a8 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -43,6 +43,7 @@ Please use that command to update the file and do not edit it by hand. | [allowed-scripts](#allowed-scripts) | `["Latin"]` | | [enable-raw-pointer-heuristic-for-send](#enable-raw-pointer-heuristic-for-send) | `true` | | [max-suggested-slice-pattern-length](#max-suggested-slice-pattern-length) | `3` | +| [await-holding-invalid-types](#await-holding-invalid-types) | `[]` | | [max-include-file-size](#max-include-file-size) | `1000000` | | [allow-expect-in-tests](#allow-expect-in-tests) | `false` | | [allow-unwrap-in-tests](#allow-unwrap-in-tests) | `false` | @@ -448,6 +449,14 @@ For example, `[_, _, _, e, ..]` is a slice pattern with 4 elements. * [index_refutable_slice](https://rust-lang.github.io/rust-clippy/master/index.html#index_refutable_slice) +### await-holding-invalid-types + + +**Default Value:** `[]` (`Vec`) + +* [await_holding_invalid_type](https://rust-lang.github.io/rust-clippy/master/index.html#await_holding_invalid_type) + + ### max-include-file-size The maximum size of a file included via `include_bytes!()` or `include_str!()`, in bytes diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index c7aec62c1eec8..1b0aa9a63eda1 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -411,7 +411,7 @@ define_Conf! { /// the slice pattern that is suggested. 
If more elements would be necessary, the lint is suppressed. /// For example, `[_, _, _, e, ..]` is a slice pattern with 4 elements. (max_suggested_slice_pattern_length: u64 = 3), - /// Lint: AWAIT_HOLDING_INVALID_TYPE + /// Lint: AWAIT_HOLDING_INVALID_TYPE. (await_holding_invalid_types: Vec = Vec::new()), /// Lint: LARGE_INCLUDE_FILE. /// From fb77b027891daf4c730e3f80852ce446fef9beab Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 02:37:00 +0900 Subject: [PATCH 185/501] add NEEDLESS_BORROW to msrv NEEDLESS_BORROW uses msrvs::ARRAY_INTO_ITERATOR --- book/src/lint_configuration.md | 1 + clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index d425b46be51a8..9bd4e72fe8f37 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -174,6 +174,7 @@ The minimum rust version that the project supports * [unnecessary_lazy_evaluations](https://rust-lang.github.io/rust-clippy/master/index.html#unnecessary_lazy_evaluations) * [transmute_ptr_to_ref](https://rust-lang.github.io/rust-clippy/master/index.html#transmute_ptr_to_ref) * [almost_complete_range](https://rust-lang.github.io/rust-clippy/master/index.html#almost_complete_range) +* [needless_borrow](https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrow) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index 1b0aa9a63eda1..7364729606b31 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. (avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW. 
/// /// The minimum rust version that the project supports (msrv: Option = None), From e791522d35bf33a9008c367902ccaf3387325968 Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 02:39:37 +0900 Subject: [PATCH 186/501] add DERIVABLE_IMPLS to msrv DERIVABLE_IMPLS uses msrvs::DEFAULT_ENUM_ATTRIBUTE --- book/src/lint_configuration.md | 1 + clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 9bd4e72fe8f37..9f7cf9a61637a 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -175,6 +175,7 @@ The minimum rust version that the project supports * [transmute_ptr_to_ref](https://rust-lang.github.io/rust-clippy/master/index.html#transmute_ptr_to_ref) * [almost_complete_range](https://rust-lang.github.io/rust-clippy/master/index.html#almost_complete_range) * [needless_borrow](https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrow) +* [derivable_impls](https://rust-lang.github.io/rust-clippy/master/index.html#derivable_impls) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index 7364729606b31..f7a35dc798ee0 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. (avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS. 
/// /// The minimum rust version that the project supports (msrv: Option = None), From 25d455bd1728ba28c5cf6a0e044f2194a7d4d2ca Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 02:54:26 +0900 Subject: [PATCH 187/501] fix: add missing dot to suppress_restriction_lint_in_const --- book/src/lint_configuration.md | 2 +- clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 9f7cf9a61637a..32a2b44392d4e 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -526,7 +526,7 @@ Whether to allow mixed uninlined format args, e.g. `format!("{} {}", a, foo.bar) ### suppress-restriction-lint-in-const -In same +Whether to suppress a restriction lint in constant code. In same cases the restructured operation might not be unavoidable, as the suggested counterparts are unavailable in constant code. This configuration will cause restriction lints to trigger even diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index f7a35dc798ee0..f8a5b5547dedb 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -446,7 +446,7 @@ define_Conf! { /// /// Whether to allow mixed uninlined format args, e.g. `format!("{} {}", a, foo.bar)` (allow_mixed_uninlined_format_args: bool = true), - /// Lint: INDEXING_SLICING + /// Lint: INDEXING_SLICING. /// /// Whether to suppress a restriction lint in constant code. In same /// cases the restructured operation might not be unavoidable, as the From 1766532b20a7af11cd4cd52545a643b50a117040 Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 03:02:23 +0900 Subject: [PATCH 188/501] add MANUAL_IS_ASCII_CHECK to msrv MANUAL_IS_ASCII_CHECK uses msrvs::IS_ASCII_DIGIT and msrvs::IS_ASCII_DIGIT_CONST --- book/src/lint_configuration.md | 1 + clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 32a2b44392d4e..07414f5408b0e 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -176,6 +176,7 @@ The minimum rust version that the project supports * [almost_complete_range](https://rust-lang.github.io/rust-clippy/master/index.html#almost_complete_range) * [needless_borrow](https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrow) * [derivable_impls](https://rust-lang.github.io/rust-clippy/master/index.html#derivable_impls) +* [manual_is_ascii_check](https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_ascii_check) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index f8a5b5547dedb..380884cc21e79 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. 
(avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS, MANUAL_IS_ASCII_CHECK. /// /// The minimum rust version that the project supports (msrv: Option = None), From 2f4b047b277b70ff0b43e07e00e58d54d92fda3c Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 03:04:56 +0900 Subject: [PATCH 189/501] add MANUAL_REM_EUCLID to msrv MANUAL_REM_EUCLID uses msrvs::REM_EUCLID --- book/src/lint_configuration.md | 1 + clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 07414f5408b0e..78304ca48fe01 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -177,6 +177,7 @@ The minimum rust version that the project supports * [needless_borrow](https://rust-lang.github.io/rust-clippy/master/index.html#needless_borrow) * [derivable_impls](https://rust-lang.github.io/rust-clippy/master/index.html#derivable_impls) * [manual_is_ascii_check](https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_ascii_check) +* [manual_rem_euclid](https://rust-lang.github.io/rust-clippy/master/index.html#manual_rem_euclid) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index 380884cc21e79..e47546e3e4293 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. 
(avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS, MANUAL_IS_ASCII_CHECK. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS, MANUAL_IS_ASCII_CHECK, MANUAL_REM_EUCLID. /// /// The minimum rust version that the project supports (msrv: Option = None), From d87a6bc9b18594bdadb479f7ade1361527d3f46b Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 03:07:09 +0900 Subject: [PATCH 190/501] add MANUAL_RETAIN to msrv MANUAL_RETAIN uses - msrvs::STRING_RETAIN - msrvs::BTREE_SET_RETAIN - msrvs::BTREE_MAP_RETAIN - msrvs::HASH_SET_RETAIN - msrvs::HASH_MAP_RETAIN --- book/src/lint_configuration.md | 1 + clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 78304ca48fe01..1db2ad5859cbe 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -178,6 +178,7 @@ The minimum rust version that the project supports * [derivable_impls](https://rust-lang.github.io/rust-clippy/master/index.html#derivable_impls) * [manual_is_ascii_check](https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_ascii_check) * [manual_rem_euclid](https://rust-lang.github.io/rust-clippy/master/index.html#manual_rem_euclid) +* [manual_retain](https://rust-lang.github.io/rust-clippy/master/index.html#manual_retain) ### cognitive-complexity-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index e47546e3e4293..5bdffab26b22e 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -253,7 +253,7 @@ define_Conf! { /// /// Suppress lints whenever the suggested change would cause breakage for other crates. 
(avoid_breaking_exported_api: bool = true), - /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS, MANUAL_IS_ASCII_CHECK, MANUAL_REM_EUCLID. + /// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS, MANUAL_IS_ASCII_CHECK, MANUAL_REM_EUCLID, MANUAL_RETAIN. /// /// The minimum rust version that the project supports (msrv: Option = None), From af62bf95a3fc2d8d1164698160507fa7d7035284 Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 03:13:30 +0900 Subject: [PATCH 191/501] fix key name of MUTABLE_KEY_TYPE --- book/src/lint_configuration.md | 2 +- clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 1db2ad5859cbe..9acacede5a886 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -517,7 +517,7 @@ for the generic parameters for determining interior mutability **Default Value:** `["bytes::Bytes"]` (`Vec`) -* [mutable_key](https://rust-lang.github.io/rust-clippy/master/index.html#mutable_key) +* [mutable_key_type](https://rust-lang.github.io/rust-clippy/master/index.html#mutable_key_type) ### allow-mixed-uninlined-format-args diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index 5bdffab26b22e..4dfe23f474f61 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -437,7 +437,7 @@ define_Conf! { /// /// The maximum size of the `Err`-variant in a `Result` returned from a function (large_error_threshold: u64 = 128), - /// Lint: MUTABLE_KEY. + /// Lint: MUTABLE_KEY_TYPE. /// /// A list of paths to types that should be treated like `Arc`, i.e. 
ignored but /// for the generic parameters for determining interior mutability From deff5f22f6f4122b211211ae2f1ca2a7ad0c7e0f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Thu, 26 Jan 2023 17:19:35 +0200 Subject: [PATCH 192/501] Tweak change collapsing --- crates/rust-analyzer/src/global_state.rs | 98 ++++++++++++------------ 1 file changed, 48 insertions(+), 50 deletions(-) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index de11abdcf829f..aca6c92357070 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -3,7 +3,7 @@ //! //! Each tick provides an immutable snapshot of the state as `WorldSnapshot`. -use std::{mem, sync::Arc, time::Instant}; +use std::{sync::Arc, time::Instant}; use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; @@ -179,10 +179,9 @@ impl GlobalState { pub(crate) fn process_changes(&mut self) -> bool { let _p = profile::span("GlobalState::process_changes"); - // A file was added or deleted - let mut has_structure_changes = false; let mut workspace_structure_change = None; + let mut file_changes = FxHashMap::default(); let (change, changed_files) = { let mut change = Change::new(); let (vfs, line_endings_map) = &mut *self.vfs.write(); @@ -191,57 +190,56 @@ impl GlobalState { return false; } - // important: this needs to be a stable sort, the order between changes is relevant - // for the same file ids - changed_files.sort_by_key(|file| file.file_id); - // We need to fix up the changed events a bit, if we have a create or modify for a file - // id that is followed by a delete we actually no longer observe the file text from the - // create or modify which may cause problems later on - let mut collapsed_create_delete = false; - changed_files.dedup_by(|a, b| { + // We need to fix up the changed events a bit. If we have a create or modify for a file + // id that is followed by a delete we actually skip observing the file text from the + // earlier event, to avoid problems later on. 
+ for changed_file in &changed_files { use vfs::ChangeKind::*; - let has_collapsed_create_delete = mem::replace(&mut collapsed_create_delete, false); + file_changes + .entry(changed_file.file_id) + .and_modify(|(change, just_created)| { + // None -> Delete => keep + // Create -> Delete => collapse + // + match (change, just_created, changed_file.change_kind) { + // latter `Delete` wins + (change, _, Delete) => *change = Delete, + // merge `Create` with `Create` or `Modify` + (Create, _, Create | Modify) => {} + // collapse identical `Modify`es + (Modify, _, Modify) => {} + // equivalent to `Modify` + (change @ Delete, just_created, Create) => { + *change = Modify; + *just_created = true; + } + // shouldn't occur, but collapse into `Create` + (change @ Delete, just_created, Modify) => { + *change = Create; + *just_created = true; + } + // shouldn't occur, but collapse into `Modify` + (Modify, _, Create) => {} + } + }) + .or_insert(( + changed_file.change_kind, + matches!(changed_file.change_kind, Create), + )); + } - if a.file_id != b.file_id { - return false; - } + changed_files.extend( + file_changes + .into_iter() + .filter(|(_, (change_kind, just_created))| { + !matches!((change_kind, just_created), (vfs::ChangeKind::Delete, true)) + }) + .map(|(file_id, (change_kind, _))| vfs::ChangedFile { file_id, change_kind }), + ); - // true => delete the second element (a), we swap them here as they are inverted by dedup_by - match (b.change_kind, a.change_kind) { - // duplicate can be merged - (Create, Create) | (Modify, Modify) | (Delete, Delete) => true, - // just leave the create, modify is irrelevant - (Create, Modify) => true, - // modify becomes irrelevant if the file is deleted - (Modify, Delete) => { - mem::swap(a, b); - true - } - // Remove the create message, and in the following loop, also remove the delete - (Create, Delete) => { - collapsed_create_delete = true; - b.change_kind = Delete; - true - } - // trailing delete from earlier - (Delete, Create | Modify) if has_collapsed_create_delete => { - b.change_kind = Create; - true - } - // this is equivalent to a modify - (Delete, Create) => { - b.change_kind = Modify; - true - } - // can't really occur - (Modify, Create) => false, - (Delete, Modify) => false, - } - }); - if collapsed_create_delete { - changed_files.pop(); - } + // A file was added or deleted + let mut has_structure_changes = false; for file in &changed_files { if let Some(path) = vfs.file_path(file.file_id).as_path() { let path = path.to_path_buf(); From 4d266d31ded825d43df64e496cea54bdcf2f65e4 Mon Sep 17 00:00:00 2001 From: Tyler Weaver Date: Sun, 29 Jan 2023 11:41:53 -0700 Subject: [PATCH 193/501] needless_range_loop: improve documentation --- clippy_lints/src/loops/mod.rs | 3 ++- clippy_lints/src/loops/needless_range_loop.rs | 2 +- tests/ui/needless_range_loop.stderr | 8 ++++---- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/clippy_lints/src/loops/mod.rs b/clippy_lints/src/loops/mod.rs index 8e52cac4323c3..610a0233eee15 100644 --- a/clippy_lints/src/loops/mod.rs +++ b/clippy_lints/src/loops/mod.rs @@ -61,7 +61,8 @@ declare_clippy_lint! { /// /// ### Why is this bad? /// Just iterating the collection itself makes the intent - /// more clear and is probably faster. + /// more clear and is probably faster because it eliminates + /// the bounds check that is done when indexing. 
/// /// ### Example /// ```rust diff --git a/clippy_lints/src/loops/needless_range_loop.rs b/clippy_lints/src/loops/needless_range_loop.rs index 3bca93d80aa7f..1336b80d88d6d 100644 --- a/clippy_lints/src/loops/needless_range_loop.rs +++ b/clippy_lints/src/loops/needless_range_loop.rs @@ -149,7 +149,7 @@ pub(super) fn check<'tcx>( |diag| { multispan_sugg( diag, - "consider using an iterator", + "consider using an iterator and enumerate", vec![ (pat.span, format!("({}, )", ident.name)), ( diff --git a/tests/ui/needless_range_loop.stderr b/tests/ui/needless_range_loop.stderr index b31544ec334a6..81b6f214bd11f 100644 --- a/tests/ui/needless_range_loop.stderr +++ b/tests/ui/needless_range_loop.stderr @@ -49,7 +49,7 @@ error: the loop variable `i` is used to index `vec` LL | for i in 0..vec.len() { | ^^^^^^^^^^^^ | -help: consider using an iterator +help: consider using an iterator and enumerate | LL | for (i, ) in vec.iter().enumerate() { | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~ @@ -126,7 +126,7 @@ error: the loop variable `i` is used to index `vec` LL | for i in 5..vec.len() { | ^^^^^^^^^^^^ | -help: consider using an iterator +help: consider using an iterator and enumerate | LL | for (i, ) in vec.iter().enumerate().skip(5) { | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -137,7 +137,7 @@ error: the loop variable `i` is used to index `vec` LL | for i in 5..10 { | ^^^^^ | -help: consider using an iterator +help: consider using an iterator and enumerate | LL | for (i, ) in vec.iter().enumerate().take(10).skip(5) { | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -148,7 +148,7 @@ error: the loop variable `i` is used to index `vec` LL | for i in 0..vec.len() { | ^^^^^^^^^^^^ | -help: consider using an iterator +help: consider using an iterator and enumerate | LL | for (i, ) in vec.iter_mut().enumerate() { | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~ From 2fd94a4e01d9e8049df3651d0b29b3964cf235d8 Mon Sep 17 00:00:00 2001 From: ksaleem Date: Sun, 29 Jan 2023 13:48:06 -0500 Subject: [PATCH 194/501] prevents `len_without_is_empty` from yielding positive when `len` takes more than just `&self` in non-standard implementations. 
changelog: Fix [`len_without_is_empty`] false positive when len has a non-standard method signature Fixes #9520 --- clippy_lints/src/len_zero.rs | 1 + tests/ui/len_without_is_empty.rs | 46 ++++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) diff --git a/clippy_lints/src/len_zero.rs b/clippy_lints/src/len_zero.rs index 3c70c9cf19a51..920ab7f06336d 100644 --- a/clippy_lints/src/len_zero.rs +++ b/clippy_lints/src/len_zero.rs @@ -135,6 +135,7 @@ impl<'tcx> LateLintPass<'tcx> for LenZero { if item.ident.name == sym::len; if let ImplItemKind::Fn(sig, _) = &item.kind; if sig.decl.implicit_self.has_implicit_self(); + if sig.decl.inputs.len() == 1; if cx.effective_visibilities.is_exported(item.owner_id.def_id); if matches!(sig.decl.output, FnRetTy::Return(_)); if let Some(imp) = get_parent_as_impl(cx.tcx, item.hir_id()); diff --git a/tests/ui/len_without_is_empty.rs b/tests/ui/len_without_is_empty.rs index 78397c2af346b..b5dec6c46bddd 100644 --- a/tests/ui/len_without_is_empty.rs +++ b/tests/ui/len_without_is_empty.rs @@ -282,4 +282,50 @@ impl AsyncLen { } } +// issue #9520 +pub struct NonStandardLenAndIsEmptySignature; +impl NonStandardLenAndIsEmptySignature { + // don't lint + pub fn len(&self, something: usize) -> usize { + something + } + + pub fn is_empty(&self, something: usize) -> bool { + something == 0 + } +} + +// test case for #9520 with generics in the function signature +pub trait TestResource { + type NonStandardSignatureWithGenerics: Copy; + fn lookup_content(&self, item: Self::NonStandardSignatureWithGenerics) -> Result, String>; +} +pub struct NonStandardSignatureWithGenerics(u32); +impl NonStandardSignatureWithGenerics { + pub fn is_empty(self, resource: &T) -> bool + where + T: TestResource, + U: Copy + From, + { + if let Ok(Some(content)) = resource.lookup_content(self.into()) { + content.is_empty() + } else { + true + } + } + + // test case for #9520 with generics in the function signature + pub fn len(self, resource: &T) -> usize + where + T: TestResource, + U: Copy + From, + { + if let Ok(Some(content)) = resource.lookup_content(self.into()) { + content.len() + } else { + 0_usize + } + } +} + fn main() {} From ecde2019e9ad63282b647fbce33cbbc3d394fd84 Mon Sep 17 00:00:00 2001 From: Sylvain Desodt Date: Mon, 30 Jan 2023 10:01:29 +0100 Subject: [PATCH 195/501] Fix version declared for semicolon_inside_block and semicolon_outside_block As per Issue #10244, the lint were documentated as being part of 1.66.0 but will actually be released 1.68.0 . --- clippy_lints/src/semicolon_block.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/clippy_lints/src/semicolon_block.rs b/clippy_lints/src/semicolon_block.rs index 8f1d1490e1f08..34a3e5ddf4f6b 100644 --- a/clippy_lints/src/semicolon_block.rs +++ b/clippy_lints/src/semicolon_block.rs @@ -30,7 +30,7 @@ declare_clippy_lint! { /// # let x = 0; /// unsafe { f(x); } /// ``` - #[clippy::version = "1.66.0"] + #[clippy::version = "1.68.0"] pub SEMICOLON_INSIDE_BLOCK, restriction, "add a semicolon inside the block" @@ -59,7 +59,7 @@ declare_clippy_lint! 
{ /// # let x = 0; /// unsafe { f(x) }; /// ``` - #[clippy::version = "1.66.0"] + #[clippy::version = "1.68.0"] pub SEMICOLON_OUTSIDE_BLOCK, restriction, "add a semicolon outside the block" From a9e6b128542134c340bb25e13144eafbd176f51b Mon Sep 17 00:00:00 2001 From: koka Date: Sun, 29 Jan 2023 03:22:17 +0900 Subject: [PATCH 196/501] fix: use correct lint name fix --- book/src/lint_configuration.md | 2 +- clippy_lints/src/utils/conf.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/book/src/lint_configuration.md b/book/src/lint_configuration.md index 9acacede5a886..32e8e218c4055 100644 --- a/book/src/lint_configuration.md +++ b/book/src/lint_configuration.md @@ -291,7 +291,7 @@ The minimum size (in bytes) to consider a type for passing by reference instead **Default Value:** `256` (`u64`) -* [large_type_pass_by_move](https://rust-lang.github.io/rust-clippy/master/index.html#large_type_pass_by_move) +* [large_types_passed_by_value](https://rust-lang.github.io/rust-clippy/master/index.html#large_types_passed_by_value) ### too-many-lines-threshold diff --git a/clippy_lints/src/utils/conf.rs b/clippy_lints/src/utils/conf.rs index 4dfe23f474f61..1d78c7cfae0df 100644 --- a/clippy_lints/src/utils/conf.rs +++ b/clippy_lints/src/utils/conf.rs @@ -323,7 +323,7 @@ define_Conf! { /// /// The maximum size (in bytes) to consider a `Copy` type for passing by value instead of by reference. (trivial_copy_size_limit: Option = None), - /// Lint: LARGE_TYPE_PASS_BY_MOVE. + /// Lint: LARGE_TYPES_PASSED_BY_VALUE. /// /// The minimum size (in bytes) to consider a type for passing by reference instead of by value. (pass_by_value_size_limit: u64 = 256), From c65782e64a8653b17470c07ecd43af875eeebc1b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 30 Jan 2023 15:41:08 +0100 Subject: [PATCH 197/501] Parse macros allow_internal_unsafe attribute --- crates/hir-def/src/lib.rs | 7 ++++++- crates/hir-def/src/macro_expansion_tests.rs | 4 +++- crates/hir-def/src/nameres/collector.rs | 13 ++++++++++--- crates/hir-expand/src/lib.rs | 1 + 4 files changed, 20 insertions(+), 5 deletions(-) diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index 8eae2e92f4262..aabd694820813 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -290,6 +290,7 @@ pub struct Macro2Loc { pub container: ModuleId, pub id: ItemTreeId, pub expander: MacroExpander, + pub allow_internal_unsafe: bool, } impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2); @@ -299,8 +300,9 @@ pub struct MacroRulesId(salsa::InternId); pub struct MacroRulesLoc { pub container: ModuleId, pub id: ItemTreeId, - pub local_inner: bool, pub expander: MacroExpander, + pub allow_internal_unsafe: bool, + pub local_inner: bool, } impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macro_rules); @@ -894,6 +896,7 @@ pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId { } }, local_inner: false, + allow_internal_unsafe: loc.allow_internal_unsafe, } } MacroId::MacroRulesId(it) => { @@ -918,6 +921,7 @@ pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId { } }, local_inner: loc.local_inner, + allow_internal_unsafe: loc.allow_internal_unsafe, } } MacroId::ProcMacroId(it) => { @@ -933,6 +937,7 @@ pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId { InFile::new(loc.id.file_id(), makro.ast_id), ), local_inner: false, + allow_internal_unsafe: false, } } } diff --git a/crates/hir-def/src/macro_expansion_tests.rs 
b/crates/hir-def/src/macro_expansion_tests.rs index 79c85d118316a..4907f237f6b8a 100644 --- a/crates/hir-def/src/macro_expansion_tests.rs +++ b/crates/hir-def/src/macro_expansion_tests.rs @@ -97,7 +97,9 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let ast_id = AstId::new(source.file_id, file_ast_id.upcast()); let kind = MacroDefKind::Declarative(ast_id); - let macro_def = db.macro_def(MacroDefId { krate, kind, local_inner: false }).unwrap(); + let macro_def = db + .macro_def(MacroDefId { krate, kind, local_inner: false, allow_internal_unsafe: false }) + .unwrap(); if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def { let tt = match ¯o_ { ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(), diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index c69599079b5e3..6c2bbc74d60ba 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -1982,11 +1982,13 @@ impl ModCollector<'_, '_> { // Case 2: normal `macro_rules!` macro MacroExpander::Declarative }; + let allow_internal_unsafe = attrs.by_key("allow_internal_unsafe").exists(); let macro_id = MacroRulesLoc { container: module, id: ItemTreeId::new(self.tree_id, id), local_inner, + allow_internal_unsafe, expander, } .intern(self.def_collector.db); @@ -2046,10 +2048,15 @@ impl ModCollector<'_, '_> { // Case 2: normal `macro` MacroExpander::Declarative }; + let allow_internal_unsafe = attrs.by_key("allow_internal_unsafe").exists(); - let macro_id = - Macro2Loc { container: module, id: ItemTreeId::new(self.tree_id, id), expander } - .intern(self.def_collector.db); + let macro_id = Macro2Loc { + container: module, + id: ItemTreeId::new(self.tree_id, id), + expander, + allow_internal_unsafe, + } + .intern(self.def_collector.db); self.def_collector.define_macro_def( self.module_id, mac.name.clone(), diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index b879eec4cc8d7..967a8fedbd108 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -116,6 +116,7 @@ pub struct MacroDefId { pub krate: CrateId, pub kind: MacroDefKind, pub local_inner: bool, + pub allow_internal_unsafe: bool, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] From 1f20804b6e54d90226e1d82253676648ca8be8da Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 30 Jan 2023 17:50:02 +0100 Subject: [PATCH 198/501] Improve "match to let-else" assist --- .../src/handlers/convert_match_to_let_else.rs | 75 +++++++++++++++---- 1 file changed, 60 insertions(+), 15 deletions(-) diff --git a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs index fbd81c80151d3..7896d3a66d4b8 100644 --- a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs +++ b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs @@ -30,7 +30,7 @@ use crate::{ // ``` pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let let_stmt: ast::LetStmt = ctx.find_node_at_offset()?; - let binding = find_binding(let_stmt.pat()?)?; + let binding = let_stmt.pat()?; let initializer = match let_stmt.initializer() { Some(ast::Expr::MatchExpr(it)) => it, @@ -47,7 +47,12 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<' return None; } - let diverging_arm_expr = diverging_arm.expr()?; + let diverging_arm_expr = match diverging_arm.expr()? 
{ + ast::Expr::BlockExpr(block) if block.modifier().is_none() && block.label().is_none() => { + block.to_string() + } + other => format!("{{ {other} }}"), + }; let extracting_arm_pat = extracting_arm.pat()?; let extracted_variable = find_extracted_variable(ctx, &extracting_arm)?; @@ -56,24 +61,16 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<' "Convert match to let-else", let_stmt.syntax().text_range(), |builder| { - let extracting_arm_pat = rename_variable(&extracting_arm_pat, extracted_variable, binding); + let extracting_arm_pat = + rename_variable(&extracting_arm_pat, extracted_variable, binding); builder.replace( let_stmt.syntax().text_range(), - format!("let {extracting_arm_pat} = {initializer_expr} else {{ {diverging_arm_expr} }};") + format!("let {extracting_arm_pat} = {initializer_expr} else {diverging_arm_expr};"), ) }, ) } -// Given a pattern, find the name introduced to the surrounding scope. -fn find_binding(pat: ast::Pat) -> Option { - if let ast::Pat::IdentPat(ident) = pat { - Some(ident) - } else { - None - } -} - // Given a match expression, find extracting and diverging arms. fn find_arms( ctx: &AssistContext<'_>, @@ -124,7 +121,7 @@ fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Opti } // Rename `extracted` with `binding` in `pat`. -fn rename_variable(pat: &ast::Pat, extracted: ast::Name, binding: ast::IdentPat) -> SyntaxNode { +fn rename_variable(pat: &ast::Pat, extracted: ast::Name, binding: ast::Pat) -> SyntaxNode { let syntax = pat.syntax().clone_for_update(); let extracted_syntax = syntax.covering_element(extracted.syntax().text_range()); @@ -136,7 +133,7 @@ fn rename_variable(pat: &ast::Pat, extracted: ast::Name, binding: ast::IdentPat) if let Some(name_ref) = record_pat_field.field_name() { ted::replace( record_pat_field.syntax(), - ast::make::record_pat_field(ast::make::name_ref(&name_ref.text()), binding.into()) + ast::make::record_pat_field(ast::make::name_ref(&name_ref.text()), binding) .syntax() .clone_for_update(), ); @@ -410,4 +407,52 @@ fn foo(opt: Option) -> Option { "#, ); } + + #[test] + fn complex_pattern() { + check_assist( + convert_match_to_let_else, + r#" +//- minicore: option +fn f() { + let (x, y) = $0match Some((0, 1)) { + Some(it) => it, + None => return, + }; +} +"#, + r#" +fn f() { + let Some((x, y)) = Some((0, 1)) else { return }; +} +"#, + ); + } + + #[test] + fn diverging_block() { + check_assist( + convert_match_to_let_else, + r#" +//- minicore: option +fn f() { + let x = $0match Some(()) { + Some(it) => it, + None => {//comment + println!("nope"); + return + }, + }; +} +"#, + r#" +fn f() { + let Some(x) = Some(()) else {//comment + println!("nope"); + return + }; +} +"#, + ); + } } From 6321b25a21935bce6f2498b19958762e8d120081 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 30 Jan 2023 17:52:15 +0100 Subject: [PATCH 199/501] Apply the assist to itself --- .../src/handlers/convert_match_to_let_else.rs | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs index 7896d3a66d4b8..65c2479e9f29b 100644 --- a/crates/ide-assists/src/handlers/convert_match_to_let_else.rs +++ b/crates/ide-assists/src/handlers/convert_match_to_let_else.rs @@ -32,16 +32,10 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<' let let_stmt: ast::LetStmt = ctx.find_node_at_offset()?; let binding = let_stmt.pat()?; - let 
initializer = match let_stmt.initializer() { - Some(ast::Expr::MatchExpr(it)) => it, - _ => return None, - }; + let Some(ast::Expr::MatchExpr(initializer)) = let_stmt.initializer() else { return None }; let initializer_expr = initializer.expr()?; - let (extracting_arm, diverging_arm) = match find_arms(ctx, &initializer) { - Some(it) => it, - None => return None, - }; + let Some((extracting_arm, diverging_arm)) = find_arms(ctx, &initializer) else { return None }; if extracting_arm.guard().is_some() { cov_mark::hit!(extracting_arm_has_guard); return None; From 28b7460db18d965d80e14637e62981892a05721e Mon Sep 17 00:00:00 2001 From: gftea Date: Mon, 30 Jan 2023 17:57:27 +0100 Subject: [PATCH 200/501] fix negative trait bound in outline view (#14044) --- crates/ide/src/file_structure.rs | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs index 68fd0952b4881..ab1365e317402 100644 --- a/crates/ide/src/file_structure.rs +++ b/crates/ide/src/file_structure.rs @@ -160,7 +160,10 @@ fn structure_node(node: &SyntaxNode) -> Option { let label = match target_trait { None => format!("impl {}", target_type.syntax().text()), Some(t) => { - format!("impl {} for {}", t.syntax().text(), target_type.syntax().text(),) + format!("impl {}{} for {}", + it.excl_token().map(|x| x.to_string()).unwrap_or_default(), + t.syntax().text(), + target_type.syntax().text(),) } }; @@ -213,6 +216,26 @@ mod tests { expect.assert_debug_eq(&structure) } + #[test] + fn test_nagative_trait_bound() { + let txt = r#"impl !Unpin for Test {}"#; + check(txt, expect![[r#" + [ + StructureNode { + parent: None, + label: "impl !Unpin for Test", + navigation_range: 16..20, + node_range: 0..23, + kind: SymbolKind( + Impl, + ), + detail: None, + deprecated: false, + }, + ] + "#]]); + } + #[test] fn test_file_structure() { check( From 3b225e3a96abcf8eab678691b244e5cdda4c6ea5 Mon Sep 17 00:00:00 2001 From: Tyler Weaver Date: Mon, 30 Jan 2023 09:59:26 -0700 Subject: [PATCH 201/501] Update clippy_lints/src/loops/needless_range_loop.rs Co-authored-by: Manish Goregaokar --- clippy_lints/src/loops/needless_range_loop.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clippy_lints/src/loops/needless_range_loop.rs b/clippy_lints/src/loops/needless_range_loop.rs index 1336b80d88d6d..d060b6ade24ea 100644 --- a/clippy_lints/src/loops/needless_range_loop.rs +++ b/clippy_lints/src/loops/needless_range_loop.rs @@ -149,7 +149,7 @@ pub(super) fn check<'tcx>( |diag| { multispan_sugg( diag, - "consider using an iterator and enumerate", + "consider using an iterator and enumerate()", vec![ (pat.span, format!("({}, )", ident.name)), ( From 5ed191de6bf14cdc0aee749fd03fb6dfb32c4926 Mon Sep 17 00:00:00 2001 From: Tyler Weaver Date: Mon, 30 Jan 2023 10:10:52 -0700 Subject: [PATCH 202/501] bless --- tests/ui/needless_range_loop.stderr | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/ui/needless_range_loop.stderr b/tests/ui/needless_range_loop.stderr index 81b6f214bd11f..cffa19bec3a66 100644 --- a/tests/ui/needless_range_loop.stderr +++ b/tests/ui/needless_range_loop.stderr @@ -49,7 +49,7 @@ error: the loop variable `i` is used to index `vec` LL | for i in 0..vec.len() { | ^^^^^^^^^^^^ | -help: consider using an iterator and enumerate +help: consider using an iterator and enumerate() | LL | for (i, ) in vec.iter().enumerate() { | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~ @@ -126,7 +126,7 @@ error: the loop variable `i` is 
used to index `vec` LL | for i in 5..vec.len() { | ^^^^^^^^^^^^ | -help: consider using an iterator and enumerate +help: consider using an iterator and enumerate() | LL | for (i, ) in vec.iter().enumerate().skip(5) { | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -137,7 +137,7 @@ error: the loop variable `i` is used to index `vec` LL | for i in 5..10 { | ^^^^^ | -help: consider using an iterator and enumerate +help: consider using an iterator and enumerate() | LL | for (i, ) in vec.iter().enumerate().take(10).skip(5) { | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -148,7 +148,7 @@ error: the loop variable `i` is used to index `vec` LL | for i in 0..vec.len() { | ^^^^^^^^^^^^ | -help: consider using an iterator and enumerate +help: consider using an iterator and enumerate() | LL | for (i, ) in vec.iter_mut().enumerate() { | ~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~ From 926c5e4cde09470ecfcdae562e802e5abbddb17c Mon Sep 17 00:00:00 2001 From: Niki4tap Date: Mon, 30 Jan 2023 20:42:40 +0300 Subject: [PATCH 203/501] multiple_unsafe_ops_per_block: don't lint in external macros --- .../src/multiple_unsafe_ops_per_block.rs | 3 +- tests/ui/auxiliary/macro_rules.rs | 10 +++++ tests/ui/multiple_unsafe_ops_per_block.rs | 9 +++++ tests/ui/multiple_unsafe_ops_per_block.stderr | 40 +++++++++---------- 4 files changed, 41 insertions(+), 21 deletions(-) diff --git a/clippy_lints/src/multiple_unsafe_ops_per_block.rs b/clippy_lints/src/multiple_unsafe_ops_per_block.rs index 18e61c75eece3..191da3085be2e 100644 --- a/clippy_lints/src/multiple_unsafe_ops_per_block.rs +++ b/clippy_lints/src/multiple_unsafe_ops_per_block.rs @@ -10,6 +10,7 @@ use hir::{ use rustc_ast::Mutability; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass}; +use rustc_middle::lint::in_external_macro; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::Span; @@ -66,7 +67,7 @@ declare_lint_pass!(MultipleUnsafeOpsPerBlock => [MULTIPLE_UNSAFE_OPS_PER_BLOCK]) impl<'tcx> LateLintPass<'tcx> for MultipleUnsafeOpsPerBlock { fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'_>) { - if !matches!(block.rules, BlockCheckMode::UnsafeBlock(_)) { + if !matches!(block.rules, BlockCheckMode::UnsafeBlock(_)) || in_external_macro(cx.tcx.sess, block.span) { return; } let mut unsafe_ops = vec![]; diff --git a/tests/ui/auxiliary/macro_rules.rs b/tests/ui/auxiliary/macro_rules.rs index 1e5f20e8c39ba..f74a614eefe9a 100644 --- a/tests/ui/auxiliary/macro_rules.rs +++ b/tests/ui/auxiliary/macro_rules.rs @@ -149,3 +149,13 @@ macro_rules! almost_complete_range { let _ = '0'..'9'; }; } + +#[macro_export] +macro_rules! 
unsafe_macro { + () => { + unsafe { + *core::ptr::null::<()>(); + *core::ptr::null::<()>(); + } + }; +} diff --git a/tests/ui/multiple_unsafe_ops_per_block.rs b/tests/ui/multiple_unsafe_ops_per_block.rs index 41263535df673..4511bc99c3c71 100644 --- a/tests/ui/multiple_unsafe_ops_per_block.rs +++ b/tests/ui/multiple_unsafe_ops_per_block.rs @@ -1,9 +1,13 @@ +// aux-build:macro_rules.rs #![allow(unused)] #![allow(deref_nullptr)] #![allow(clippy::unnecessary_operation)] #![allow(clippy::drop_copy)] #![warn(clippy::multiple_unsafe_ops_per_block)] +#[macro_use] +extern crate macro_rules; + use core::arch::asm; fn raw_ptr() -> *const () { @@ -107,4 +111,9 @@ unsafe fn read_char_good(ptr: *const u8) -> char { unsafe { core::char::from_u32_unchecked(int_value) } } +// no lint +fn issue10259() { + unsafe_macro!(); +} + fn main() {} diff --git a/tests/ui/multiple_unsafe_ops_per_block.stderr b/tests/ui/multiple_unsafe_ops_per_block.stderr index f6b8341795d23..303aeb7aee0c5 100644 --- a/tests/ui/multiple_unsafe_ops_per_block.stderr +++ b/tests/ui/multiple_unsafe_ops_per_block.stderr @@ -1,5 +1,5 @@ error: this `unsafe` block contains 2 unsafe operations, expected only one - --> $DIR/multiple_unsafe_ops_per_block.rs:32:5 + --> $DIR/multiple_unsafe_ops_per_block.rs:36:5 | LL | / unsafe { LL | | STATIC += 1; @@ -8,19 +8,19 @@ LL | | } | |_____^ | note: modification of a mutable static occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:33:9 + --> $DIR/multiple_unsafe_ops_per_block.rs:37:9 | LL | STATIC += 1; | ^^^^^^^^^^^ note: unsafe function call occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:34:9 + --> $DIR/multiple_unsafe_ops_per_block.rs:38:9 | LL | not_very_safe(); | ^^^^^^^^^^^^^^^ = note: `-D clippy::multiple-unsafe-ops-per-block` implied by `-D warnings` error: this `unsafe` block contains 2 unsafe operations, expected only one - --> $DIR/multiple_unsafe_ops_per_block.rs:41:5 + --> $DIR/multiple_unsafe_ops_per_block.rs:45:5 | LL | / unsafe { LL | | drop(u.u); @@ -29,18 +29,18 @@ LL | | } | |_____^ | note: union field access occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:42:14 + --> $DIR/multiple_unsafe_ops_per_block.rs:46:14 | LL | drop(u.u); | ^^^ note: raw pointer dereference occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:43:9 + --> $DIR/multiple_unsafe_ops_per_block.rs:47:9 | LL | *raw_ptr(); | ^^^^^^^^^^ error: this `unsafe` block contains 3 unsafe operations, expected only one - --> $DIR/multiple_unsafe_ops_per_block.rs:48:5 + --> $DIR/multiple_unsafe_ops_per_block.rs:52:5 | LL | / unsafe { LL | | asm!("nop"); @@ -50,23 +50,23 @@ LL | | } | |_____^ | note: inline assembly used here - --> $DIR/multiple_unsafe_ops_per_block.rs:49:9 + --> $DIR/multiple_unsafe_ops_per_block.rs:53:9 | LL | asm!("nop"); | ^^^^^^^^^^^ note: unsafe method call occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:50:9 + --> $DIR/multiple_unsafe_ops_per_block.rs:54:9 | LL | sample.not_very_safe(); | ^^^^^^^^^^^^^^^^^^^^^^ note: modification of a mutable static occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:51:9 + --> $DIR/multiple_unsafe_ops_per_block.rs:55:9 | LL | STATIC = 0; | ^^^^^^^^^^ error: this `unsafe` block contains 6 unsafe operations, expected only one - --> $DIR/multiple_unsafe_ops_per_block.rs:57:5 + --> $DIR/multiple_unsafe_ops_per_block.rs:61:5 | LL | / unsafe { LL | | drop(u.u); @@ -78,49 +78,49 @@ LL | | } | |_____^ | note: union field access occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:58:14 + --> $DIR/multiple_unsafe_ops_per_block.rs:62:14 | LL 
| drop(u.u); | ^^^ note: access of a mutable static occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:59:14 + --> $DIR/multiple_unsafe_ops_per_block.rs:63:14 | LL | drop(STATIC); | ^^^^^^ note: unsafe method call occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:60:9 + --> $DIR/multiple_unsafe_ops_per_block.rs:64:9 | LL | sample.not_very_safe(); | ^^^^^^^^^^^^^^^^^^^^^^ note: unsafe function call occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:61:9 + --> $DIR/multiple_unsafe_ops_per_block.rs:65:9 | LL | not_very_safe(); | ^^^^^^^^^^^^^^^ note: raw pointer dereference occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:62:9 + --> $DIR/multiple_unsafe_ops_per_block.rs:66:9 | LL | *raw_ptr(); | ^^^^^^^^^^ note: inline assembly used here - --> $DIR/multiple_unsafe_ops_per_block.rs:63:9 + --> $DIR/multiple_unsafe_ops_per_block.rs:67:9 | LL | asm!("nop"); | ^^^^^^^^^^^ error: this `unsafe` block contains 2 unsafe operations, expected only one - --> $DIR/multiple_unsafe_ops_per_block.rs:101:5 + --> $DIR/multiple_unsafe_ops_per_block.rs:105:5 | LL | unsafe { char::from_u32_unchecked(*ptr.cast::()) } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | note: unsafe function call occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:101:14 + --> $DIR/multiple_unsafe_ops_per_block.rs:105:14 | LL | unsafe { char::from_u32_unchecked(*ptr.cast::()) } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ note: raw pointer dereference occurs here - --> $DIR/multiple_unsafe_ops_per_block.rs:101:39 + --> $DIR/multiple_unsafe_ops_per_block.rs:105:39 | LL | unsafe { char::from_u32_unchecked(*ptr.cast::()) } | ^^^^^^^^^^^^^^^^^^ From 75676ebe86703efbdaf00c5586ed6208bbd7ad9b Mon Sep 17 00:00:00 2001 From: gftea <1705787+gftea@users.noreply.github.com> Date: Mon, 30 Jan 2023 19:17:48 +0100 Subject: [PATCH 204/501] Update crates/ide/src/file_structure.rs Co-authored-by: Jonas Schievink --- crates/ide/src/file_structure.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs index ab1365e317402..afcdf7b598378 100644 --- a/crates/ide/src/file_structure.rs +++ b/crates/ide/src/file_structure.rs @@ -161,9 +161,10 @@ fn structure_node(node: &SyntaxNode) -> Option { None => format!("impl {}", target_type.syntax().text()), Some(t) => { format!("impl {}{} for {}", - it.excl_token().map(|x| x.to_string()).unwrap_or_default(), - t.syntax().text(), - target_type.syntax().text(),) + it.excl_token().map(|x| x.to_string()).unwrap_or_default(), + t.syntax().text(), + target_type.syntax().text(), + ) } }; From 5356a8b1575514ba2a51d54e0e5565d9c001bd18 Mon Sep 17 00:00:00 2001 From: gftea Date: Mon, 30 Jan 2023 20:03:44 +0100 Subject: [PATCH 205/501] trim trailing whitespaces (#14044) --- crates/ide/src/file_structure.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs index afcdf7b598378..dcca3d323e82b 100644 --- a/crates/ide/src/file_structure.rs +++ b/crates/ide/src/file_structure.rs @@ -160,9 +160,9 @@ fn structure_node(node: &SyntaxNode) -> Option { let label = match target_trait { None => format!("impl {}", target_type.syntax().text()), Some(t) => { - format!("impl {}{} for {}", - it.excl_token().map(|x| x.to_string()).unwrap_or_default(), - t.syntax().text(), + format!("impl {}{} for {}", + it.excl_token().map(|x| x.to_string()).unwrap_or_default(), + t.syntax().text(), target_type.syntax().text(), ) } From 
04a4ac1cffd3acb9361b63221cec76eae3694e11 Mon Sep 17 00:00:00 2001 From: gftea Date: Mon, 30 Jan 2023 20:17:12 +0100 Subject: [PATCH 206/501] trim trailing whitespaces --- crates/ide/src/file_structure.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs index dcca3d323e82b..5303590f7fdb3 100644 --- a/crates/ide/src/file_structure.rs +++ b/crates/ide/src/file_structure.rs @@ -235,7 +235,7 @@ mod tests { }, ] "#]]); - } + } #[test] fn test_file_structure() { From fd1a9a93fee829ea471601bd932385a4ab4ac4d2 Mon Sep 17 00:00:00 2001 From: gftea Date: Mon, 30 Jan 2023 20:34:07 +0100 Subject: [PATCH 207/501] tidy by rustfmt --- crates/ide/src/file_structure.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/crates/ide/src/file_structure.rs b/crates/ide/src/file_structure.rs index 5303590f7fdb3..b23763dce8679 100644 --- a/crates/ide/src/file_structure.rs +++ b/crates/ide/src/file_structure.rs @@ -220,7 +220,9 @@ mod tests { #[test] fn test_nagative_trait_bound() { let txt = r#"impl !Unpin for Test {}"#; - check(txt, expect![[r#" + check( + txt, + expect![[r#" [ StructureNode { parent: None, @@ -234,7 +236,8 @@ mod tests { deprecated: false, }, ] - "#]]); + "#]], + ); } #[test] From 821199c885b4e3648682ac41c1d32b832407112f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Mon, 2 Jan 2023 18:00:33 -0800 Subject: [PATCH 208/501] Modify primary span label for E0308 The previous output was unintuitive to users. --- tests/ui/track-diagnostics.stderr | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/ui/track-diagnostics.stderr b/tests/ui/track-diagnostics.stderr index ec30318625311..39418d359288b 100644 --- a/tests/ui/track-diagnostics.stderr +++ b/tests/ui/track-diagnostics.stderr @@ -2,7 +2,7 @@ error[E0308]: mismatched types --> $DIR/track-diagnostics.rs:LL:CC | LL | const S: A = B; - | ^ expected struct `A`, found struct `B` + | ^ expected `A`, found `B` -Ztrack-diagnostics: created at compiler/rustc_infer/src/infer/error_reporting/mod.rs:LL:CC error: aborting due to previous error From c959813bfd376322491f0a7b89d7dce79d5af942 Mon Sep 17 00:00:00 2001 From: Tyler Weaver Date: Mon, 30 Jan 2023 07:59:01 -0700 Subject: [PATCH 209/501] needless_lifetimes: macro test Signed-off-by: Tyler Weaver --- tests/ui/needless_lifetimes.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/ui/needless_lifetimes.rs b/tests/ui/needless_lifetimes.rs index 2efc936752ef9..ac467cbbdf1e9 100644 --- a/tests/ui/needless_lifetimes.rs +++ b/tests/ui/needless_lifetimes.rs @@ -495,4 +495,17 @@ mod pr_9743_output_lifetime_checks { } } +mod skip_inside_macros { + macro_rules! 
print_with_one_input { + ($a:expr) => { + fn print_with_one_input<'a>(x: &'a u8) -> &'a u8 { + println!("{}", $a); + unimplemented!() + } + }; + } + + print_with_one_input!("this is a dandy little string literal"); +} + fn main() {} From 4fde96c30eab3845905f8eb0b074c1168877dd95 Mon Sep 17 00:00:00 2001 From: Tyler Weaver Date: Mon, 30 Jan 2023 16:04:19 -0700 Subject: [PATCH 210/501] Test needless_lifetimes within external macro Signed-off-by: Tyler Weaver --- clippy_lints/src/lifetimes.rs | 5 +- tests/ui/auxiliary/macro_rules.rs | 9 +++ tests/ui/needless_lifetimes.rs | 19 +++-- tests/ui/needless_lifetimes.stderr | 117 ++++++++++++++++------------- 4 files changed, 89 insertions(+), 61 deletions(-) diff --git a/clippy_lints/src/lifetimes.rs b/clippy_lints/src/lifetimes.rs index 7cf1a6b8084a6..ef9ac96ace5c7 100644 --- a/clippy_lints/src/lifetimes.rs +++ b/clippy_lints/src/lifetimes.rs @@ -13,8 +13,9 @@ use rustc_hir::{ ImplItemKind, Item, ItemKind, Lifetime, LifetimeName, LifetimeParamKind, PolyTraitRef, PredicateOrigin, TraitFn, TraitItem, TraitItemKind, Ty, TyKind, WherePredicate, }; -use rustc_lint::{LateContext, LateLintPass}; +use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::hir::nested_filter as middle_nested_filter; +use rustc_middle::lint::in_external_macro; use rustc_middle::ty::TyCtxt; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::def_id::LocalDefId; @@ -144,7 +145,7 @@ fn check_fn_inner<'tcx>( span: Span, report_extra_lifetimes: bool, ) { - if span.from_expansion() || has_where_lifetimes(cx, generics) { + if in_external_macro(cx.sess(), span) || has_where_lifetimes(cx, generics) { return; } diff --git a/tests/ui/auxiliary/macro_rules.rs b/tests/ui/auxiliary/macro_rules.rs index f74a614eefe9a..a13af56520389 100644 --- a/tests/ui/auxiliary/macro_rules.rs +++ b/tests/ui/auxiliary/macro_rules.rs @@ -159,3 +159,12 @@ macro_rules! unsafe_macro { } }; } + +#[macro_export] +macro_rules! needless_lifetime { + () => { + fn needless_lifetime<'a>(x: &'a u8) -> &'a u8 { + unimplemented!() + } + }; +} diff --git a/tests/ui/needless_lifetimes.rs b/tests/ui/needless_lifetimes.rs index ac467cbbdf1e9..78493c6d06726 100644 --- a/tests/ui/needless_lifetimes.rs +++ b/tests/ui/needless_lifetimes.rs @@ -1,3 +1,4 @@ +// aux-build:macro_rules.rs #![warn(clippy::needless_lifetimes)] #![allow( dead_code, @@ -8,6 +9,9 @@ clippy::get_first )] +#[macro_use] +extern crate macro_rules; + fn distinct_lifetimes<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: u8) {} fn distinct_and_static<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: &'static u8) {} @@ -495,17 +499,20 @@ mod pr_9743_output_lifetime_checks { } } -mod skip_inside_macros { - macro_rules! print_with_one_input { - ($a:expr) => { - fn print_with_one_input<'a>(x: &'a u8) -> &'a u8 { - println!("{}", $a); +mod in_macro { + macro_rules! 
local_one_input_macro { + () => { + fn one_input<'a>(x: &'a u8) -> &'a u8 { unimplemented!() } }; } - print_with_one_input!("this is a dandy little string literal"); + // lint local macro expands to function with needless lifetimes + local_one_input_macro!(); + + // no lint on external macro + macro_rules::needless_lifetime!(); } fn main() {} diff --git a/tests/ui/needless_lifetimes.stderr b/tests/ui/needless_lifetimes.stderr index 5a7cf13c86dde..9d02626956e0f 100644 --- a/tests/ui/needless_lifetimes.stderr +++ b/tests/ui/needless_lifetimes.stderr @@ -1,5 +1,5 @@ error: the following explicit lifetimes could be elided: 'a, 'b - --> $DIR/needless_lifetimes.rs:11:1 + --> $DIR/needless_lifetimes.rs:15:1 | LL | fn distinct_lifetimes<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -7,310 +7,321 @@ LL | fn distinct_lifetimes<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: u8) {} = note: `-D clippy::needless-lifetimes` implied by `-D warnings` error: the following explicit lifetimes could be elided: 'a, 'b - --> $DIR/needless_lifetimes.rs:13:1 + --> $DIR/needless_lifetimes.rs:17:1 | LL | fn distinct_and_static<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: &'static u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:23:1 + --> $DIR/needless_lifetimes.rs:27:1 | LL | fn in_and_out<'a>(x: &'a u8, _y: u8) -> &'a u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:35:1 + --> $DIR/needless_lifetimes.rs:39:1 | LL | fn multiple_in_and_out_2a<'a, 'b>(x: &'a u8, _y: &'b u8) -> &'a u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:42:1 + --> $DIR/needless_lifetimes.rs:46:1 | LL | fn multiple_in_and_out_2b<'a, 'b>(_x: &'a u8, y: &'b u8) -> &'b u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:59:1 + --> $DIR/needless_lifetimes.rs:63:1 | LL | fn deep_reference_1a<'a, 'b>(x: &'a u8, _y: &'b u8) -> Result<&'a u8, ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:66:1 + --> $DIR/needless_lifetimes.rs:70:1 | LL | fn deep_reference_1b<'a, 'b>(_x: &'a u8, y: &'b u8) -> Result<&'b u8, ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:75:1 + --> $DIR/needless_lifetimes.rs:79:1 | LL | fn deep_reference_3<'a>(x: &'a u8, _y: u8) -> Result<&'a u8, ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:80:1 + --> $DIR/needless_lifetimes.rs:84:1 | LL | fn where_clause_without_lt<'a, T>(x: &'a u8, _y: u8) -> Result<&'a u8, ()> | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a, 'b - --> $DIR/needless_lifetimes.rs:92:1 + --> $DIR/needless_lifetimes.rs:96:1 | LL | fn lifetime_param_2<'a, 'b>(_x: Ref<'a>, _y: &'b u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: replace with `'_` in 
generic arguments such as here - --> $DIR/needless_lifetimes.rs:92:37 + --> $DIR/needless_lifetimes.rs:96:37 | LL | fn lifetime_param_2<'a, 'b>(_x: Ref<'a>, _y: &'b u8) {} | ^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:116:1 + --> $DIR/needless_lifetimes.rs:120:1 | LL | fn fn_bound_2<'a, F, I>(_m: Lt<'a, I>, _f: F) -> Lt<'a, I> | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:116:32 + --> $DIR/needless_lifetimes.rs:120:32 | LL | fn fn_bound_2<'a, F, I>(_m: Lt<'a, I>, _f: F) -> Lt<'a, I> | ^^ error: the following explicit lifetimes could be elided: 's - --> $DIR/needless_lifetimes.rs:146:5 + --> $DIR/needless_lifetimes.rs:150:5 | LL | fn self_and_out<'s>(&'s self) -> &'s u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 't - --> $DIR/needless_lifetimes.rs:153:5 + --> $DIR/needless_lifetimes.rs:157:5 | LL | fn self_and_in_out_1<'s, 't>(&'s self, _x: &'t u8) -> &'s u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 's - --> $DIR/needless_lifetimes.rs:160:5 + --> $DIR/needless_lifetimes.rs:164:5 | LL | fn self_and_in_out_2<'s, 't>(&'s self, x: &'t u8) -> &'t u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 's, 't - --> $DIR/needless_lifetimes.rs:164:5 + --> $DIR/needless_lifetimes.rs:168:5 | LL | fn distinct_self_and_in<'s, 't>(&'s self, _x: &'t u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:183:1 + --> $DIR/needless_lifetimes.rs:187:1 | LL | fn struct_with_lt<'a>(_foo: Foo<'a>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:183:33 + --> $DIR/needless_lifetimes.rs:187:33 | LL | fn struct_with_lt<'a>(_foo: Foo<'a>) -> &'a str { | ^^ error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:201:1 + --> $DIR/needless_lifetimes.rs:205:1 | LL | fn struct_with_lt4a<'a, 'b>(_foo: &'a Foo<'b>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:201:43 + --> $DIR/needless_lifetimes.rs:205:43 | LL | fn struct_with_lt4a<'a, 'b>(_foo: &'a Foo<'b>) -> &'a str { | ^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:209:1 + --> $DIR/needless_lifetimes.rs:213:1 | LL | fn struct_with_lt4b<'a, 'b>(_foo: &'a Foo<'b>) -> &'b str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:224:1 + --> $DIR/needless_lifetimes.rs:228:1 | LL | fn trait_obj_elided2<'a>(_arg: &'a dyn Drop) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:230:1 + --> $DIR/needless_lifetimes.rs:234:1 | LL | fn alias_with_lt<'a>(_foo: FooAlias<'a>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:230:37 + --> $DIR/needless_lifetimes.rs:234:37 | LL | fn 
alias_with_lt<'a>(_foo: FooAlias<'a>) -> &'a str { | ^^ error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:248:1 + --> $DIR/needless_lifetimes.rs:252:1 | LL | fn alias_with_lt4a<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:248:47 + --> $DIR/needless_lifetimes.rs:252:47 | LL | fn alias_with_lt4a<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'a str { | ^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:256:1 + --> $DIR/needless_lifetimes.rs:260:1 | LL | fn alias_with_lt4b<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'b str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:260:1 + --> $DIR/needless_lifetimes.rs:264:1 | LL | fn named_input_elided_output<'a>(_arg: &'a str) -> &str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:268:1 + --> $DIR/needless_lifetimes.rs:272:1 | LL | fn trait_bound_ok<'a, T: WithLifetime<'static>>(_: &'a u8, _: T) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:304:1 + --> $DIR/needless_lifetimes.rs:308:1 | LL | fn out_return_type_lts<'a>(e: &'a str) -> Cow<'a> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:304:47 + --> $DIR/needless_lifetimes.rs:308:47 | LL | fn out_return_type_lts<'a>(e: &'a str) -> Cow<'a> { | ^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:311:9 + --> $DIR/needless_lifetimes.rs:315:9 | LL | fn needless_lt<'a>(x: &'a u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:315:9 + --> $DIR/needless_lifetimes.rs:319:9 | LL | fn needless_lt<'a>(_x: &'a u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:328:9 + --> $DIR/needless_lifetimes.rs:332:9 | LL | fn baz<'a>(&'a self) -> impl Foo + 'a { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:360:5 + --> $DIR/needless_lifetimes.rs:364:5 | LL | fn impl_trait_elidable_nested_anonymous_lifetimes<'a>(i: &'a i32, f: impl Fn(&i32) -> &i32) -> &'a i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:369:5 + --> $DIR/needless_lifetimes.rs:373:5 | LL | fn generics_elidable<'a, T: Fn(&i32) -> &i32>(i: &'a i32, f: T) -> &'a i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:381:5 + --> $DIR/needless_lifetimes.rs:385:5 | LL | fn where_clause_elidadable<'a, T>(i: &'a i32, f: T) -> &'a i32 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:396:5 + --> $DIR/needless_lifetimes.rs:400:5 | LL | fn 
pointer_fn_elidable<'a>(i: &'a i32, f: fn(&i32) -> &i32) -> &'a i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:409:5 + --> $DIR/needless_lifetimes.rs:413:5 | LL | fn nested_fn_pointer_3<'a>(_: &'a i32) -> fn(fn(&i32) -> &i32) -> i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:412:5 + --> $DIR/needless_lifetimes.rs:416:5 | LL | fn nested_fn_pointer_4<'a>(_: &'a i32) -> impl Fn(fn(&i32)) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:434:9 + --> $DIR/needless_lifetimes.rs:438:9 | LL | fn implicit<'a>(&'a self) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:437:9 + --> $DIR/needless_lifetimes.rs:441:9 | LL | fn implicit_mut<'a>(&'a mut self) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:448:9 + --> $DIR/needless_lifetimes.rs:452:9 | LL | fn lifetime_elsewhere<'a>(self: Box, here: &'a ()) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:454:9 + --> $DIR/needless_lifetimes.rs:458:9 | LL | fn implicit<'a>(&'a self) -> &'a (); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:455:9 + --> $DIR/needless_lifetimes.rs:459:9 | LL | fn implicit_provided<'a>(&'a self) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:464:9 + --> $DIR/needless_lifetimes.rs:468:9 | LL | fn lifetime_elsewhere<'a>(self: Box, here: &'a ()) -> &'a (); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:465:9 + --> $DIR/needless_lifetimes.rs:469:9 | LL | fn lifetime_elsewhere_provided<'a>(self: Box, here: &'a ()) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:474:5 + --> $DIR/needless_lifetimes.rs:478:5 | LL | fn foo<'a>(x: &'a u8, y: &'_ u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:476:5 + --> $DIR/needless_lifetimes.rs:480:5 | LL | fn bar<'a>(x: &'a u8, y: &'_ u8, z: &'_ u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:483:5 + --> $DIR/needless_lifetimes.rs:487:5 | LL | fn one_input<'a>(x: &'a u8) -> &'a u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:488:5 + --> $DIR/needless_lifetimes.rs:492:5 | LL | fn multiple_inputs_output_not_elided<'a, 'b>(x: &'a u8, y: &'b u8, z: &'b u8) -> &'b u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 45 previous errors +error: the following explicit 
lifetimes could be elided: 'a + --> $DIR/needless_lifetimes.rs:505:13 + | +LL | fn one_input<'a>(x: &'a u8) -> &'a u8 { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +... +LL | local_one_input_macro!(); + | ------------------------ in this macro invocation + | + = note: this error originates in the macro `local_one_input_macro` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to 46 previous errors From 2432e97d6ab7e44390542c00229344bf45fcaddd Mon Sep 17 00:00:00 2001 From: Tyler Weaver Date: Sat, 28 Jan 2023 18:45:57 -0700 Subject: [PATCH 211/501] wildcard_enum_match_arm lint takes the enum origin into account Signed-off-by: Tyler Weaver --- clippy_lints/src/matches/match_wild_enum.rs | 17 +++++++++++------ .../ui/match_wildcard_for_single_variants.fixed | 2 +- .../match_wildcard_for_single_variants.stderr | 8 +++++++- tests/ui/wildcard_enum_match_arm.fixed | 2 +- tests/ui/wildcard_enum_match_arm.stderr | 4 ++-- 5 files changed, 22 insertions(+), 11 deletions(-) diff --git a/clippy_lints/src/matches/match_wild_enum.rs b/clippy_lints/src/matches/match_wild_enum.rs index 59de8c0384ba0..05cac0f997976 100644 --- a/clippy_lints/src/matches/match_wild_enum.rs +++ b/clippy_lints/src/matches/match_wild_enum.rs @@ -45,8 +45,12 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { // Accumulate the variants which should be put in place of the wildcard because they're not // already covered. - let has_hidden = adt_def.variants().iter().any(|x| is_hidden(cx, x)); - let mut missing_variants: Vec<_> = adt_def.variants().iter().filter(|x| !is_hidden(cx, x)).collect(); + let has_hidden_external = adt_def.variants().iter().any(|x| is_hidden_and_external(cx, x)); + let mut missing_variants: Vec<_> = adt_def + .variants() + .iter() + .filter(|x| !is_hidden_and_external(cx, x)) + .collect(); let mut path_prefix = CommonPrefixSearcher::None; for arm in arms { @@ -133,7 +137,7 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { match missing_variants.as_slice() { [] => (), - [x] if !adt_def.is_variant_list_non_exhaustive() && !has_hidden => span_lint_and_sugg( + [x] if !adt_def.is_variant_list_non_exhaustive() && !has_hidden_external => span_lint_and_sugg( cx, MATCH_WILDCARD_FOR_SINGLE_VARIANTS, wildcard_span, @@ -144,7 +148,7 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { ), variants => { let mut suggestions: Vec<_> = variants.iter().copied().map(format_suggestion).collect(); - let message = if adt_def.is_variant_list_non_exhaustive() || has_hidden { + let message = if adt_def.is_variant_list_non_exhaustive() || has_hidden_external { suggestions.push("_".into()); "wildcard matches known variants and will also match future added variants" } else { @@ -191,6 +195,7 @@ impl<'a> CommonPrefixSearcher<'a> { } } -fn is_hidden(cx: &LateContext<'_>, variant_def: &VariantDef) -> bool { - cx.tcx.is_doc_hidden(variant_def.def_id) || cx.tcx.has_attr(variant_def.def_id, sym::unstable) +fn is_hidden_and_external(cx: &LateContext<'_>, variant_def: &VariantDef) -> bool { + (cx.tcx.is_doc_hidden(variant_def.def_id) || cx.tcx.has_attr(variant_def.def_id, sym::unstable)) + && variant_def.def_id.as_local().is_none() } diff --git a/tests/ui/match_wildcard_for_single_variants.fixed b/tests/ui/match_wildcard_for_single_variants.fixed index fc252cdd35294..9fd3739b69c2c 100644 --- a/tests/ui/match_wildcard_for_single_variants.fixed +++ b/tests/ui/match_wildcard_for_single_variants.fixed @@ -123,7 +123,7 @@ fn main() { 
Enum::A => (), Enum::B => (), Enum::C => (), - _ => (), + Enum::__Private => (), } match Enum::A { Enum::A => (), diff --git a/tests/ui/match_wildcard_for_single_variants.stderr b/tests/ui/match_wildcard_for_single_variants.stderr index 6fa313dc91114..105b4c4b41d1e 100644 --- a/tests/ui/match_wildcard_for_single_variants.stderr +++ b/tests/ui/match_wildcard_for_single_variants.stderr @@ -48,11 +48,17 @@ error: wildcard matches only a single variant and will also match any future add LL | _ => (), | ^ help: try this: `Color::Blue` +error: wildcard matches only a single variant and will also match any future added variants + --> $DIR/match_wildcard_for_single_variants.rs:126:13 + | +LL | _ => (), + | ^ help: try this: `Enum::__Private` + error: wildcard matches only a single variant and will also match any future added variants --> $DIR/match_wildcard_for_single_variants.rs:153:13 | LL | _ => 2, | ^ help: try this: `Foo::B` -error: aborting due to 9 previous errors +error: aborting due to 10 previous errors diff --git a/tests/ui/wildcard_enum_match_arm.fixed b/tests/ui/wildcard_enum_match_arm.fixed index 23607497841e4..293bf75a71762 100644 --- a/tests/ui/wildcard_enum_match_arm.fixed +++ b/tests/ui/wildcard_enum_match_arm.fixed @@ -96,7 +96,7 @@ fn main() { } match Enum::A { Enum::A => (), - Enum::B | _ => (), + Enum::B | Enum::__Private => (), } } } diff --git a/tests/ui/wildcard_enum_match_arm.stderr b/tests/ui/wildcard_enum_match_arm.stderr index efecc9576cc7b..30d29aa4e77a1 100644 --- a/tests/ui/wildcard_enum_match_arm.stderr +++ b/tests/ui/wildcard_enum_match_arm.stderr @@ -34,11 +34,11 @@ error: wildcard matches known variants and will also match future added variants LL | _ => {}, | ^ help: try this: `ErrorKind::PermissionDenied | _` -error: wildcard matches known variants and will also match future added variants +error: wildcard match will also match any future added variants --> $DIR/wildcard_enum_match_arm.rs:99:13 | LL | _ => (), - | ^ help: try this: `Enum::B | _` + | ^ help: try this: `Enum::B | Enum::__Private` error: aborting due to 6 previous errors From c531b09eb85e4dff966b6bfb93e7fbf128fdfba0 Mon Sep 17 00:00:00 2001 From: Tyler Weaver Date: Mon, 30 Jan 2023 15:46:34 -0700 Subject: [PATCH 212/501] Check external before hidden --- clippy_lints/src/matches/match_wild_enum.rs | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/clippy_lints/src/matches/match_wild_enum.rs b/clippy_lints/src/matches/match_wild_enum.rs index 05cac0f997976..3b35c04620c1c 100644 --- a/clippy_lints/src/matches/match_wild_enum.rs +++ b/clippy_lints/src/matches/match_wild_enum.rs @@ -3,6 +3,7 @@ use clippy_utils::ty::is_type_diagnostic_item; use clippy_utils::{is_refutable, peel_hir_pat_refs, recurse_or_patterns}; use rustc_errors::Applicability; use rustc_hir::def::{CtorKind, DefKind, Res}; +use rustc_hir::def_id::DefId; use rustc_hir::{Arm, Expr, PatKind, PathSegment, QPath, Ty, TyKind}; use rustc_lint::LateContext; use rustc_middle::ty::{self, VariantDef}; @@ -45,11 +46,11 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { // Accumulate the variants which should be put in place of the wildcard because they're not // already covered. 
- let has_hidden_external = adt_def.variants().iter().any(|x| is_hidden_and_external(cx, x)); + let has_hidden_external = adt_def.variants().iter().any(|x| is_external_and_hidden(cx, x)); let mut missing_variants: Vec<_> = adt_def .variants() .iter() - .filter(|x| !is_hidden_and_external(cx, x)) + .filter(|x| !is_external_and_hidden(cx, x)) .collect(); let mut path_prefix = CommonPrefixSearcher::None; @@ -195,7 +196,14 @@ impl<'a> CommonPrefixSearcher<'a> { } } -fn is_hidden_and_external(cx: &LateContext<'_>, variant_def: &VariantDef) -> bool { - (cx.tcx.is_doc_hidden(variant_def.def_id) || cx.tcx.has_attr(variant_def.def_id, sym::unstable)) - && variant_def.def_id.as_local().is_none() +fn is_external_and_hidden(cx: &LateContext<'_>, variant_def: &VariantDef) -> bool { + is_external(variant_def.def_id) && is_hidden(cx, variant_def) +} + +fn is_hidden(cx: &LateContext<'_>, variant_def: &VariantDef) -> bool { + cx.tcx.is_doc_hidden(variant_def.def_id) || cx.tcx.has_attr(variant_def.def_id, sym::unstable) +} + +fn is_external(def_id: DefId) -> bool { + def_id.as_local().is_none() } From df7cdf732d3679f21c4a36808fb43b832f0a6725 Mon Sep 17 00:00:00 2001 From: Tyler Weaver Date: Mon, 30 Jan 2023 17:29:17 -0700 Subject: [PATCH 213/501] Pull the is_external test out of the loop --- clippy_lints/src/matches/match_wild_enum.rs | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/clippy_lints/src/matches/match_wild_enum.rs b/clippy_lints/src/matches/match_wild_enum.rs index 3b35c04620c1c..3126b590180e6 100644 --- a/clippy_lints/src/matches/match_wild_enum.rs +++ b/clippy_lints/src/matches/match_wild_enum.rs @@ -3,7 +3,6 @@ use clippy_utils::ty::is_type_diagnostic_item; use clippy_utils::{is_refutable, peel_hir_pat_refs, recurse_or_patterns}; use rustc_errors::Applicability; use rustc_hir::def::{CtorKind, DefKind, Res}; -use rustc_hir::def_id::DefId; use rustc_hir::{Arm, Expr, PatKind, PathSegment, QPath, Ty, TyKind}; use rustc_lint::LateContext; use rustc_middle::ty::{self, VariantDef}; @@ -46,11 +45,12 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { // Accumulate the variants which should be put in place of the wildcard because they're not // already covered. 
- let has_hidden_external = adt_def.variants().iter().any(|x| is_external_and_hidden(cx, x)); + let is_external = adt_def.did().as_local().is_none(); + let has_external_hidden = is_external && adt_def.variants().iter().any(|x| is_hidden(cx, x)); let mut missing_variants: Vec<_> = adt_def .variants() .iter() - .filter(|x| !is_external_and_hidden(cx, x)) + .filter(|x| !(is_external && is_hidden(cx, x))) .collect(); let mut path_prefix = CommonPrefixSearcher::None; @@ -138,7 +138,7 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { match missing_variants.as_slice() { [] => (), - [x] if !adt_def.is_variant_list_non_exhaustive() && !has_hidden_external => span_lint_and_sugg( + [x] if !adt_def.is_variant_list_non_exhaustive() && !has_external_hidden => span_lint_and_sugg( cx, MATCH_WILDCARD_FOR_SINGLE_VARIANTS, wildcard_span, @@ -149,7 +149,7 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { ), variants => { let mut suggestions: Vec<_> = variants.iter().copied().map(format_suggestion).collect(); - let message = if adt_def.is_variant_list_non_exhaustive() || has_hidden_external { + let message = if adt_def.is_variant_list_non_exhaustive() || has_external_hidden { suggestions.push("_".into()); "wildcard matches known variants and will also match future added variants" } else { @@ -196,14 +196,6 @@ impl<'a> CommonPrefixSearcher<'a> { } } -fn is_external_and_hidden(cx: &LateContext<'_>, variant_def: &VariantDef) -> bool { - is_external(variant_def.def_id) && is_hidden(cx, variant_def) -} - fn is_hidden(cx: &LateContext<'_>, variant_def: &VariantDef) -> bool { cx.tcx.is_doc_hidden(variant_def.def_id) || cx.tcx.has_attr(variant_def.def_id, sym::unstable) } - -fn is_external(def_id: DefId) -> bool { - def_id.as_local().is_none() -} From b4e2b48270008b2c4fa84f4d6270e13a93f1ac14 Mon Sep 17 00:00:00 2001 From: Manish Goregaokar Date: Mon, 30 Jan 2023 19:28:27 -0800 Subject: [PATCH 214/501] Mark uninlined_format_args as pedantic --- clippy_lints/src/format_args.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clippy_lints/src/format_args.rs b/clippy_lints/src/format_args.rs index ab05d4688ccf4..d6b02f174f938 100644 --- a/clippy_lints/src/format_args.rs +++ b/clippy_lints/src/format_args.rs @@ -125,7 +125,7 @@ declare_clippy_lint! { /// nothing will be suggested, e.g. `println!("{0}={1}", var, 1+2)`. 
#[clippy::version = "1.66.0"] pub UNINLINED_FORMAT_ARGS, - style, + pedantic, "using non-inlined variables in `format!` calls" } From 70bfcc2518dc431cf20cd7d088b954fa348f17d9 Mon Sep 17 00:00:00 2001 From: SpanishPear Date: Tue, 31 Jan 2023 21:44:11 +1100 Subject: [PATCH 215/501] move to multipart spans --- compiler/rustc_parse/src/parser/diagnostics.rs | 11 ++++++----- .../ui/parser/suggest_misplaced_generics/enum.stderr | 5 +++-- .../fn-complex-generics.stderr | 5 +++-- .../suggest_misplaced_generics/fn-simple.stderr | 5 +++-- .../parser/suggest_misplaced_generics/struct.stderr | 5 +++-- .../ui/parser/suggest_misplaced_generics/trait.stderr | 5 +++-- .../ui/parser/suggest_misplaced_generics/type.stderr | 5 +++-- 7 files changed, 24 insertions(+), 17 deletions(-) diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 9ac3bb946dc42..1740f2c2c8455 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -352,12 +352,13 @@ impl<'a> Parser<'a> { // if there is a `<` after the fn name, then don't show a suggestion, show help if !self.look_ahead(1, |t| *t == token::Lt) && - let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) && - let Ok(ident) = self.sess.source_map().span_to_snippet(self.token.span) { - err.span_suggestion_verbose( - generic.span.to(self.token.span), + let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) { + err.multipart_suggestion_verbose( format!("place the generic parameter name after the {ident_name} name"), - format!(" {ident}{snippet}"), + vec![ + (self.token.span.shrink_to_hi(), snippet), + (generic.span, String::new()) + ], Applicability::MaybeIncorrect, ); } else { diff --git a/tests/ui/parser/suggest_misplaced_generics/enum.stderr b/tests/ui/parser/suggest_misplaced_generics/enum.stderr index 521cee4f72898..5f5947627ee5c 100644 --- a/tests/ui/parser/suggest_misplaced_generics/enum.stderr +++ b/tests/ui/parser/suggest_misplaced_generics/enum.stderr @@ -6,8 +6,9 @@ LL | enum Foo { Variant(T) } | help: place the generic parameter name after the enum name | -LL | enum Foo { Variant(T) } - | ~~~~~~ +LL - enum Foo { Variant(T) } +LL + enum Foo { Variant(T) } + | error: aborting due to previous error diff --git a/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr b/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr index 7d1b44c44944c..061d0910a742d 100644 --- a/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr +++ b/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.stderr @@ -6,8 +6,9 @@ LL | fn<'a, B: 'a + std::ops::Add> f(_x: B) { } | help: place the generic parameter name after the fn name | -LL | fn f<'a, B: 'a + std::ops::Add>(_x: B) { } - | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +LL - fn<'a, B: 'a + std::ops::Add> f(_x: B) { } +LL + fn f<'a, B: 'a + std::ops::Add>(_x: B) { } + | error: aborting due to previous error diff --git a/tests/ui/parser/suggest_misplaced_generics/fn-simple.stderr b/tests/ui/parser/suggest_misplaced_generics/fn-simple.stderr index 40c4581e513ad..e749f1a0d00d6 100644 --- a/tests/ui/parser/suggest_misplaced_generics/fn-simple.stderr +++ b/tests/ui/parser/suggest_misplaced_generics/fn-simple.stderr @@ -6,8 +6,9 @@ LL | fn id(x: T) -> T { x } | help: place the generic parameter name after the fn name | -LL | fn id(x: T) -> T { x } - | ~~~~~ +LL - fn id(x: T) -> T { x } +LL + fn id(x: T) -> T { x } + | error: aborting 
due to previous error diff --git a/tests/ui/parser/suggest_misplaced_generics/struct.stderr b/tests/ui/parser/suggest_misplaced_generics/struct.stderr index ab17ee57e0bcd..2b650907092d1 100644 --- a/tests/ui/parser/suggest_misplaced_generics/struct.stderr +++ b/tests/ui/parser/suggest_misplaced_generics/struct.stderr @@ -6,8 +6,9 @@ LL | struct Foo { x: T } | help: place the generic parameter name after the struct name | -LL | struct Foo { x: T } - | ~~~~~~ +LL - struct Foo { x: T } +LL + struct Foo { x: T } + | error: aborting due to previous error diff --git a/tests/ui/parser/suggest_misplaced_generics/trait.stderr b/tests/ui/parser/suggest_misplaced_generics/trait.stderr index 069683bda1be3..ac86cfa469704 100644 --- a/tests/ui/parser/suggest_misplaced_generics/trait.stderr +++ b/tests/ui/parser/suggest_misplaced_generics/trait.stderr @@ -6,8 +6,9 @@ LL | trait Foo { | help: place the generic parameter name after the trait name | -LL | trait Foo { - | ~~~~~~ +LL - trait Foo { +LL + trait Foo { + | error: aborting due to previous error diff --git a/tests/ui/parser/suggest_misplaced_generics/type.stderr b/tests/ui/parser/suggest_misplaced_generics/type.stderr index a2832965c6d0e..22744f6cf37fb 100644 --- a/tests/ui/parser/suggest_misplaced_generics/type.stderr +++ b/tests/ui/parser/suggest_misplaced_generics/type.stderr @@ -6,8 +6,9 @@ LL | type Foo = T; | help: place the generic parameter name after the type name | -LL | type Foo = T; - | ~~~~~~ +LL - type Foo = T; +LL + type Foo = T; + | error: aborting due to previous error From 249ea9502d3d69c3acbbae7a4cdfabcbf38de0c3 Mon Sep 17 00:00:00 2001 From: Maybe Waffle Date: Tue, 31 Jan 2023 10:39:25 +0000 Subject: [PATCH 216/501] Set "current" edition to 2021 --- crates/base-db/src/input.rs | 2 +- crates/hir-def/src/find_path.rs | 8 ++++---- crates/hir-def/src/nameres/tests.rs | 8 ++++---- crates/hir-def/src/nameres/tests/macros.rs | 6 +++--- crates/hir-ty/src/tests/method_resolution.rs | 4 ++-- crates/ide-completion/src/tests/special.rs | 8 ++++---- crates/project-model/src/tests.rs | 20 ++++++++++---------- 7 files changed, 28 insertions(+), 28 deletions(-) diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index ea0561772d225..db9589ca186ec 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -282,7 +282,7 @@ pub enum Edition { } impl Edition { - pub const CURRENT: Edition = Edition::Edition2018; + pub const CURRENT: Edition = Edition::Edition2021; } #[derive(Default, Debug, Clone, PartialEq, Eq)] diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index ddd7ad99e9ad3..3f43923208371 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -811,7 +811,7 @@ pub struct S; fn prelude() { check_found_path( r#" -//- /main.rs crate:main deps:std +//- /main.rs edition:2018 crate:main deps:std $0 //- /std.rs crate:std pub mod prelude { @@ -852,7 +852,7 @@ pub mod prelude { fn imported_prelude() { check_found_path( r#" -//- /main.rs crate:main deps:std +//- /main.rs edition:2018 crate:main deps:std use S; $0 //- /std.rs crate:std @@ -872,7 +872,7 @@ pub mod prelude { #[test] fn enum_variant_from_prelude() { let code = r#" -//- /main.rs crate:main deps:std +//- /main.rs edition:2018 crate:main deps:std $0 //- /std.rs crate:std pub mod prelude { @@ -1273,7 +1273,7 @@ fn f() { fn prelude_with_inner_items() { check_found_path( r#" -//- /main.rs crate:main deps:std +//- /main.rs edition:2018 crate:main deps:std fn f() { fn inner() {} $0 diff --git 
a/crates/hir-def/src/nameres/tests.rs b/crates/hir-def/src/nameres/tests.rs index 0d90047c28f6f..8a27c60df5c27 100644 --- a/crates/hir-def/src/nameres/tests.rs +++ b/crates/hir-def/src/nameres/tests.rs @@ -476,7 +476,7 @@ pub struct Bar; fn no_std_prelude() { check( r#" - //- /main.rs crate:main deps:core,std + //- /main.rs edition:2018 crate:main deps:core,std #![cfg_attr(not(never), no_std)] use Rust; @@ -544,7 +544,7 @@ fn edition_specific_preludes() { fn std_prelude_takes_precedence_above_core_prelude() { check( r#" -//- /main.rs crate:main deps:core,std +//- /main.rs edition:2018 crate:main deps:core,std use {Foo, Bar}; //- /std.rs crate:std deps:core @@ -574,7 +574,7 @@ pub mod prelude { fn cfg_not_test() { check( r#" -//- /main.rs crate:main deps:std +//- /main.rs edition:2018 crate:main deps:std use {Foo, Bar, Baz}; //- /lib.rs crate:std @@ -602,7 +602,7 @@ pub mod prelude { fn cfg_test() { check( r#" -//- /main.rs crate:main deps:std +//- /main.rs edition:2018 crate:main deps:std use {Foo, Bar, Baz}; //- /lib.rs crate:std cfg:test,feature=foo,feature=bar,opt=42 diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs index fe0ad4f3863c4..a4ccd14cbb463 100644 --- a/crates/hir-def/src/nameres/tests/macros.rs +++ b/crates/hir-def/src/nameres/tests/macros.rs @@ -264,7 +264,7 @@ fn prelude_is_macro_use() { cov_mark::check!(prelude_is_macro_use); check( r#" -//- /main.rs crate:main deps:std +//- /main.rs edition:2018 crate:main deps:std structs!(Foo); structs_priv!(Bar); structs_outside!(Out); @@ -634,7 +634,7 @@ fn macro_dollar_crate_is_correct_in_indirect_deps() { // From std check( r#" -//- /main.rs crate:main deps:std +//- /main.rs edition:2018 crate:main deps:std foo!(); //- /std.rs crate:std deps:core @@ -1034,7 +1034,7 @@ structs!(Foo); fn macro_in_prelude() { check( r#" -//- /lib.rs crate:lib deps:std +//- /lib.rs edition:2018 crate:lib deps:std global_asm!(); //- /std.rs crate:std diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs index 6c7a5329970d7..616ca8058e9df 100644 --- a/crates/hir-ty/src/tests/method_resolution.rs +++ b/crates/hir-ty/src/tests/method_resolution.rs @@ -813,7 +813,7 @@ fn test() { fn method_resolution_trait_from_prelude() { check_types( r#" -//- /main.rs crate:main deps:core +//- /main.rs edition:2018 crate:main deps:core struct S; impl Clone for S {} @@ -1527,7 +1527,7 @@ fn f(x: U2) { fn skip_array_during_method_dispatch() { check_types( r#" -//- /main2018.rs crate:main2018 deps:core +//- /main2018.rs crate:main2018 deps:core edition:2018 use core::IntoIterator; fn f() { diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index cad4af4937de5..6052b0623204e 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -61,7 +61,7 @@ fn _alpha() {} fn completes_prelude() { check( r#" -//- /main.rs crate:main deps:std +//- /main.rs edition:2018 crate:main deps:std fn foo() { let x: $0 } //- /std/lib.rs crate:std @@ -83,7 +83,7 @@ pub mod prelude { fn completes_prelude_macros() { check( r#" -//- /main.rs crate:main deps:std +//- /main.rs edition:2018 crate:main deps:std fn f() {$0} //- /std/lib.rs crate:std @@ -117,14 +117,14 @@ fn foo() { let x: $0 } //- /core/lib.rs crate:core pub mod prelude { - pub mod rust_2018 { + pub mod rust_2021 { pub struct Option; } } //- /std/lib.rs crate:std deps:core pub mod prelude { - pub mod rust_2018 { + pub mod rust_2021 { pub 
struct String; } } diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 505b660f34493..9e9691d11e854 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -1357,7 +1357,7 @@ fn rust_project_hello_world_project_model() { root_file_id: FileId( 1, ), - edition: Edition2018, + edition: Edition2021, version: None, display_name: Some( CrateDisplayName { @@ -1404,7 +1404,7 @@ fn rust_project_hello_world_project_model() { root_file_id: FileId( 2, ), - edition: Edition2018, + edition: Edition2021, version: None, display_name: Some( CrateDisplayName { @@ -1441,7 +1441,7 @@ fn rust_project_hello_world_project_model() { root_file_id: FileId( 3, ), - edition: Edition2018, + edition: Edition2021, version: None, display_name: Some( CrateDisplayName { @@ -1478,7 +1478,7 @@ fn rust_project_hello_world_project_model() { root_file_id: FileId( 4, ), - edition: Edition2018, + edition: Edition2021, version: None, display_name: Some( CrateDisplayName { @@ -1515,7 +1515,7 @@ fn rust_project_hello_world_project_model() { root_file_id: FileId( 5, ), - edition: Edition2018, + edition: Edition2021, version: None, display_name: Some( CrateDisplayName { @@ -1562,7 +1562,7 @@ fn rust_project_hello_world_project_model() { root_file_id: FileId( 6, ), - edition: Edition2018, + edition: Edition2021, version: None, display_name: Some( CrateDisplayName { @@ -1599,7 +1599,7 @@ fn rust_project_hello_world_project_model() { root_file_id: FileId( 7, ), - edition: Edition2018, + edition: Edition2021, version: None, display_name: Some( CrateDisplayName { @@ -1709,7 +1709,7 @@ fn rust_project_hello_world_project_model() { root_file_id: FileId( 8, ), - edition: Edition2018, + edition: Edition2021, version: None, display_name: Some( CrateDisplayName { @@ -1746,7 +1746,7 @@ fn rust_project_hello_world_project_model() { root_file_id: FileId( 9, ), - edition: Edition2018, + edition: Edition2021, version: None, display_name: Some( CrateDisplayName { @@ -1783,7 +1783,7 @@ fn rust_project_hello_world_project_model() { root_file_id: FileId( 10, ), - edition: Edition2018, + edition: Edition2021, version: None, display_name: Some( CrateDisplayName { From 183f1712638b804f88f1a91cd92a2cb33b68c543 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 31 Jan 2023 11:56:12 +0100 Subject: [PATCH 217/501] Remove support for 1.58 proc-macro abi --- .../proc-macro-srv/src/abis/abi_1_58/mod.rs | 104 -- .../abis/abi_1_58/proc_macro/bridge/buffer.rs | 143 --- .../abis/abi_1_58/proc_macro/bridge/client.rs | 485 -------- .../abi_1_58/proc_macro/bridge/closure.rs | 24 - .../abis/abi_1_58/proc_macro/bridge/handle.rs | 70 -- .../abis/abi_1_58/proc_macro/bridge/mod.rs | 429 ------- .../abis/abi_1_58/proc_macro/bridge/rpc.rs | 305 ----- .../abi_1_58/proc_macro/bridge/scoped_cell.rs | 81 -- .../abis/abi_1_58/proc_macro/bridge/server.rs | 352 ------ .../abis/abi_1_58/proc_macro/diagnostic.rs | 166 --- .../src/abis/abi_1_58/proc_macro/mod.rs | 1056 ----------------- .../src/abis/abi_1_58/proc_macro/quote.rs | 140 --- .../src/abis/abi_1_58/ra_server.rs | 822 ------------- crates/proc-macro-srv/src/abis/mod.rs | 9 - 14 files changed, 4186 deletions(-) delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/mod.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs delete mode 100644 
crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs delete mode 100644 crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs deleted file mode 100644 index 1c91ac0fa1b8f..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs +++ /dev/null @@ -1,104 +0,0 @@ -//! Macro ABI for version 1.58 of rustc - -#[allow(dead_code)] -#[doc(hidden)] -mod proc_macro; - -#[allow(dead_code)] -#[doc(hidden)] -mod ra_server; - -use libloading::Library; -use proc_macro_api::ProcMacroKind; - -use super::PanicMessage; - -pub(crate) struct Abi { - exported_macros: Vec, -} - -impl From for PanicMessage { - fn from(p: proc_macro::bridge::PanicMessage) -> Self { - Self { message: p.as_str().map(|s| s.to_string()) } - } -} - -impl Abi { - pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result { - let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> = - lib.get(symbol_name.as_bytes())?; - Ok(Self { exported_macros: macros.to_vec() }) - } - - pub fn expand( - &self, - macro_name: &str, - macro_body: &tt::Subtree, - attributes: Option<&tt::Subtree>, - ) -> Result { - let parsed_body = ra_server::TokenStream::with_subtree(macro_body.clone()); - - let parsed_attributes = attributes.map_or(ra_server::TokenStream::new(), |attr| { - ra_server::TokenStream::with_subtree(attr.clone()) - }); - - for proc_macro in &self.exported_macros { - match proc_macro { - proc_macro::bridge::client::ProcMacro::CustomDerive { - trait_name, client, .. - } if *trait_name == macro_name => { - let res = client.run( - &proc_macro::bridge::server::SameThread, - ra_server::RustAnalyzer::default(), - parsed_body, - true, - ); - return res.map(|it| it.into_subtree()).map_err(PanicMessage::from); - } - proc_macro::bridge::client::ProcMacro::Bang { name, client } - if *name == macro_name => - { - let res = client.run( - &proc_macro::bridge::server::SameThread, - ra_server::RustAnalyzer::default(), - parsed_body, - true, - ); - return res.map(|it| it.into_subtree()).map_err(PanicMessage::from); - } - proc_macro::bridge::client::ProcMacro::Attr { name, client } - if *name == macro_name => - { - let res = client.run( - &proc_macro::bridge::server::SameThread, - ra_server::RustAnalyzer::default(), - parsed_attributes, - parsed_body, - true, - ); - return res.map(|it| it.into_subtree()).map_err(PanicMessage::from); - } - _ => continue, - } - } - - Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into()) - } - - pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { - self.exported_macros - .iter() - .map(|proc_macro| match proc_macro { - proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { - (trait_name.to_string(), ProcMacroKind::CustomDerive) - } - proc_macro::bridge::client::ProcMacro::Bang { name, .. 
} => { - (name.to_string(), ProcMacroKind::FuncLike) - } - proc_macro::bridge::client::ProcMacro::Attr { name, .. } => { - (name.to_string(), ProcMacroKind::Attr) - } - }) - .collect() - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs deleted file mode 100644 index d82669d3e2336..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs +++ /dev/null @@ -1,143 +0,0 @@ -//! Buffer management for same-process client<->server communication. - -use std::io::{self, Write}; -use std::mem; -use std::ops::{Deref, DerefMut}; -use std::slice; - -#[repr(C)] -pub struct Buffer { - data: *mut T, - len: usize, - capacity: usize, - reserve: extern "C" fn(Buffer, usize) -> Buffer, - drop: extern "C" fn(Buffer), -} - -unsafe impl Sync for Buffer {} -unsafe impl Send for Buffer {} - -impl Default for Buffer { - fn default() -> Self { - Self::from(vec![]) - } -} - -impl Deref for Buffer { - type Target = [T]; - fn deref(&self) -> &[T] { - unsafe { slice::from_raw_parts(self.data as *const T, self.len) } - } -} - -impl DerefMut for Buffer { - fn deref_mut(&mut self) -> &mut [T] { - unsafe { slice::from_raw_parts_mut(self.data, self.len) } - } -} - -impl Buffer { - pub(super) fn new() -> Self { - Self::default() - } - - pub(super) fn clear(&mut self) { - self.len = 0; - } - - pub(super) fn take(&mut self) -> Self { - mem::take(self) - } - - // We have the array method separate from extending from a slice. This is - // because in the case of small arrays, codegen can be more efficient - // (avoiding a memmove call). With extend_from_slice, LLVM at least - // currently is not able to make that optimization. - pub(super) fn extend_from_array(&mut self, xs: &[T; N]) { - if xs.len() > (self.capacity - self.len) { - let b = self.take(); - *self = (b.reserve)(b, xs.len()); - } - unsafe { - xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len()); - self.len += xs.len(); - } - } - - pub(super) fn extend_from_slice(&mut self, xs: &[T]) { - if xs.len() > (self.capacity - self.len) { - let b = self.take(); - *self = (b.reserve)(b, xs.len()); - } - unsafe { - xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len()); - self.len += xs.len(); - } - } - - pub(super) fn push(&mut self, v: T) { - // The code here is taken from Vec::push, and we know that reserve() - // will panic if we're exceeding isize::MAX bytes and so there's no need - // to check for overflow. - if self.len == self.capacity { - let b = self.take(); - *self = (b.reserve)(b, 1); - } - unsafe { - *self.data.add(self.len) = v; - self.len += 1; - } - } -} - -impl Write for Buffer { - fn write(&mut self, xs: &[u8]) -> io::Result { - self.extend_from_slice(xs); - Ok(xs.len()) - } - - fn write_all(&mut self, xs: &[u8]) -> io::Result<()> { - self.extend_from_slice(xs); - Ok(()) - } - - fn flush(&mut self) -> io::Result<()> { - Ok(()) - } -} - -impl Drop for Buffer { - fn drop(&mut self) { - let b = self.take(); - (b.drop)(b); - } -} - -impl From> for Buffer { - fn from(mut v: Vec) -> Self { - let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity()); - mem::forget(v); - - // This utility function is nested in here because it can *only* - // be safely called on `Buffer`s created by *this* `proc_macro`. - fn to_vec(b: Buffer) -> Vec { - unsafe { - let Buffer { data, len, capacity, .. 
} = b; - mem::forget(b); - Vec::from_raw_parts(data, len, capacity) - } - } - - extern "C" fn reserve(b: Buffer, additional: usize) -> Buffer { - let mut v = to_vec(b); - v.reserve(additional); - Buffer::from(v) - } - - extern "C" fn drop(b: Buffer) { - mem::drop(to_vec(b)); - } - - Buffer { data, len, capacity, reserve, drop } - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs deleted file mode 100644 index e78842f5c37ec..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs +++ /dev/null @@ -1,485 +0,0 @@ -//! Client-side types. - -use super::*; - -macro_rules! define_handles { - ( - 'owned: $($oty:ident,)* - 'interned: $($ity:ident,)* - ) => { - #[repr(C)] - #[allow(non_snake_case)] - pub struct HandleCounters { - $($oty: AtomicUsize,)* - $($ity: AtomicUsize,)* - } - - impl HandleCounters { - // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of - // a wrapper `fn` pointer, once `const fn` can reference `static`s. - extern "C" fn get() -> &'static Self { - static COUNTERS: HandleCounters = HandleCounters { - $($oty: AtomicUsize::new(1),)* - $($ity: AtomicUsize::new(1),)* - }; - &COUNTERS - } - } - - // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`. - #[repr(C)] - #[allow(non_snake_case)] - pub(super) struct HandleStore { - $($oty: handle::OwnedStore,)* - $($ity: handle::InternedStore,)* - } - - impl HandleStore { - pub(super) fn new(handle_counters: &'static HandleCounters) -> Self { - HandleStore { - $($oty: handle::OwnedStore::new(&handle_counters.$oty),)* - $($ity: handle::InternedStore::new(&handle_counters.$ity),)* - } - } - } - - $( - #[repr(C)] - pub(crate) struct $oty(handle::Handle); - - // Forward `Drop::drop` to the inherent `drop` method. 
- impl Drop for $oty { - fn drop(&mut self) { - $oty(self.0).drop(); - } - } - - impl Encode for $oty { - fn encode(self, w: &mut Writer, s: &mut S) { - let handle = self.0; - mem::forget(self); - handle.encode(w, s); - } - } - - impl DecodeMut<'_, '_, HandleStore>> - for Marked - { - fn decode(r: &mut Reader<'_>, s: &mut HandleStore>) -> Self { - s.$oty.take(handle::Handle::decode(r, &mut ())) - } - } - - impl Encode for &$oty { - fn encode(self, w: &mut Writer, s: &mut S) { - self.0.encode(w, s); - } - } - - impl<'s, S: server::Types> Decode<'_, 's, HandleStore>> - for &'s Marked - { - fn decode(r: &mut Reader<'_>, s: &'s HandleStore>) -> Self { - &s.$oty[handle::Handle::decode(r, &mut ())] - } - } - - impl Encode for &mut $oty { - fn encode(self, w: &mut Writer, s: &mut S) { - self.0.encode(w, s); - } - } - - impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore>> - for &'s mut Marked - { - fn decode( - r: &mut Reader<'_>, - s: &'s mut HandleStore> - ) -> Self { - &mut s.$oty[handle::Handle::decode(r, &mut ())] - } - } - - impl Encode>> - for Marked - { - fn encode(self, w: &mut Writer, s: &mut HandleStore>) { - s.$oty.alloc(self).encode(w, s); - } - } - - impl DecodeMut<'_, '_, S> for $oty { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - $oty(handle::Handle::decode(r, s)) - } - } - )* - - $( - #[repr(C)] - #[derive(Copy, Clone, PartialEq, Eq, Hash)] - pub(crate) struct $ity(handle::Handle); - - impl Encode for $ity { - fn encode(self, w: &mut Writer, s: &mut S) { - self.0.encode(w, s); - } - } - - impl DecodeMut<'_, '_, HandleStore>> - for Marked - { - fn decode(r: &mut Reader<'_>, s: &mut HandleStore>) -> Self { - s.$ity.copy(handle::Handle::decode(r, &mut ())) - } - } - - impl Encode>> - for Marked - { - fn encode(self, w: &mut Writer, s: &mut HandleStore>) { - s.$ity.alloc(self).encode(w, s); - } - } - - impl DecodeMut<'_, '_, S> for $ity { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - $ity(handle::Handle::decode(r, s)) - } - } - )* - } -} -define_handles! { - 'owned: - FreeFunctions, - TokenStream, - TokenStreamBuilder, - TokenStreamIter, - Group, - Literal, - SourceFile, - MultiSpan, - Diagnostic, - - 'interned: - Punct, - Ident, - Span, -} - -// FIXME(eddyb) generate these impls by pattern-matching on the -// names of methods - also could use the presence of `fn drop` -// to distinguish between 'owned and 'interned, above. -// Alternatively, special 'modes" could be listed of types in with_api -// instead of pattern matching on methods, here and in server decl. - -impl Clone for TokenStream { - fn clone(&self) -> Self { - self.clone() - } -} - -impl Clone for TokenStreamIter { - fn clone(&self) -> Self { - self.clone() - } -} - -impl Clone for Group { - fn clone(&self) -> Self { - self.clone() - } -} - -impl Clone for Literal { - fn clone(&self) -> Self { - self.clone() - } -} - -impl fmt::Debug for Literal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Literal") - // format the kind without quotes, as in `kind: Float` - .field("kind", &format_args!("{}", &self.debug_kind())) - .field("symbol", &self.symbol()) - // format `Some("...")` on one line even in {:#?} mode - .field("suffix", &format_args!("{:?}", &self.suffix())) - .field("span", &self.span()) - .finish() - } -} - -impl Clone for SourceFile { - fn clone(&self) -> Self { - self.clone() - } -} - -impl fmt::Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(&self.debug()) - } -} - -macro_rules! 
define_client_side { - ($($name:ident { - $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)* - }),* $(,)?) => { - $(impl $name { - $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* { - Bridge::with(|bridge| { - let mut b = bridge.cached_buffer.take(); - - b.clear(); - api_tags::Method::$name(api_tags::$name::$method).encode(&mut b, &mut ()); - reverse_encode!(b; $($arg),*); - - b = bridge.dispatch.call(b); - - let r = Result::<_, PanicMessage>::decode(&mut &b[..], &mut ()); - - bridge.cached_buffer = b; - - r.unwrap_or_else(|e| panic::resume_unwind(e.into())) - }) - })* - })* - } -} -with_api!(self, self, define_client_side); - -enum BridgeState<'a> { - /// No server is currently connected to this client. - NotConnected, - - /// A server is connected and available for requests. - Connected(Bridge<'a>), - - /// Access to the bridge is being exclusively acquired - /// (e.g., during `BridgeState::with`). - InUse, -} - -enum BridgeStateL {} - -impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL { - type Out = BridgeState<'a>; -} - -thread_local! { - static BRIDGE_STATE: scoped_cell::ScopedCell = - scoped_cell::ScopedCell::new(BridgeState::NotConnected); -} - -impl BridgeState<'_> { - /// Take exclusive control of the thread-local - /// `BridgeState`, and pass it to `f`, mutably. - /// The state will be restored after `f` exits, even - /// by panic, including modifications made to it by `f`. - /// - /// N.B., while `f` is running, the thread-local state - /// is `BridgeState::InUse`. - fn with(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R { - BRIDGE_STATE.with(|state| { - state.replace(BridgeState::InUse, |mut state| { - // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone - f(&mut state) - }) - }) - } -} - -impl Bridge<'_> { - pub(crate) fn is_available() -> bool { - BridgeState::with(|state| match state { - BridgeState::Connected(_) | BridgeState::InUse => true, - BridgeState::NotConnected => false, - }) - } - - fn enter(self, f: impl FnOnce() -> R) -> R { - let force_show_panics = self.force_show_panics; - // Hide the default panic output within `proc_macro` expansions. - // NB. the server can't do this because it may use a different libstd. - static HIDE_PANICS_DURING_EXPANSION: Once = Once::new(); - HIDE_PANICS_DURING_EXPANSION.call_once(|| { - let prev = panic::take_hook(); - panic::set_hook(Box::new(move |info| { - let show = BridgeState::with(|state| match state { - BridgeState::NotConnected => true, - BridgeState::Connected(_) | BridgeState::InUse => force_show_panics, - }); - if show { - prev(info) - } - })); - }); - - BRIDGE_STATE.with(|state| state.set(BridgeState::Connected(self), f)) - } - - fn with(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R { - BridgeState::with(|state| match state { - BridgeState::NotConnected => { - panic!("procedural macro API is used outside of a procedural macro"); - } - BridgeState::InUse => { - panic!("procedural macro API is used while it's already in use"); - } - BridgeState::Connected(bridge) => f(bridge), - }) - } -} - -/// A client-side "global object" (usually a function pointer), -/// which may be using a different `proc_macro` from the one -/// used by the server, but can be interacted with compatibly. -/// -/// N.B., `F` must have FFI-friendly memory layout (e.g., a pointer). -/// The call ABI of function pointers used for `F` doesn't -/// need to match between server and client, since it's only -/// passed between them and (eventually) called by the client. 
-#[repr(C)] -#[derive(Copy, Clone)] -pub struct Client { - // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of - // a wrapper `fn` pointer, once `const fn` can reference `static`s. - pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters, - pub(super) run: extern "C" fn(Bridge<'_>, F) -> Buffer, - pub(super) f: F, -} - -/// Client-side helper for handling client panics, entering the bridge, -/// deserializing input and serializing output. -// FIXME(eddyb) maybe replace `Bridge::enter` with this? -fn run_client DecodeMut<'a, 's, ()>, R: Encode<()>>( - mut bridge: Bridge<'_>, - f: impl FnOnce(A) -> R, -) -> Buffer { - // The initial `cached_buffer` contains the input. - let mut b = bridge.cached_buffer.take(); - - panic::catch_unwind(panic::AssertUnwindSafe(|| { - bridge.enter(|| { - let reader = &mut &b[..]; - let input = A::decode(reader, &mut ()); - - // Put the `cached_buffer` back in the `Bridge`, for requests. - Bridge::with(|bridge| bridge.cached_buffer = b.take()); - - let output = f(input); - - // Take the `cached_buffer` back out, for the output value. - b = Bridge::with(|bridge| bridge.cached_buffer.take()); - - // HACK(eddyb) Separate encoding a success value (`Ok(output)`) - // from encoding a panic (`Err(e: PanicMessage)`) to avoid - // having handles outside the `bridge.enter(|| ...)` scope, and - // to catch panics that could happen while encoding the success. - // - // Note that panics should be impossible beyond this point, but - // this is defensively trying to avoid any accidental panicking - // reaching the `extern "C"` (which should `abort` but might not - // at the moment, so this is also potentially preventing UB). - b.clear(); - Ok::<_, ()>(output).encode(&mut b, &mut ()); - }) - })) - .map_err(PanicMessage::from) - .unwrap_or_else(|e| { - b.clear(); - Err::<(), _>(e).encode(&mut b, &mut ()); - }); - b -} - -impl Client super::super::TokenStream> { - pub fn expand1(f: fn(super::super::TokenStream) -> super::super::TokenStream) -> Self { - extern "C" fn run( - bridge: Bridge<'_>, - f: impl FnOnce(super::super::TokenStream) -> super::super::TokenStream, - ) -> Buffer { - run_client(bridge, |input| f(super::super::TokenStream(input)).0) - } - Client { get_handle_counters: HandleCounters::get, run, f } - } -} - -impl Client super::super::TokenStream> { - pub fn expand2( - f: fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream, - ) -> Self { - extern "C" fn run( - bridge: Bridge<'_>, - f: impl FnOnce( - super::super::TokenStream, - super::super::TokenStream, - ) -> super::super::TokenStream, - ) -> Buffer { - run_client(bridge, |(input, input2)| { - f(super::super::TokenStream(input), super::super::TokenStream(input2)).0 - }) - } - Client { get_handle_counters: HandleCounters::get, run, f } - } -} - -#[repr(C)] -#[derive(Copy, Clone)] -pub enum ProcMacro { - CustomDerive { - trait_name: &'static str, - attributes: &'static [&'static str], - client: Client super::super::TokenStream>, - }, - - Attr { - name: &'static str, - client: Client< - fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream, - >, - }, - - Bang { - name: &'static str, - client: Client super::super::TokenStream>, - }, -} - -impl ProcMacro { - pub fn name(&self) -> &'static str { - match self { - ProcMacro::CustomDerive { trait_name, .. } => trait_name, - ProcMacro::Attr { name, .. } => name, - ProcMacro::Bang { name, .. 
} => name, - } - } - - pub fn custom_derive( - trait_name: &'static str, - attributes: &'static [&'static str], - expand: fn(super::super::TokenStream) -> super::super::TokenStream, - ) -> Self { - ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) } - } - - pub fn attr( - name: &'static str, - expand: fn( - super::super::TokenStream, - super::super::TokenStream, - ) -> super::super::TokenStream, - ) -> Self { - ProcMacro::Attr { name, client: Client::expand2(expand) } - } - - pub fn bang( - name: &'static str, - expand: fn(super::super::TokenStream) -> super::super::TokenStream, - ) -> Self { - ProcMacro::Bang { name, client: Client::expand1(expand) } - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs deleted file mode 100644 index 5be71cc3d7013..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs +++ /dev/null @@ -1,24 +0,0 @@ -//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`. - -#[repr(C)] -pub struct Closure<'a, A, R> { - call: unsafe extern "C" fn(&mut Env, A) -> R, - env: &'a mut Env, -} - -struct Env; - -impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> { - fn from(f: &'a mut F) -> Self { - unsafe extern "C" fn call R>(env: &mut Env, arg: A) -> R { - (*(env as *mut _ as *mut F))(arg) - } - Closure { call: call::, env: unsafe { &mut *(f as *mut _ as *mut Env) } } - } -} - -impl<'a, A, R> Closure<'a, A, R> { - pub fn call(&mut self, arg: A) -> R { - unsafe { (self.call)(self.env, arg) } - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs deleted file mode 100644 index bcbb86812470a..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs +++ /dev/null @@ -1,70 +0,0 @@ -//! Server-side handles and storage for per-handle data. - -use std::collections::{BTreeMap, HashMap}; -use std::hash::Hash; -use std::num::NonZeroU32; -use std::ops::{Index, IndexMut}; -use std::sync::atomic::{AtomicUsize, Ordering}; - -pub(super) type Handle = NonZeroU32; - -pub(super) struct OwnedStore { - counter: &'static AtomicUsize, - data: BTreeMap, -} - -impl OwnedStore { - pub(super) fn new(counter: &'static AtomicUsize) -> Self { - // Ensure the handle counter isn't 0, which would panic later, - // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`. 
- assert_ne!(counter.load(Ordering::SeqCst), 0); - - OwnedStore { counter, data: BTreeMap::new() } - } -} - -impl OwnedStore { - pub(super) fn alloc(&mut self, x: T) -> Handle { - let counter = self.counter.fetch_add(1, Ordering::SeqCst); - let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed"); - assert!(self.data.insert(handle, x).is_none()); - handle - } - - pub(super) fn take(&mut self, h: Handle) -> T { - self.data.remove(&h).expect("use-after-free in `proc_macro` handle") - } -} - -impl Index for OwnedStore { - type Output = T; - fn index(&self, h: Handle) -> &T { - self.data.get(&h).expect("use-after-free in `proc_macro` handle") - } -} - -impl IndexMut for OwnedStore { - fn index_mut(&mut self, h: Handle) -> &mut T { - self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle") - } -} - -pub(super) struct InternedStore { - owned: OwnedStore, - interner: HashMap, -} - -impl InternedStore { - pub(super) fn new(counter: &'static AtomicUsize) -> Self { - InternedStore { owned: OwnedStore::new(counter), interner: HashMap::new() } - } - - pub(super) fn alloc(&mut self, x: T) -> Handle { - let owned = &mut self.owned; - *self.interner.entry(x).or_insert_with(|| owned.alloc(x)) - } - - pub(super) fn copy(&mut self, h: Handle) -> T { - self.owned[h] - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs deleted file mode 100644 index b7968c529c30f..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs +++ /dev/null @@ -1,429 +0,0 @@ -//! Internal interface for communicating between a `proc_macro` client -//! (a proc macro crate) and a `proc_macro` server (a compiler front-end). -//! -//! Serialization (with C ABI buffers) and unique integer handles are employed -//! to allow safely interfacing between two copies of `proc_macro` built -//! (from the same source) by different compilers with potentially mismatching -//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap). - -#![deny(unsafe_code)] - -pub use super::{Delimiter, Level, LineColumn, Spacing}; -use std::fmt; -use std::hash::Hash; -use std::marker; -use std::mem; -use std::ops::Bound; -use std::panic; -use std::sync::atomic::AtomicUsize; -use std::sync::Once; -use std::thread; - -/// Higher-order macro describing the server RPC API, allowing automatic -/// generation of type-safe Rust APIs, both client-side and server-side. -/// -/// `with_api!(MySelf, my_self, my_macro)` expands to: -/// ```rust,ignore (pseudo-code) -/// my_macro! { -/// // ... -/// Literal { -/// // ... -/// fn character(ch: char) -> MySelf::Literal; -/// // ... -/// fn span(my_self: &MySelf::Literal) -> MySelf::Span; -/// fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span); -/// }, -/// // ... -/// } -/// ``` -/// -/// The first two arguments serve to customize the arguments names -/// and argument/return types, to enable several different usecases: -/// -/// If `my_self` is just `self`, then each `fn` signature can be used -/// as-is for a method. If it's anything else (`self_` in practice), -/// then the signatures don't have a special `self` argument, and -/// can, therefore, have a different one introduced. -/// -/// If `MySelf` is just `Self`, then the types are only valid inside -/// a trait or a trait impl, where the trait has associated types -/// for each of the API types. 
If non-associated types are desired, -/// a module name (`self` in practice) can be used instead of `Self`. -macro_rules! with_api { - ($S:ident, $self:ident, $m:ident) => { - $m! { - FreeFunctions { - fn drop($self: $S::FreeFunctions); - fn track_env_var(var: &str, value: Option<&str>); - fn track_path(path: &str); - }, - TokenStream { - fn drop($self: $S::TokenStream); - fn clone($self: &$S::TokenStream) -> $S::TokenStream; - fn new() -> $S::TokenStream; - fn is_empty($self: &$S::TokenStream) -> bool; - fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>; - fn from_str(src: &str) -> $S::TokenStream; - fn to_string($self: &$S::TokenStream) -> String; - fn from_token_tree( - tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>, - ) -> $S::TokenStream; - fn into_iter($self: $S::TokenStream) -> $S::TokenStreamIter; - }, - TokenStreamBuilder { - fn drop($self: $S::TokenStreamBuilder); - fn new() -> $S::TokenStreamBuilder; - fn push($self: &mut $S::TokenStreamBuilder, stream: $S::TokenStream); - fn build($self: $S::TokenStreamBuilder) -> $S::TokenStream; - }, - TokenStreamIter { - fn drop($self: $S::TokenStreamIter); - fn clone($self: &$S::TokenStreamIter) -> $S::TokenStreamIter; - fn next( - $self: &mut $S::TokenStreamIter, - ) -> Option>; - }, - Group { - fn drop($self: $S::Group); - fn clone($self: &$S::Group) -> $S::Group; - fn new(delimiter: Delimiter, stream: $S::TokenStream) -> $S::Group; - fn delimiter($self: &$S::Group) -> Delimiter; - fn stream($self: &$S::Group) -> $S::TokenStream; - fn span($self: &$S::Group) -> $S::Span; - fn span_open($self: &$S::Group) -> $S::Span; - fn span_close($self: &$S::Group) -> $S::Span; - fn set_span($self: &mut $S::Group, span: $S::Span); - }, - Punct { - fn new(ch: char, spacing: Spacing) -> $S::Punct; - fn as_char($self: $S::Punct) -> char; - fn spacing($self: $S::Punct) -> Spacing; - fn span($self: $S::Punct) -> $S::Span; - fn with_span($self: $S::Punct, span: $S::Span) -> $S::Punct; - }, - Ident { - fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident; - fn span($self: $S::Ident) -> $S::Span; - fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident; - }, - Literal { - fn drop($self: $S::Literal); - fn clone($self: &$S::Literal) -> $S::Literal; - fn from_str(s: &str) -> Result<$S::Literal, ()>; - fn to_string($self: &$S::Literal) -> String; - fn debug_kind($self: &$S::Literal) -> String; - fn symbol($self: &$S::Literal) -> String; - fn suffix($self: &$S::Literal) -> Option; - fn integer(n: &str) -> $S::Literal; - fn typed_integer(n: &str, kind: &str) -> $S::Literal; - fn float(n: &str) -> $S::Literal; - fn f32(n: &str) -> $S::Literal; - fn f64(n: &str) -> $S::Literal; - fn string(string: &str) -> $S::Literal; - fn character(ch: char) -> $S::Literal; - fn byte_string(bytes: &[u8]) -> $S::Literal; - fn span($self: &$S::Literal) -> $S::Span; - fn set_span($self: &mut $S::Literal, span: $S::Span); - fn subspan( - $self: &$S::Literal, - start: Bound, - end: Bound, - ) -> Option<$S::Span>; - }, - SourceFile { - fn drop($self: $S::SourceFile); - fn clone($self: &$S::SourceFile) -> $S::SourceFile; - fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool; - fn path($self: &$S::SourceFile) -> String; - fn is_real($self: &$S::SourceFile) -> bool; - }, - MultiSpan { - fn drop($self: $S::MultiSpan); - fn new() -> $S::MultiSpan; - fn push($self: &mut $S::MultiSpan, span: $S::Span); - }, - Diagnostic { - fn drop($self: $S::Diagnostic); - fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic; - fn 
sub( - $self: &mut $S::Diagnostic, - level: Level, - msg: &str, - span: $S::MultiSpan, - ); - fn emit($self: $S::Diagnostic); - }, - Span { - fn debug($self: $S::Span) -> String; - fn def_site() -> $S::Span; - fn call_site() -> $S::Span; - fn mixed_site() -> $S::Span; - fn source_file($self: $S::Span) -> $S::SourceFile; - fn parent($self: $S::Span) -> Option<$S::Span>; - fn source($self: $S::Span) -> $S::Span; - fn start($self: $S::Span) -> LineColumn; - fn end($self: $S::Span) -> LineColumn; - fn before($self: $S::Span) -> $S::Span; - fn after($self: $S::Span) -> $S::Span; - fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>; - fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span; - fn source_text($self: $S::Span) -> Option; - fn save_span($self: $S::Span) -> usize; - fn recover_proc_macro_span(id: usize) -> $S::Span; - }, - } - }; -} - -// FIXME(eddyb) this calls `encode` for each argument, but in reverse, -// to avoid borrow conflicts from borrows started by `&mut` arguments. -macro_rules! reverse_encode { - ($writer:ident;) => {}; - ($writer:ident; $first:ident $(, $rest:ident)*) => { - reverse_encode!($writer; $($rest),*); - $first.encode(&mut $writer, &mut ()); - } -} - -// FIXME(eddyb) this calls `decode` for each argument, but in reverse, -// to avoid borrow conflicts from borrows started by `&mut` arguments. -macro_rules! reverse_decode { - ($reader:ident, $s:ident;) => {}; - ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => { - reverse_decode!($reader, $s; $($rest: $rest_ty),*); - let $first = <$first_ty>::decode(&mut $reader, $s); - } -} - -#[allow(unsafe_code)] -mod buffer; -#[forbid(unsafe_code)] -pub mod client; -#[allow(unsafe_code)] -mod closure; -#[forbid(unsafe_code)] -mod handle; -#[macro_use] -#[forbid(unsafe_code)] -mod rpc; -#[allow(unsafe_code)] -mod scoped_cell; -#[forbid(unsafe_code)] -pub mod server; - -use buffer::Buffer; -pub use rpc::PanicMessage; -use rpc::{Decode, DecodeMut, Encode, Reader, Writer}; - -/// An active connection between a server and a client. -/// The server creates the bridge (`Bridge::run_server` in `server.rs`), -/// then passes it to the client through the function pointer in the `run` -/// field of `client::Client`. The client holds its copy of the `Bridge` -/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`). -#[repr(C)] -pub struct Bridge<'a> { - /// Reusable buffer (only `clear`-ed, never shrunk), primarily - /// used for making requests, but also for passing input to client. - cached_buffer: Buffer, - - /// Server-side function that the client uses to make requests. - dispatch: closure::Closure<'a, Buffer, Buffer>, - - /// If 'true', always invoke the default panic hook - force_show_panics: bool, -} - -#[forbid(unsafe_code)] -#[allow(non_camel_case_types)] -mod api_tags { - use super::rpc::{DecodeMut, Encode, Reader, Writer}; - - macro_rules! declare_tags { - ($($name:ident { - $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)* - }),* $(,)?) => { - $( - pub(super) enum $name { - $($method),* - } - rpc_encode_decode!(enum $name { $($method),* }); - )* - - - pub(super) enum Method { - $($name($name)),* - } - rpc_encode_decode!(enum Method { $($name(m)),* }); - } - } - with_api!(self, self, declare_tags); -} - -/// Helper to wrap associated types to allow trait impl dispatch. -/// That is, normally a pair of impls for `T::Foo` and `T::Bar` -/// can overlap, but if the impls are, instead, on types like -/// `Marked` and `Marked`, they can't. 
-trait Mark { - type Unmarked; - fn mark(unmarked: Self::Unmarked) -> Self; -} - -/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details). -trait Unmark { - type Unmarked; - fn unmark(self) -> Self::Unmarked; -} - -#[derive(Copy, Clone, PartialEq, Eq, Hash)] -struct Marked { - value: T, - _marker: marker::PhantomData, -} - -impl Mark for Marked { - type Unmarked = T; - fn mark(unmarked: Self::Unmarked) -> Self { - Marked { value: unmarked, _marker: marker::PhantomData } - } -} -impl Unmark for Marked { - type Unmarked = T; - fn unmark(self) -> Self::Unmarked { - self.value - } -} -impl<'a, T, M> Unmark for &'a Marked { - type Unmarked = &'a T; - fn unmark(self) -> Self::Unmarked { - &self.value - } -} -impl<'a, T, M> Unmark for &'a mut Marked { - type Unmarked = &'a mut T; - fn unmark(self) -> Self::Unmarked { - &mut self.value - } -} - -impl Mark for Option { - type Unmarked = Option; - fn mark(unmarked: Self::Unmarked) -> Self { - unmarked.map(T::mark) - } -} -impl Unmark for Option { - type Unmarked = Option; - fn unmark(self) -> Self::Unmarked { - self.map(T::unmark) - } -} - -impl Mark for Result { - type Unmarked = Result; - fn mark(unmarked: Self::Unmarked) -> Self { - unmarked.map(T::mark).map_err(E::mark) - } -} -impl Unmark for Result { - type Unmarked = Result; - fn unmark(self) -> Self::Unmarked { - self.map(T::unmark).map_err(E::unmark) - } -} - -macro_rules! mark_noop { - ($($ty:ty),* $(,)?) => { - $( - impl Mark for $ty { - type Unmarked = Self; - fn mark(unmarked: Self::Unmarked) -> Self { - unmarked - } - } - impl Unmark for $ty { - type Unmarked = Self; - fn unmark(self) -> Self::Unmarked { - self - } - } - )* - } -} -mark_noop! { - (), - bool, - char, - &'_ [u8], - &'_ str, - String, - usize, - Delimiter, - Level, - LineColumn, - Spacing, - Bound, -} - -rpc_encode_decode!( - enum Delimiter { - Parenthesis, - Brace, - Bracket, - None, - } -); -rpc_encode_decode!( - enum Level { - Error, - Warning, - Note, - Help, - } -); -rpc_encode_decode!(struct LineColumn { line, column }); -rpc_encode_decode!( - enum Spacing { - Alone, - Joint, - } -); - -#[derive(Clone)] -pub enum TokenTree { - Group(G), - Punct(P), - Ident(I), - Literal(L), -} - -impl Mark for TokenTree { - type Unmarked = TokenTree; - fn mark(unmarked: Self::Unmarked) -> Self { - match unmarked { - TokenTree::Group(tt) => TokenTree::Group(G::mark(tt)), - TokenTree::Punct(tt) => TokenTree::Punct(P::mark(tt)), - TokenTree::Ident(tt) => TokenTree::Ident(I::mark(tt)), - TokenTree::Literal(tt) => TokenTree::Literal(L::mark(tt)), - } - } -} -impl Unmark for TokenTree { - type Unmarked = TokenTree; - fn unmark(self) -> Self::Unmarked { - match self { - TokenTree::Group(tt) => TokenTree::Group(tt.unmark()), - TokenTree::Punct(tt) => TokenTree::Punct(tt.unmark()), - TokenTree::Ident(tt) => TokenTree::Ident(tt.unmark()), - TokenTree::Literal(tt) => TokenTree::Literal(tt.unmark()), - } - } -} - -rpc_encode_decode!( - enum TokenTree { - Group(tt), - Punct(tt), - Ident(tt), - Literal(tt), - } -); diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs deleted file mode 100644 index d50564d01a5d2..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs +++ /dev/null @@ -1,305 +0,0 @@ -//! Serialization for client-server communication. 
- -use std::any::Any; -use std::char; -use std::io::Write; -use std::num::NonZeroU32; -use std::ops::Bound; -use std::str; - -pub(super) type Writer = super::buffer::Buffer; - -pub(super) trait Encode: Sized { - fn encode(self, w: &mut Writer, s: &mut S); -} - -pub(super) type Reader<'a> = &'a [u8]; - -pub(super) trait Decode<'a, 's, S>: Sized { - fn decode(r: &mut Reader<'a>, s: &'s S) -> Self; -} - -pub(super) trait DecodeMut<'a, 's, S>: Sized { - fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self; -} - -macro_rules! rpc_encode_decode { - (le $ty:ty) => { - impl Encode for $ty { - fn encode(self, w: &mut Writer, _: &mut S) { - w.extend_from_array(&self.to_le_bytes()); - } - } - - impl DecodeMut<'_, '_, S> for $ty { - fn decode(r: &mut Reader<'_>, _: &mut S) -> Self { - const N: usize = ::std::mem::size_of::<$ty>(); - - let mut bytes = [0; N]; - bytes.copy_from_slice(&r[..N]); - *r = &r[N..]; - - Self::from_le_bytes(bytes) - } - } - }; - (struct $name:ident { $($field:ident),* $(,)? }) => { - impl Encode for $name { - fn encode(self, w: &mut Writer, s: &mut S) { - $(self.$field.encode(w, s);)* - } - } - - impl DecodeMut<'_, '_, S> for $name { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - $name { - $($field: DecodeMut::decode(r, s)),* - } - } - } - }; - (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => { - impl),+)?> Encode for $name $(<$($T),+>)? { - fn encode(self, w: &mut Writer, s: &mut S) { - // HACK(eddyb): `Tag` enum duplicated between the - // two impls as there's no other place to stash it. - #[allow(non_upper_case_globals)] - mod tag { - #[repr(u8)] enum Tag { $($variant),* } - - $(pub const $variant: u8 = Tag::$variant as u8;)* - } - - match self { - $($name::$variant $(($field))* => { - tag::$variant.encode(w, s); - $($field.encode(w, s);)* - })* - } - } - } - - impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S> - for $name $(<$($T),+>)? - { - fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - // HACK(eddyb): `Tag` enum duplicated between the - // two impls as there's no other place to stash it. 
- #[allow(non_upper_case_globals)] - mod tag { - #[repr(u8)] enum Tag { $($variant),* } - - $(pub const $variant: u8 = Tag::$variant as u8;)* - } - - match u8::decode(r, s) { - $(tag::$variant => { - $(let $field = DecodeMut::decode(r, s);)* - $name::$variant $(($field))* - })* - _ => unreachable!(), - } - } - } - } -} - -impl Encode for () { - fn encode(self, _: &mut Writer, _: &mut S) {} -} - -impl DecodeMut<'_, '_, S> for () { - fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {} -} - -impl Encode for u8 { - fn encode(self, w: &mut Writer, _: &mut S) { - w.push(self); - } -} - -impl DecodeMut<'_, '_, S> for u8 { - fn decode(r: &mut Reader<'_>, _: &mut S) -> Self { - let x = r[0]; - *r = &r[1..]; - x - } -} - -rpc_encode_decode!(le u32); -rpc_encode_decode!(le usize); - -impl Encode for bool { - fn encode(self, w: &mut Writer, s: &mut S) { - (self as u8).encode(w, s); - } -} - -impl DecodeMut<'_, '_, S> for bool { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - match u8::decode(r, s) { - 0 => false, - 1 => true, - _ => unreachable!(), - } - } -} - -impl Encode for char { - fn encode(self, w: &mut Writer, s: &mut S) { - (self as u32).encode(w, s); - } -} - -impl DecodeMut<'_, '_, S> for char { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - char::from_u32(u32::decode(r, s)).unwrap() - } -} - -impl Encode for NonZeroU32 { - fn encode(self, w: &mut Writer, s: &mut S) { - self.get().encode(w, s); - } -} - -impl DecodeMut<'_, '_, S> for NonZeroU32 { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - Self::new(u32::decode(r, s)).unwrap() - } -} - -impl, B: Encode> Encode for (A, B) { - fn encode(self, w: &mut Writer, s: &mut S) { - self.0.encode(w, s); - self.1.encode(w, s); - } -} - -impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> - for (A, B) -{ - fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - (DecodeMut::decode(r, s), DecodeMut::decode(r, s)) - } -} - -rpc_encode_decode!( - enum Bound { - Included(x), - Excluded(x), - Unbounded, - } -); - -rpc_encode_decode!( - enum Option { - None, - Some(x), - } -); - -rpc_encode_decode!( - enum Result { - Ok(x), - Err(e), - } -); - -impl Encode for &[u8] { - fn encode(self, w: &mut Writer, s: &mut S) { - self.len().encode(w, s); - w.write_all(self).unwrap(); - } -} - -impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] { - fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - let len = usize::decode(r, s); - let xs = &r[..len]; - *r = &r[len..]; - xs - } -} - -impl Encode for &str { - fn encode(self, w: &mut Writer, s: &mut S) { - self.as_bytes().encode(w, s); - } -} - -impl<'a, S> DecodeMut<'a, '_, S> for &'a str { - fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { - str::from_utf8(<&[u8]>::decode(r, s)).unwrap() - } -} - -impl Encode for String { - fn encode(self, w: &mut Writer, s: &mut S) { - self[..].encode(w, s); - } -} - -impl DecodeMut<'_, '_, S> for String { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - <&str>::decode(r, s).to_string() - } -} - -/// Simplified version of panic payloads, ignoring -/// types other than `&'static str` and `String`. 
-pub enum PanicMessage { - StaticStr(&'static str), - String(String), - Unknown, -} - -impl From> for PanicMessage { - fn from(payload: Box) -> Self { - if let Some(s) = payload.downcast_ref::<&'static str>() { - return PanicMessage::StaticStr(s); - } - if let Ok(s) = payload.downcast::() { - return PanicMessage::String(*s); - } - PanicMessage::Unknown - } -} - -impl Into> for PanicMessage { - fn into(self) -> Box { - match self { - PanicMessage::StaticStr(s) => Box::new(s), - PanicMessage::String(s) => Box::new(s), - PanicMessage::Unknown => { - struct UnknownPanicMessage; - Box::new(UnknownPanicMessage) - } - } - } -} - -impl PanicMessage { - pub fn as_str(&self) -> Option<&str> { - match self { - PanicMessage::StaticStr(s) => Some(s), - PanicMessage::String(s) => Some(s), - PanicMessage::Unknown => None, - } - } -} - -impl Encode for PanicMessage { - fn encode(self, w: &mut Writer, s: &mut S) { - self.as_str().encode(w, s); - } -} - -impl DecodeMut<'_, '_, S> for PanicMessage { - fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { - match Option::::decode(r, s) { - Some(s) => PanicMessage::String(s), - None => PanicMessage::Unknown, - } - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs deleted file mode 100644 index b0c2e5b9c26b1..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs +++ /dev/null @@ -1,81 +0,0 @@ -//! `Cell` variant for (scoped) existential lifetimes. - -use std::cell::Cell; -use std::mem; -use std::ops::{Deref, DerefMut}; - -/// Type lambda application, with a lifetime. -#[allow(unused_lifetimes)] -pub trait ApplyL<'a> { - type Out; -} - -/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`. -pub trait LambdaL: for<'a> ApplyL<'a> {} - -impl ApplyL<'a>> LambdaL for T {} - -// HACK(eddyb) work around projection limitations with a newtype -// FIXME(#52812) replace with `&'a mut >::Out` -pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut >::Out); - -impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> { - type Target = >::Out; - fn deref(&self) -> &Self::Target { - self.0 - } -} - -impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> { - fn deref_mut(&mut self) -> &mut Self::Target { - self.0 - } -} - -pub struct ScopedCell(Cell<>::Out>); - -impl ScopedCell { - pub fn new(value: >::Out) -> Self { - ScopedCell(Cell::new(value)) - } - - /// Sets the value in `self` to `replacement` while - /// running `f`, which gets the old value, mutably. - /// The old value will be restored after `f` exits, even - /// by panic, including modifications made to it by `f`. - pub fn replace<'a, R>( - &self, - replacement: >::Out, - f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R, - ) -> R { - /// Wrapper that ensures that the cell always gets filled - /// (with the original state, optionally changed by `f`), - /// even if `f` had panicked. - struct PutBackOnDrop<'a, T: LambdaL> { - cell: &'a ScopedCell, - value: Option<>::Out>, - } - - impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> { - fn drop(&mut self) { - self.cell.0.set(self.value.take().unwrap()); - } - } - - let mut put_back_on_drop = PutBackOnDrop { - cell: self, - value: Some(self.0.replace(unsafe { - let erased = mem::transmute_copy(&replacement); - mem::forget(replacement); - erased - })), - }; - - f(RefMutL(put_back_on_drop.value.as_mut().unwrap())) - } - - /// Sets the value in `self` to `value` while running `f`. 
- pub fn set(&self, value: >::Out, f: impl FnOnce() -> R) -> R { - self.replace(value, |_| f()) - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs deleted file mode 100644 index 06a19791351a4..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs +++ /dev/null @@ -1,352 +0,0 @@ -//! Server-side traits. - -use super::*; - -// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`. -use super::client::HandleStore; - -/// Declare an associated item of one of the traits below, optionally -/// adjusting it (i.e., adding bounds to types and default bodies to methods). -macro_rules! associated_item { - (type FreeFunctions) => - (type FreeFunctions: 'static;); - (type TokenStream) => - (type TokenStream: 'static + Clone;); - (type TokenStreamBuilder) => - (type TokenStreamBuilder: 'static;); - (type TokenStreamIter) => - (type TokenStreamIter: 'static + Clone;); - (type Group) => - (type Group: 'static + Clone;); - (type Punct) => - (type Punct: 'static + Copy + Eq + Hash;); - (type Ident) => - (type Ident: 'static + Copy + Eq + Hash;); - (type Literal) => - (type Literal: 'static + Clone;); - (type SourceFile) => - (type SourceFile: 'static + Clone;); - (type MultiSpan) => - (type MultiSpan: 'static;); - (type Diagnostic) => - (type Diagnostic: 'static;); - (type Span) => - (type Span: 'static + Copy + Eq + Hash;); - (fn drop(&mut self, $arg:ident: $arg_ty:ty)) => - (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) }); - (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) => - (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() }); - ($($item:tt)*) => ($($item)*;) -} - -macro_rules! declare_server_traits { - ($($name:ident { - $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)* - }),* $(,)?) => { - pub trait Types { - $(associated_item!(type $name);)* - } - - $(pub trait $name: Types { - $(associated_item!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)* - })* - - pub trait Server: Types $(+ $name)* {} - impl Server for S {} - } -} -with_api!(Self, self_, declare_server_traits); - -pub(super) struct MarkedTypes(S); - -macro_rules! define_mark_types_impls { - ($($name:ident { - $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)* - }),* $(,)?) => { - impl Types for MarkedTypes { - $(type $name = Marked;)* - } - - $(impl $name for MarkedTypes { - $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? { - <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*)) - })* - })* - } -} -with_api!(Self, self_, define_mark_types_impls); - -struct Dispatcher { - handle_store: HandleStore, - server: S, -} - -macro_rules! define_dispatcher_impl { - ($($name:ident { - $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)* - }),* $(,)?) => { - // FIXME(eddyb) `pub` only for `ExecutionStrategy` below. - pub trait DispatcherTrait { - // HACK(eddyb) these are here to allow `Self::$name` to work below. 
- $(type $name;)* - fn dispatch(&mut self, b: Buffer) -> Buffer; - } - - impl DispatcherTrait for Dispatcher> { - $(type $name = as Types>::$name;)* - fn dispatch(&mut self, mut b: Buffer) -> Buffer { - let Dispatcher { handle_store, server } = self; - - let mut reader = &b[..]; - match api_tags::Method::decode(&mut reader, &mut ()) { - $(api_tags::Method::$name(m) => match m { - $(api_tags::$name::$method => { - let mut call_method = || { - reverse_decode!(reader, handle_store; $($arg: $arg_ty),*); - $name::$method(server, $($arg),*) - }; - // HACK(eddyb) don't use `panic::catch_unwind` in a panic. - // If client and server happen to use the same `libstd`, - // `catch_unwind` asserts that the panic counter was 0, - // even when the closure passed to it didn't panic. - let r = if thread::panicking() { - Ok(call_method()) - } else { - panic::catch_unwind(panic::AssertUnwindSafe(call_method)) - .map_err(PanicMessage::from) - }; - - b.clear(); - r.encode(&mut b, handle_store); - })* - }),* - } - b - } - } - } -} -with_api!(Self, self_, define_dispatcher_impl); - -pub trait ExecutionStrategy { - fn run_bridge_and_client( - &self, - dispatcher: &mut impl DispatcherTrait, - input: Buffer, - run_client: extern "C" fn(Bridge<'_>, D) -> Buffer, - client_data: D, - force_show_panics: bool, - ) -> Buffer; -} - -pub struct SameThread; - -impl ExecutionStrategy for SameThread { - fn run_bridge_and_client( - &self, - dispatcher: &mut impl DispatcherTrait, - input: Buffer, - run_client: extern "C" fn(Bridge<'_>, D) -> Buffer, - client_data: D, - force_show_panics: bool, - ) -> Buffer { - let mut dispatch = |b| dispatcher.dispatch(b); - - run_client( - Bridge { cached_buffer: input, dispatch: (&mut dispatch).into(), force_show_panics }, - client_data, - ) - } -} - -// NOTE(eddyb) Two implementations are provided, the second one is a bit -// faster but neither is anywhere near as fast as same-thread execution. 
- -pub struct CrossThread1; - -impl ExecutionStrategy for CrossThread1 { - fn run_bridge_and_client( - &self, - dispatcher: &mut impl DispatcherTrait, - input: Buffer, - run_client: extern "C" fn(Bridge<'_>, D) -> Buffer, - client_data: D, - force_show_panics: bool, - ) -> Buffer { - use std::sync::mpsc::channel; - - let (req_tx, req_rx) = channel(); - let (res_tx, res_rx) = channel(); - - let join_handle = thread::spawn(move || { - let mut dispatch = |b| { - req_tx.send(b).unwrap(); - res_rx.recv().unwrap() - }; - - run_client( - Bridge { - cached_buffer: input, - dispatch: (&mut dispatch).into(), - force_show_panics, - }, - client_data, - ) - }); - - for b in req_rx { - res_tx.send(dispatcher.dispatch(b)).unwrap(); - } - - join_handle.join().unwrap() - } -} - -pub struct CrossThread2; - -impl ExecutionStrategy for CrossThread2 { - fn run_bridge_and_client( - &self, - dispatcher: &mut impl DispatcherTrait, - input: Buffer, - run_client: extern "C" fn(Bridge<'_>, D) -> Buffer, - client_data: D, - force_show_panics: bool, - ) -> Buffer { - use std::sync::{Arc, Mutex}; - - enum State { - Req(T), - Res(T), - } - - let mut state = Arc::new(Mutex::new(State::Res(Buffer::new()))); - - let server_thread = thread::current(); - let state2 = state.clone(); - let join_handle = thread::spawn(move || { - let mut dispatch = |b| { - *state2.lock().unwrap() = State::Req(b); - server_thread.unpark(); - loop { - thread::park(); - if let State::Res(b) = &mut *state2.lock().unwrap() { - break b.take(); - } - } - }; - - let r = run_client( - Bridge { - cached_buffer: input, - dispatch: (&mut dispatch).into(), - force_show_panics, - }, - client_data, - ); - - // Wake up the server so it can exit the dispatch loop. - drop(state2); - server_thread.unpark(); - - r - }); - - // Check whether `state2` was dropped, to know when to stop. 
- while Arc::get_mut(&mut state).is_none() { - thread::park(); - let mut b = match &mut *state.lock().unwrap() { - State::Req(b) => b.take(), - _ => continue, - }; - b = dispatcher.dispatch(b.take()); - *state.lock().unwrap() = State::Res(b); - join_handle.thread().unpark(); - } - - join_handle.join().unwrap() - } -} - -fn run_server< - S: Server, - I: Encode>>, - O: for<'a, 's> DecodeMut<'a, 's, HandleStore>>, - D: Copy + Send + 'static, ->( - strategy: &impl ExecutionStrategy, - handle_counters: &'static client::HandleCounters, - server: S, - input: I, - run_client: extern "C" fn(Bridge<'_>, D) -> Buffer, - client_data: D, - force_show_panics: bool, -) -> Result { - let mut dispatcher = - Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) }; - - let mut b = Buffer::new(); - input.encode(&mut b, &mut dispatcher.handle_store); - - b = strategy.run_bridge_and_client( - &mut dispatcher, - b, - run_client, - client_data, - force_show_panics, - ); - - Result::decode(&mut &b[..], &mut dispatcher.handle_store) -} - -impl client::Client super::super::TokenStream> { - pub fn run( - &self, - strategy: &impl ExecutionStrategy, - server: S, - input: S::TokenStream, - force_show_panics: bool, - ) -> Result { - let client::Client { get_handle_counters, run, f } = *self; - run_server( - strategy, - get_handle_counters(), - server, - as Types>::TokenStream::mark(input), - run, - f, - force_show_panics, - ) - .map( as Types>::TokenStream::unmark) - } -} - -impl - client::Client< - fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream, - > -{ - pub fn run( - &self, - strategy: &impl ExecutionStrategy, - server: S, - input: S::TokenStream, - input2: S::TokenStream, - force_show_panics: bool, - ) -> Result { - let client::Client { get_handle_counters, run, f } = *self; - run_server( - strategy, - get_handle_counters(), - server, - ( - as Types>::TokenStream::mark(input), - as Types>::TokenStream::mark(input2), - ), - run, - f, - force_show_panics, - ) - .map( as Types>::TokenStream::unmark) - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs deleted file mode 100644 index cda239f878500..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs +++ /dev/null @@ -1,166 +0,0 @@ -//! lib-proc-macro diagnostic -//! -//! Copy from -//! augmented with removing unstable features - -use super::Span; - -/// An enum representing a diagnostic level. -#[derive(Copy, Clone, Debug)] -#[non_exhaustive] -pub enum Level { - /// An error. - Error, - /// A warning. - Warning, - /// A note. - Note, - /// A help message. - Help, -} - -/// Trait implemented by types that can be converted into a set of `Span`s. -pub trait MultiSpan { - /// Converts `self` into a `Vec`. - fn into_spans(self) -> Vec; -} - -impl MultiSpan for Span { - fn into_spans(self) -> Vec { - vec![self] - } -} - -impl MultiSpan for Vec { - fn into_spans(self) -> Vec { - self - } -} - -impl<'a> MultiSpan for &'a [Span] { - fn into_spans(self) -> Vec { - self.to_vec() - } -} - -/// A structure representing a diagnostic message and associated children -/// messages. -#[derive(Clone, Debug)] -pub struct Diagnostic { - level: Level, - message: String, - spans: Vec, - children: Vec, -} - -macro_rules! 
diagnostic_child_methods { - ($spanned:ident, $regular:ident, $level:expr) => { - #[doc = concat!("Adds a new child diagnostics message to `self` with the [`", - stringify!($level), "`] level, and the given `spans` and `message`.")] - pub fn $spanned(mut self, spans: S, message: T) -> Diagnostic - where - S: MultiSpan, - T: Into, - { - self.children.push(Diagnostic::spanned(spans, $level, message)); - self - } - - #[doc = concat!("Adds a new child diagnostic message to `self` with the [`", - stringify!($level), "`] level, and the given `message`.")] - pub fn $regular>(mut self, message: T) -> Diagnostic { - self.children.push(Diagnostic::new($level, message)); - self - } - }; -} - -/// Iterator over the children diagnostics of a `Diagnostic`. -#[derive(Debug, Clone)] -pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>); - -impl<'a> Iterator for Children<'a> { - type Item = &'a Diagnostic; - - fn next(&mut self) -> Option { - self.0.next() - } -} - -impl Diagnostic { - /// Creates a new diagnostic with the given `level` and `message`. - pub fn new>(level: Level, message: T) -> Diagnostic { - Diagnostic { level, message: message.into(), spans: vec![], children: vec![] } - } - - /// Creates a new diagnostic with the given `level` and `message` pointing to - /// the given set of `spans`. - pub fn spanned(spans: S, level: Level, message: T) -> Diagnostic - where - S: MultiSpan, - T: Into, - { - Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] } - } - - diagnostic_child_methods!(span_error, error, Level::Error); - diagnostic_child_methods!(span_warning, warning, Level::Warning); - diagnostic_child_methods!(span_note, note, Level::Note); - diagnostic_child_methods!(span_help, help, Level::Help); - - /// Returns the diagnostic `level` for `self`. - pub fn level(&self) -> Level { - self.level - } - - /// Sets the level in `self` to `level`. - pub fn set_level(&mut self, level: Level) { - self.level = level; - } - - /// Returns the message in `self`. - pub fn message(&self) -> &str { - &self.message - } - - /// Sets the message in `self` to `message`. - pub fn set_message>(&mut self, message: T) { - self.message = message.into(); - } - - /// Returns the `Span`s in `self`. - pub fn spans(&self) -> &[Span] { - &self.spans - } - - /// Sets the `Span`s in `self` to `spans`. - pub fn set_spans(&mut self, spans: S) { - self.spans = spans.into_spans(); - } - - /// Returns an iterator over the children diagnostics of `self`. - pub fn children(&self) -> Children<'_> { - Children(self.children.iter()) - } - - /// Emit the diagnostic. - pub fn emit(self) { - fn to_internal(spans: Vec) -> super::bridge::client::MultiSpan { - let mut multi_span = super::bridge::client::MultiSpan::new(); - for span in spans { - multi_span.push(span.0); - } - multi_span - } - - let mut diag = super::bridge::client::Diagnostic::new( - self.level, - &self.message[..], - to_internal(self.spans), - ); - for c in self.children { - diag.sub(c.level, &c.message[..], to_internal(c.spans)); - } - diag.emit(); - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs deleted file mode 100644 index a94b909941d96..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs +++ /dev/null @@ -1,1056 +0,0 @@ -//! A support library for macro authors when defining new macros. -//! -//! This library, provided by the standard distribution, provides the types -//! 
consumed in the interfaces of procedurally defined macro definitions such as -//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and -//! custom derive attributes`#[proc_macro_derive]`. -//! -//! See [the book] for more. -//! -//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes - -#[doc(hidden)] -pub mod bridge; - -mod diagnostic; - -pub use diagnostic::{Diagnostic, Level, MultiSpan}; - -use std::cmp::Ordering; -use std::ops::RangeBounds; -use std::path::PathBuf; -use std::str::FromStr; -use std::{error, fmt, iter, mem}; - -/// Determines whether proc_macro has been made accessible to the currently -/// running program. -/// -/// The proc_macro crate is only intended for use inside the implementation of -/// procedural macros. All the functions in this crate panic if invoked from -/// outside of a procedural macro, such as from a build script or unit test or -/// ordinary Rust binary. -/// -/// With consideration for Rust libraries that are designed to support both -/// macro and non-macro use cases, `proc_macro::is_available()` provides a -/// non-panicking way to detect whether the infrastructure required to use the -/// API of proc_macro is presently available. Returns true if invoked from -/// inside of a procedural macro, false if invoked from any other binary. -pub fn is_available() -> bool { - bridge::Bridge::is_available() -} - -/// The main type provided by this crate, representing an abstract stream of -/// tokens, or, more specifically, a sequence of token trees. -/// The type provide interfaces for iterating over those token trees and, conversely, -/// collecting a number of token trees into one stream. -/// -/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]` -/// and `#[proc_macro_derive]` definitions. -#[derive(Clone)] -pub struct TokenStream(bridge::client::TokenStream); - -/// Error returned from `TokenStream::from_str`. -#[non_exhaustive] -#[derive(Debug)] -pub struct LexError; - -impl fmt::Display for LexError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("cannot parse string into token stream") - } -} - -impl error::Error for LexError {} - -/// Error returned from `TokenStream::expand_expr`. -#[non_exhaustive] -#[derive(Debug)] -pub struct ExpandError; - -impl fmt::Display for ExpandError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("macro expansion failed") - } -} - -impl error::Error for ExpandError {} - -impl TokenStream { - /// Returns an empty `TokenStream` containing no token trees. - pub fn new() -> TokenStream { - TokenStream(bridge::client::TokenStream::new()) - } - - /// Checks if this `TokenStream` is empty. - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - /// Parses this `TokenStream` as an expression and attempts to expand any - /// macros within it. Returns the expanded `TokenStream`. - /// - /// Currently only expressions expanding to literals will succeed, although - /// this may be relaxed in the future. - /// - /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded, - /// report an error, failing compilation, and/or return an `Err(..)`. The - /// specific behavior for any error condition, and what conditions are - /// considered errors, is unspecified and may change in the future. 
- pub fn expand_expr(&self) -> Result { - match bridge::client::TokenStream::expand_expr(&self.0) { - Ok(stream) => Ok(TokenStream(stream)), - Err(_) => Err(ExpandError), - } - } -} - -/// Attempts to break the string into tokens and parse those tokens into a token stream. -/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters -/// or characters not existing in the language. -/// All tokens in the parsed stream get `Span::call_site()` spans. -/// -/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to -/// change these errors into `LexError`s later. -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - Ok(TokenStream(bridge::client::TokenStream::from_str(src))) - } -} - -/// Prints the token stream as a string that is supposed to be losslessly convertible back -/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s -/// with `Delimiter::None` delimiters and negative numeric literals. -impl fmt::Display for TokenStream { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - unimplemented!() - } -} - -/// Prints token in a form convenient for debugging. -impl fmt::Debug for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str("TokenStream ")?; - f.debug_list().entries(self.clone()).finish() - } -} - -impl Default for TokenStream { - fn default() -> Self { - TokenStream::new() - } -} - -pub use quote::{quote, quote_span}; - -/// Creates a token stream containing a single token tree. -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - TokenStream(bridge::client::TokenStream::from_token_tree(match tree { - TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0), - TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0), - TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0), - TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0), - })) - } -} - -/// Collects a number of token trees into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(trees: I) -> Self { - trees.into_iter().map(TokenStream::from).collect() - } -} - -/// A "flattening" operation on token streams, collects token trees -/// from multiple token streams into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - let mut builder = bridge::client::TokenStreamBuilder::new(); - streams.into_iter().for_each(|stream| builder.push(stream.0)); - TokenStream(builder.build()) - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, trees: I) { - self.extend(trees.into_iter().map(TokenStream::from)); - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { - // FIXME(eddyb) Use an optimized implementation if/when possible. - *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect(); - } -} - -/// Public implementation details for the `TokenStream` type, such as iterators. -pub mod token_stream { - use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree}; - - /// An iterator over `TokenStream`'s `TokenTree`s. - /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, - /// and returns whole groups as token trees. 
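The `Extend<TokenStream>` impl above cannot append into the bridge-backed stream in place, so it takes the current stream out with `mem::replace`, chains the incoming streams behind it, and collects everything into a fresh stream. A minimal standalone sketch of that rebuild-by-collecting pattern, with `Vec<i32>` standing in for the opaque `TokenStream` (all names here are illustrative):

```rust
use std::{iter, mem};

// Rebuild-by-collecting, as in the `Extend<TokenStream>` impl above:
// take the current contents out of `target`, chain the new chunks behind
// them, and collect everything back into `target`.
fn extend_by_rebuilding(target: &mut Vec<i32>, extra: Vec<Vec<i32>>) {
    *target = iter::once(mem::replace(target, Vec::new()))
        .chain(extra)
        .flatten()
        .collect();
}

fn main() {
    let mut stream = vec![1, 2];
    extend_by_rebuilding(&mut stream, vec![vec![3], vec![4, 5]]);
    assert_eq!(stream, vec![1, 2, 3, 4, 5]);
}
```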
- #[derive(Clone)] - pub struct IntoIter(bridge::client::TokenStreamIter); - - impl Iterator for IntoIter { - type Item = TokenTree; - - fn next(&mut self) -> Option { - self.0.next().map(|tree| match tree { - bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)), - bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)), - bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)), - bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)), - }) - } - } - - impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = IntoIter; - - fn into_iter(self) -> IntoIter { - IntoIter(self.0.into_iter()) - } - } -} - -/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input. -/// For example, `quote!(a + b)` will produce an expression, that, when evaluated, constructs -/// the `TokenStream` `[Ident("a"), Punct('+', Alone), Ident("b")]`. -/// -/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term. -/// To quote `$` itself, use `$$`. -//pub macro quote($($t:tt)*) { -//[> compiler built-in <] -//} - -#[doc(hidden)] -mod quote; - -/// A region of source code, along with macro expansion information. -#[derive(Copy, Clone)] -pub struct Span(bridge::client::Span); - -macro_rules! diagnostic_method { - ($name:ident, $level:expr) => { - /// Creates a new `Diagnostic` with the given `message` at the span - /// `self`. - pub fn $name>(self, message: T) -> Diagnostic { - Diagnostic::spanned(self, $level, message) - } - }; -} - -impl Span { - /// A span that resolves at the macro definition site. - pub fn def_site() -> Span { - Span(bridge::client::Span::def_site()) - } - - /// The span of the invocation of the current procedural macro. - /// Identifiers created with this span will be resolved as if they were written - /// directly at the macro call location (call-site hygiene) and other code - /// at the macro call site will be able to refer to them as well. - pub fn call_site() -> Span { - Span(bridge::client::Span::call_site()) - } - - /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro - /// definition site (local variables, labels, `$crate`) and sometimes at the macro - /// call site (everything else). - /// The span location is taken from the call-site. - pub fn mixed_site() -> Span { - Span(bridge::client::Span::mixed_site()) - } - - /// The original source file into which this span points. - pub fn source_file(&self) -> SourceFile { - SourceFile(self.0.source_file()) - } - - /// The `Span` for the tokens in the previous macro expansion from which - /// `self` was generated from, if any. - pub fn parent(&self) -> Option { - self.0.parent().map(Span) - } - - /// The span for the origin source code that `self` was generated from. If - /// this `Span` wasn't generated from other macro expansions then the return - /// value is the same as `*self`. - pub fn source(&self) -> Span { - Span(self.0.source()) - } - - /// Gets the starting line/column in the source file for this span. - pub fn start(&self) -> LineColumn { - self.0.start().add_1_to_column() - } - - /// Gets the ending line/column in the source file for this span. - pub fn end(&self) -> LineColumn { - self.0.end().add_1_to_column() - } - - /// Creates an empty span pointing to directly before this span. - pub fn before(&self) -> Span { - Span(self.0.before()) - } - - /// Creates an empty span pointing to directly after this span. 
- pub fn after(&self) -> Span { - Span(self.0.after()) - } - - /// Creates a new span encompassing `self` and `other`. - /// - /// Returns `None` if `self` and `other` are from different files. - pub fn join(&self, other: Span) -> Option { - self.0.join(other.0).map(Span) - } - - /// Creates a new span with the same line/column information as `self` but - /// that resolves symbols as though it were at `other`. - pub fn resolved_at(&self, other: Span) -> Span { - Span(self.0.resolved_at(other.0)) - } - - /// Creates a new span with the same name resolution behavior as `self` but - /// with the line/column information of `other`. - pub fn located_at(&self, other: Span) -> Span { - other.resolved_at(*self) - } - - /// Compares to spans to see if they're equal. - pub fn eq(&self, other: &Span) -> bool { - self.0 == other.0 - } - - /// Returns the source text behind a span. This preserves the original source - /// code, including spaces and comments. It only returns a result if the span - /// corresponds to real source code. - /// - /// Note: The observable result of a macro should only rely on the tokens and - /// not on this source text. The result of this function is a best effort to - /// be used for diagnostics only. - pub fn source_text(&self) -> Option { - self.0.source_text() - } - - // Used by the implementation of `Span::quote` - #[doc(hidden)] - pub fn save_span(&self) -> usize { - self.0.save_span() - } - - // Used by the implementation of `Span::quote` - #[doc(hidden)] - pub fn recover_proc_macro_span(id: usize) -> Span { - Span(bridge::client::Span::recover_proc_macro_span(id)) - } - - diagnostic_method!(error, Level::Error); - diagnostic_method!(warning, Level::Warning); - diagnostic_method!(note, Level::Note); - diagnostic_method!(help, Level::Help); -} - -/// Prints a span in a form convenient for debugging. -impl fmt::Debug for Span { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -/// A line-column pair representing the start or end of a `Span`. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct LineColumn { - /// The 1-indexed line in the source file on which the span starts or ends (inclusive). - pub line: usize, - /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source - /// file on which the span starts or ends (inclusive). - pub column: usize, -} - -impl LineColumn { - fn add_1_to_column(self) -> Self { - LineColumn { line: self.line, column: self.column + 1 } - } -} - -impl Ord for LineColumn { - fn cmp(&self, other: &Self) -> Ordering { - self.line.cmp(&other.line).then(self.column.cmp(&other.column)) - } -} - -impl PartialOrd for LineColumn { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -/// The source file of a given `Span`. -#[derive(Clone)] -pub struct SourceFile(bridge::client::SourceFile); - -impl SourceFile { - /// Gets the path to this source file. - /// - /// ### Note - /// If the code span associated with this `SourceFile` was generated by an external macro, this - /// macro, this might not be an actual path on the filesystem. Use [`is_real`] to check. - /// - /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on - /// the command line, the path as given might not actually be valid. - /// - /// [`is_real`]: Self::is_real - pub fn path(&self) -> PathBuf { - PathBuf::from(self.0.path()) - } - - /// Returns `true` if this source file is a real source file, and not generated by an external - /// macro's expansion. 
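The `Ord` impl for `LineColumn` above orders positions by line first and falls back to the column only on ties, via `Ordering::then`. A small self-contained sketch of the same comparison, using plain `(line, column)` tuples as a stand-in:

```rust
use std::cmp::Ordering;

// Lexicographic comparison as in `Ord for LineColumn` above: compare lines
// first, and only fall back to the column comparison on equal lines.
fn cmp_line_col(a: (usize, usize), b: (usize, usize)) -> Ordering {
    a.0.cmp(&b.0).then(a.1.cmp(&b.1))
}

fn main() {
    assert_eq!(cmp_line_col((2, 9), (3, 0)), Ordering::Less);    // earlier line wins
    assert_eq!(cmp_line_col((3, 1), (3, 0)), Ordering::Greater); // same line, later column
    assert_eq!(cmp_line_col((3, 4), (3, 4)), Ordering::Equal);
}
```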
- pub fn is_real(&self) -> bool { - // This is a hack until intercrate spans are implemented and we can have real source files - // for spans generated in external macros. - // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368 - self.0.is_real() - } -} - -impl fmt::Debug for SourceFile { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SourceFile") - .field("path", &self.path()) - .field("is_real", &self.is_real()) - .finish() - } -} - -impl PartialEq for SourceFile { - fn eq(&self, other: &Self) -> bool { - self.0.eq(&other.0) - } -} - -impl Eq for SourceFile {} - -/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`). -#[derive(Clone)] -pub enum TokenTree { - /// A token stream surrounded by bracket delimiters. - Group(Group), - /// An identifier. - Ident(Ident), - /// A single punctuation character (`+`, `,`, `$`, etc.). - Punct(Punct), - /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc. - Literal(Literal), -} - -impl TokenTree { - /// Returns the span of this tree, delegating to the `span` method of - /// the contained token or a delimited stream. - pub fn span(&self) -> Span { - match *self { - TokenTree::Group(ref t) => t.span(), - TokenTree::Ident(ref t) => t.span(), - TokenTree::Punct(ref t) => t.span(), - TokenTree::Literal(ref t) => t.span(), - } - } - - /// Configures the span for *only this token*. - /// - /// Note that if this token is a `Group` then this method will not configure - /// the span of each of the internal tokens, this will simply delegate to - /// the `set_span` method of each variant. - pub fn set_span(&mut self, span: Span) { - match *self { - TokenTree::Group(ref mut t) => t.set_span(span), - TokenTree::Ident(ref mut t) => t.set_span(span), - TokenTree::Punct(ref mut t) => t.set_span(span), - TokenTree::Literal(ref mut t) => t.set_span(span), - } - } -} - -/// Prints token tree in a form convenient for debugging. -impl fmt::Debug for TokenTree { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // Each of these has the name in the struct type in the derived debug, - // so don't bother with an extra layer of indirection - match *self { - TokenTree::Group(ref tt) => tt.fmt(f), - TokenTree::Ident(ref tt) => tt.fmt(f), - TokenTree::Punct(ref tt) => tt.fmt(f), - TokenTree::Literal(ref tt) => tt.fmt(f), - } - } -} - -impl From for TokenTree { - fn from(g: Group) -> TokenTree { - TokenTree::Group(g) - } -} - -impl From for TokenTree { - fn from(g: Ident) -> TokenTree { - TokenTree::Ident(g) - } -} - -impl From for TokenTree { - fn from(g: Punct) -> TokenTree { - TokenTree::Punct(g) - } -} - -impl From for TokenTree { - fn from(g: Literal) -> TokenTree { - TokenTree::Literal(g) - } -} - -/// Prints the token tree as a string that is supposed to be losslessly convertible back -/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s -/// with `Delimiter::None` delimiters and negative numeric literals. -impl fmt::Display for TokenTree { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - unimplemented!() - } -} - -/// A delimited token stream. -/// -/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s. -#[derive(Clone)] -pub struct Group(bridge::client::Group); - -/// Describes how a sequence of token trees is delimited. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum Delimiter { - /// `( ... )` - Parenthesis, - /// `{ ... }` - Brace, - /// `[ ... ]` - Bracket, - /// `Ø ... 
Ø` - /// An implicit delimiter, that may, for example, appear around tokens coming from a - /// "macro variable" `$var`. It is important to preserve operator priorities in cases like - /// `$var * 3` where `$var` is `1 + 2`. - /// Implicit delimiters might not survive roundtrip of a token stream through a string. - None, -} - -impl Group { - /// Creates a new `Group` with the given delimiter and token stream. - /// - /// This constructor will set the span for this group to - /// `Span::call_site()`. To change the span you can use the `set_span` - /// method below. - pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group { - Group(bridge::client::Group::new(delimiter, stream.0)) - } - - /// Returns the delimiter of this `Group` - pub fn delimiter(&self) -> Delimiter { - self.0.delimiter() - } - - /// Returns the `TokenStream` of tokens that are delimited in this `Group`. - /// - /// Note that the returned token stream does not include the delimiter - /// returned above. - pub fn stream(&self) -> TokenStream { - TokenStream(self.0.stream()) - } - - /// Returns the span for the delimiters of this token stream, spanning the - /// entire `Group`. - /// - /// ```text - /// pub fn span(&self) -> Span { - /// ^^^^^^^ - /// ``` - pub fn span(&self) -> Span { - Span(self.0.span()) - } - - /// Returns the span pointing to the opening delimiter of this group. - /// - /// ```text - /// pub fn span_open(&self) -> Span { - /// ^ - /// ``` - pub fn span_open(&self) -> Span { - Span(self.0.span_open()) - } - - /// Returns the span pointing to the closing delimiter of this group. - /// - /// ```text - /// pub fn span_close(&self) -> Span { - /// ^ - /// ``` - pub fn span_close(&self) -> Span { - Span(self.0.span_close()) - } - - /// Configures the span for this `Group`'s delimiters, but not its internal - /// tokens. - /// - /// This method will **not** set the span of all the internal tokens spanned - /// by this group, but rather it will only set the span of the delimiter - /// tokens at the level of the `Group`. - pub fn set_span(&mut self, span: Span) { - self.0.set_span(span.0); - } -} - -/// Prints the group as a string that should be losslessly convertible back -/// into the same group (modulo spans), except for possibly `TokenTree::Group`s -/// with `Delimiter::None` delimiters. -impl fmt::Display for Group { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - unimplemented!() - } -} - -impl fmt::Debug for Group { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Group") - .field("delimiter", &self.delimiter()) - .field("stream", &self.stream()) - .field("span", &self.span()) - .finish() - } -} - -/// A `Punct` is a single punctuation character such as `+`, `-` or `#`. -/// -/// Multi-character operators like `+=` are represented as two instances of `Punct` with different -/// forms of `Spacing` returned. -#[derive(Clone)] -pub struct Punct(bridge::client::Punct); - -/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or -/// by a different token or whitespace ([`Spacing::Alone`]). -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum Spacing { - /// A `Punct` is not immediately followed by another `Punct`. - /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`. - Alone, - /// A `Punct` is immediately followed by another `Punct`. - /// E.g. `+` is `Joint` in `+=` and `++`. - /// - /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`. 
- Joint, -} - -impl Punct { - /// Creates a new `Punct` from the given character and spacing. - /// The `ch` argument must be a valid punctuation character permitted by the language, - /// otherwise the function will panic. - /// - /// The returned `Punct` will have the default span of `Span::call_site()` - /// which can be further configured with the `set_span` method below. - pub fn new(ch: char, spacing: Spacing) -> Punct { - Punct(bridge::client::Punct::new(ch, spacing)) - } - - /// Returns the value of this punctuation character as `char`. - pub fn as_char(&self) -> char { - self.0.as_char() - } - - /// Returns the spacing of this punctuation character, indicating whether it's immediately - /// followed by another `Punct` in the token stream, so they can potentially be combined into - /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace - /// (`Alone`) so the operator has certainly ended. - pub fn spacing(&self) -> Spacing { - self.0.spacing() - } - - /// Returns the span for this punctuation character. - pub fn span(&self) -> Span { - Span(self.0.span()) - } - - /// Configure the span for this punctuation character. - pub fn set_span(&mut self, span: Span) { - self.0 = self.0.with_span(span.0); - } -} - -/// Prints the punctuation character as a string that should be losslessly convertible -/// back into the same character. -impl fmt::Display for Punct { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - unimplemented!() - } -} - -impl fmt::Debug for Punct { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Punct") - .field("ch", &self.as_char()) - .field("spacing", &self.spacing()) - .field("span", &self.span()) - .finish() - } -} - -impl PartialEq for Punct { - fn eq(&self, rhs: &char) -> bool { - self.as_char() == *rhs - } -} - -impl PartialEq for char { - fn eq(&self, rhs: &Punct) -> bool { - *self == rhs.as_char() - } -} - -/// An identifier (`ident`). -#[derive(Clone)] -pub struct Ident(bridge::client::Ident); - -impl Ident { - /// Creates a new `Ident` with the given `string` as well as the specified - /// `span`. - /// The `string` argument must be a valid identifier permitted by the - /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic. - /// - /// Note that `span`, currently in rustc, configures the hygiene information - /// for this identifier. - /// - /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene - /// meaning that identifiers created with this span will be resolved as if they were written - /// directly at the location of the macro call, and other code at the macro call site will be - /// able to refer to them as well. - /// - /// Later spans like `Span::def_site()` will allow to opt-in to "definition-site" hygiene - /// meaning that identifiers created with this span will be resolved at the location of the - /// macro definition and other code at the macro call site will not be able to refer to them. - /// - /// Due to the current importance of hygiene this constructor, unlike other - /// tokens, requires a `Span` to be specified at construction. - pub fn new(string: &str, span: Span) -> Ident { - Ident(bridge::client::Ident::new(string, span.0, false)) - } - - /// Same as `Ident::new`, but creates a raw identifier (`r#ident`). - /// The `string` argument be a valid identifier permitted by the language - /// (including keywords, e.g. `fn`). Keywords which are usable in path segments - /// (e.g. 
`self`, `super`) are not supported, and will cause a panic. - pub fn new_raw(string: &str, span: Span) -> Ident { - Ident(bridge::client::Ident::new(string, span.0, true)) - } - - /// Returns the span of this `Ident`, encompassing the entire string returned - /// by [`to_string`](Self::to_string). - pub fn span(&self) -> Span { - Span(self.0.span()) - } - - /// Configures the span of this `Ident`, possibly changing its hygiene context. - pub fn set_span(&mut self, span: Span) { - self.0 = self.0.with_span(span.0); - } -} - -/// Prints the identifier as a string that should be losslessly convertible -/// back into the same identifier. -impl fmt::Display for Ident { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - unimplemented!() - } -} - -impl fmt::Debug for Ident { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Ident") - .field("ident", &self.to_string()) - .field("span", &self.span()) - .finish() - } -} - -/// A literal string (`"hello"`), byte string (`b"hello"`), -/// character (`'a'`), byte character (`b'a'`), an integer or floating point number -/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`). -/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s. -#[derive(Clone)] -pub struct Literal(bridge::client::Literal); - -macro_rules! suffixed_int_literals { - ($($name:ident => $kind:ident,)*) => ($( - /// Creates a new suffixed integer literal with the specified value. - /// - /// This function will create an integer like `1u32` where the integer - /// value specified is the first part of the token and the integral is - /// also suffixed at the end. - /// Literals created from negative numbers might not survive round-trips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// Literals created through this method have the `Span::call_site()` - /// span by default, which can be configured with the `set_span` method - /// below. - pub fn $name(n: $kind) -> Literal { - Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind))) - } - )*) -} - -macro_rules! unsuffixed_int_literals { - ($($name:ident => $kind:ident,)*) => ($( - /// Creates a new unsuffixed integer literal with the specified value. - /// - /// This function will create an integer like `1` where the integer - /// value specified is the first part of the token. No suffix is - /// specified on this token, meaning that invocations like - /// `Literal::i8_unsuffixed(1)` are equivalent to - /// `Literal::u32_unsuffixed(1)`. - /// Literals created from negative numbers might not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// Literals created through this method have the `Span::call_site()` - /// span by default, which can be configured with the `set_span` method - /// below. - pub fn $name(n: $kind) -> Literal { - Literal(bridge::client::Literal::integer(&n.to_string())) - } - )*) -} - -impl Literal { - suffixed_int_literals! { - u8_suffixed => u8, - u16_suffixed => u16, - u32_suffixed => u32, - u64_suffixed => u64, - u128_suffixed => u128, - usize_suffixed => usize, - i8_suffixed => i8, - i16_suffixed => i16, - i32_suffixed => i32, - i64_suffixed => i64, - i128_suffixed => i128, - isize_suffixed => isize, - } - - unsuffixed_int_literals! 
{ - u8_unsuffixed => u8, - u16_unsuffixed => u16, - u32_unsuffixed => u32, - u64_unsuffixed => u64, - u128_unsuffixed => u128, - usize_unsuffixed => usize, - i8_unsuffixed => i8, - i16_unsuffixed => i16, - i32_unsuffixed => i32, - i64_unsuffixed => i64, - i128_unsuffixed => i128, - isize_unsuffixed => isize, - } - - /// Creates a new unsuffixed floating-point literal. - /// - /// This constructor is similar to those like `Literal::i8_unsuffixed` where - /// the float's value is emitted directly into the token but no suffix is - /// used, so it may be inferred to be a `f64` later in the compiler. - /// Literals created from negative numbers might not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for - /// example if it is infinity or NaN this function will panic. - pub fn f32_unsuffixed(n: f32) -> Literal { - if !n.is_finite() { - panic!("Invalid float literal {n}"); - } - let mut repr = n.to_string(); - if !repr.contains('.') { - repr.push_str(".0"); - } - Literal(bridge::client::Literal::float(&repr)) - } - - /// Creates a new suffixed floating-point literal. - /// - /// This constructor will create a literal like `1.0f32` where the value - /// specified is the preceding part of the token and `f32` is the suffix of - /// the token. This token will always be inferred to be an `f32` in the - /// compiler. - /// Literals created from negative numbers might not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for - /// example if it is infinity or NaN this function will panic. - pub fn f32_suffixed(n: f32) -> Literal { - if !n.is_finite() { - panic!("Invalid float literal {n}"); - } - Literal(bridge::client::Literal::f32(&n.to_string())) - } - - /// Creates a new unsuffixed floating-point literal. - /// - /// This constructor is similar to those like `Literal::i8_unsuffixed` where - /// the float's value is emitted directly into the token but no suffix is - /// used, so it may be inferred to be a `f64` later in the compiler. - /// Literals created from negative numbers might not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for - /// example if it is infinity or NaN this function will panic. - pub fn f64_unsuffixed(n: f64) -> Literal { - if !n.is_finite() { - panic!("Invalid float literal {n}"); - } - let mut repr = n.to_string(); - if !repr.contains('.') { - repr.push_str(".0"); - } - Literal(bridge::client::Literal::float(&repr)) - } - - /// Creates a new suffixed floating-point literal. - /// - /// This constructor will create a literal like `1.0f64` where the value - /// specified is the preceding part of the token and `f64` is the suffix of - /// the token. This token will always be inferred to be an `f64` in the - /// compiler. - /// Literals created from negative numbers might not survive rountrips through - /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal). - /// - /// # Panics - /// - /// This function requires that the specified float is finite, for - /// example if it is infinity or NaN this function will panic. 
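The unsuffixed float constructors above print the value with `to_string` and append `.0` when no decimal point is present, so the emitted token still reads as a float. A standalone sketch of that formatting rule (the `float_repr` helper is purely illustrative):

```rust
// Formatting rule used by the unsuffixed float constructors above: print the
// value, then append ".0" if no decimal point is present.
fn float_repr(n: f64) -> String {
    assert!(n.is_finite(), "Invalid float literal {n}");
    let mut repr = n.to_string();
    if !repr.contains('.') {
        repr.push_str(".0");
    }
    repr
}

fn main() {
    assert_eq!(float_repr(1.0), "1.0"); // `1f64.to_string()` is "1", so ".0" is appended
    assert_eq!(float_repr(2.5), "2.5");
    assert_eq!(float_repr(-3.0), "-3.0");
}
```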
- pub fn f64_suffixed(n: f64) -> Literal { - if !n.is_finite() { - panic!("Invalid float literal {n}"); - } - Literal(bridge::client::Literal::f64(&n.to_string())) - } - - /// String literal. - pub fn string(string: &str) -> Literal { - Literal(bridge::client::Literal::string(string)) - } - - /// Character literal. - pub fn character(ch: char) -> Literal { - Literal(bridge::client::Literal::character(ch)) - } - - /// Byte string literal. - pub fn byte_string(bytes: &[u8]) -> Literal { - Literal(bridge::client::Literal::byte_string(bytes)) - } - - /// Returns the span encompassing this literal. - pub fn span(&self) -> Span { - Span(self.0.span()) - } - - /// Configures the span associated for this literal. - pub fn set_span(&mut self, span: Span) { - self.0.set_span(span.0); - } - - /// Returns a `Span` that is a subset of `self.span()` containing only the - /// source bytes in range `range`. Returns `None` if the would-be trimmed - /// span is outside the bounds of `self`. - // FIXME(SergioBenitez): check that the byte range starts and ends at a - // UTF-8 boundary of the source. otherwise, it's likely that a panic will - // occur elsewhere when the source text is printed. - // FIXME(SergioBenitez): there is no way for the user to know what - // `self.span()` actually maps to, so this method can currently only be - // called blindly. For example, `to_string()` for the character 'c' returns - // "'\u{63}'"; there is no way for the user to know whether the source text - // was 'c' or whether it was '\u{63}'. - pub fn subspan>(&self, range: R) -> Option { - self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span) - } -} - -/// Parse a single literal from its stringified representation. -/// -/// In order to parse successfully, the input string must not contain anything -/// but the literal token. Specifically, it must not contain whitespace or -/// comments in addition to the literal. -/// -/// The resulting literal token will have a `Span::call_site()` span. -/// -/// NOTE: some errors may cause panics instead of returning `LexError`. We -/// reserve the right to change these errors into `LexError`s later. -impl FromStr for Literal { - type Err = LexError; - - fn from_str(src: &str) -> Result { - match bridge::client::Literal::from_str(src) { - Ok(literal) => Ok(Literal(literal)), - Err(()) => Err(LexError), - } - } -} - -/// Prints the literal as a string that should be losslessly convertible -/// back into the same literal (except for possible rounding for floating point literals). -impl fmt::Display for Literal { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - unimplemented!() - } -} - -impl fmt::Debug for Literal { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -/// Tracked access to environment variables. -pub mod tracked_env { - use std::env::{self, VarError}; - use std::ffi::OsStr; - - /// Retrieve an environment variable and add it to build dependency info. - /// Build system executing the compiler will know that the variable was accessed during - /// compilation, and will be able to rerun the build when the value of that variable changes. - /// Besides the dependency tracking this function should be equivalent to `env::var` from the - /// standard library, except that the argument must be UTF-8. 
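As documented above, `tracked_env::var` behaves like `std::env::var` but also reports the access so the build system can re-run compilation when the variable changes. A standalone sketch of that contract, with an `eprintln!`-based `track` function standing in for the bridge's dependency tracking (both helper names are illustrative):

```rust
use std::env::{self, VarError};

// Stand-in for the bridge's `track_env_var` call: here the access is only
// printed, but a build system could record it as a rebuild dependency.
fn track(key: &str, value: Option<&str>) {
    eprintln!("dependency: env var {key} = {value:?}");
}

// Same contract as `tracked_env::var` above: read the variable and report
// the access, whether or not the read succeeded.
fn tracked_var(key: &str) -> Result<String, VarError> {
    let value = env::var(key);
    track(key, value.as_deref().ok());
    value
}

fn main() {
    let _ = tracked_var("PATH");
}
```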
- pub fn var + AsRef>(key: K) -> Result { - let key: &str = key.as_ref(); - let value = env::var(key); - super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok()); - value - } -} - -/// Tracked access to additional files. -pub mod tracked_path { - - /// Track a file explicitly. - /// - /// Commonly used for tracking asset preprocessing. - pub fn path>(path: P) { - let path: &str = path.as_ref(); - super::bridge::client::FreeFunctions::track_path(path); - } -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs b/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs deleted file mode 100644 index b539ab9c0c6d6..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs +++ /dev/null @@ -1,140 +0,0 @@ -//! # Quasiquoter -//! This file contains the implementation internals of the quasiquoter provided by `quote!`. - -//! This quasiquoter uses macros 2.0 hygiene to reliably access -//! items from `proc_macro`, to build a `proc_macro::TokenStream`. - -use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree}; - -macro_rules! quote_tt { - (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) }; - ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) }; - ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) }; - (,) => { Punct::new(',', Spacing::Alone) }; - (.) => { Punct::new('.', Spacing::Alone) }; - (:) => { Punct::new(':', Spacing::Alone) }; - (;) => { Punct::new(';', Spacing::Alone) }; - (!) => { Punct::new('!', Spacing::Alone) }; - (<) => { Punct::new('<', Spacing::Alone) }; - (>) => { Punct::new('>', Spacing::Alone) }; - (&) => { Punct::new('&', Spacing::Alone) }; - (=) => { Punct::new('=', Spacing::Alone) }; - ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) }; -} - -macro_rules! quote_ts { - ((@ $($t:tt)*)) => { $($t)* }; - (::) => { - [ - TokenTree::from(Punct::new(':', Spacing::Joint)), - TokenTree::from(Punct::new(':', Spacing::Alone)), - ].iter() - .cloned() - .map(|mut x| { - x.set_span(Span::def_site()); - x - }) - .collect::() - }; - ($t:tt) => { TokenTree::from(quote_tt!($t)) }; -} - -/// Simpler version of the real `quote!` macro, implemented solely -/// through `macro_rules`, for bootstrapping the real implementation -/// (see the `quote` function), which does not have access to the -/// real `quote!` macro due to the `proc_macro` crate not being -/// able to depend on itself. -/// -/// Note: supported tokens are a subset of the real `quote!`, but -/// unquoting is different: instead of `$x`, this uses `(@ expr)`. -macro_rules! quote { - () => { TokenStream::new() }; - ($($t:tt)*) => { - [ - $(TokenStream::from(quote_ts!($t)),)* - ].iter().cloned().collect::() - }; -} - -/// Quote a `TokenStream` into a `TokenStream`. -/// This is the actual implementation of the `quote!()` proc macro. -/// -/// It is loaded by the compiler in `register_builtin_macros`. 
-pub fn quote(stream: TokenStream) -> TokenStream { - if stream.is_empty() { - return quote!(crate::TokenStream::new()); - } - let proc_macro_crate = quote!(crate); - let mut after_dollar = false; - let tokens = stream - .into_iter() - .filter_map(|tree| { - if after_dollar { - after_dollar = false; - match tree { - TokenTree::Ident(_) => { - return Some(quote!(Into::::into( - Clone::clone(&(@ tree))),)); - } - TokenTree::Punct(ref tt) if tt.as_char() == '$' => {} - _ => panic!("`$` must be followed by an ident or `$` in `quote!`"), - } - } else if let TokenTree::Punct(ref tt) = tree { - if tt.as_char() == '$' { - after_dollar = true; - return None; - } - } - - Some(quote!(crate::TokenStream::from((@ match tree { - TokenTree::Punct(tt) => quote!(crate::TokenTree::Punct(crate::Punct::new( - (@ TokenTree::from(Literal::character(tt.as_char()))), - (@ match tt.spacing() { - Spacing::Alone => quote!(crate::Spacing::Alone), - Spacing::Joint => quote!(crate::Spacing::Joint), - }), - ))), - TokenTree::Group(tt) => quote!(crate::TokenTree::Group(crate::Group::new( - (@ match tt.delimiter() { - Delimiter::Parenthesis => quote!(crate::Delimiter::Parenthesis), - Delimiter::Brace => quote!(crate::Delimiter::Brace), - Delimiter::Bracket => quote!(crate::Delimiter::Bracket), - Delimiter::None => quote!(crate::Delimiter::None), - }), - (@ quote(tt.stream())), - ))), - TokenTree::Ident(tt) => quote!(crate::TokenTree::Ident(crate::Ident::new( - (@ TokenTree::from(Literal::string(&tt.to_string()))), - (@ quote_span(proc_macro_crate.clone(), tt.span())), - ))), - TokenTree::Literal(tt) => quote!(crate::TokenTree::Literal({ - let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string()))) - .parse::() - .unwrap() - .into_iter(); - if let (Some(crate::TokenTree::Literal(mut lit)), None) = - (iter.next(), iter.next()) - { - lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span()))); - lit - } else { - unreachable!() - } - })) - })),)) - }) - .collect::(); - - if after_dollar { - panic!("unexpected trailing `$` in `quote!`"); - } - - quote!([(@ tokens)].iter().cloned().collect::()) -} - -/// Quote a `Span` into a `TokenStream`. -/// This is needed to implement a custom quoter. -pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream { - let id = span.save_span(); - quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id))))) -} diff --git a/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs deleted file mode 100644 index 22d4ad94f770e..0000000000000 --- a/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs +++ /dev/null @@ -1,822 +0,0 @@ -//! Rustc proc-macro server implementation with tt -//! -//! Based on idea from -//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that -//! we could provide any TokenStream implementation. -//! The original idea from fedochet is using proc-macro2 as backend, -//! we use tt instead for better integration with RA. -//! -//! 
FIXME: No span and source file information is implemented yet - -use super::proc_macro::bridge::{self, server}; - -use std::collections::HashMap; -use std::hash::Hash; -use std::ops::Bound; -use std::{ascii, vec::IntoIter}; - -type Group = tt::Subtree; -type TokenTree = tt::TokenTree; -type Punct = tt::Punct; -type Spacing = tt::Spacing; -type Literal = tt::Literal; -type Span = tt::TokenId; - -#[derive(Debug, Clone)] -pub struct TokenStream { - pub token_trees: Vec, -} - -impl TokenStream { - pub fn new() -> Self { - TokenStream { token_trees: Default::default() } - } - - pub fn with_subtree(subtree: tt::Subtree) -> Self { - if subtree.delimiter.is_some() { - TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] } - } else { - TokenStream { token_trees: subtree.token_trees } - } - } - - pub fn into_subtree(self) -> tt::Subtree { - tt::Subtree { delimiter: None, token_trees: self.token_trees } - } - - pub fn is_empty(&self) -> bool { - self.token_trees.is_empty() - } -} - -/// Creates a token stream containing a single token tree. -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { - TokenStream { token_trees: vec![tree] } - } -} - -/// Collects a number of token trees into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(trees: I) -> Self { - trees.into_iter().map(TokenStream::from).collect() - } -} - -/// A "flattening" operation on token streams, collects token trees -/// from multiple token streams into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { - let mut builder = TokenStreamBuilder::new(); - streams.into_iter().for_each(|stream| builder.push(stream)); - builder.build() - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, trees: I) { - self.extend(trees.into_iter().map(TokenStream::from)); - } -} - -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { - for item in streams { - for tkn in item { - match tkn { - tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => { - self.token_trees.extend(subtree.token_trees); - } - _ => { - self.token_trees.push(tkn); - } - } - } - } - } -} - -#[derive(Clone)] -pub struct SourceFile { - // FIXME stub -} - -type Level = super::proc_macro::Level; -type LineColumn = super::proc_macro::LineColumn; - -/// A structure representing a diagnostic message and associated children -/// messages. -#[derive(Clone, Debug)] -pub struct Diagnostic { - level: Level, - message: String, - spans: Vec, - children: Vec, -} - -impl Diagnostic { - /// Creates a new diagnostic with the given `level` and `message`. 
- pub fn new>(level: Level, message: T) -> Diagnostic { - Diagnostic { level, message: message.into(), spans: vec![], children: vec![] } - } -} - -// Rustc Server Ident has to be `Copyable` -// We use a stub here for bypassing -#[derive(Hash, Eq, PartialEq, Copy, Clone)] -pub struct IdentId(u32); - -#[derive(Clone, Hash, Eq, PartialEq)] -struct IdentData(tt::Ident); - -#[derive(Default)] -struct IdentInterner { - idents: HashMap, - ident_data: Vec, -} - -impl IdentInterner { - fn intern(&mut self, data: &IdentData) -> u32 { - if let Some(index) = self.idents.get(data) { - return *index; - } - - let index = self.idents.len() as u32; - self.ident_data.push(data.clone()); - self.idents.insert(data.clone(), index); - index - } - - fn get(&self, index: u32) -> &IdentData { - &self.ident_data[index as usize] - } - - #[allow(unused)] - fn get_mut(&mut self, index: u32) -> &mut IdentData { - self.ident_data.get_mut(index as usize).expect("Should be consistent") - } -} - -pub struct TokenStreamBuilder { - acc: TokenStream, -} - -/// Public implementation details for the `TokenStream` type, such as iterators. -pub mod token_stream { - use std::str::FromStr; - - use super::{TokenStream, TokenTree}; - - /// An iterator over `TokenStream`'s `TokenTree`s. - /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, - /// and returns whole groups as token trees. - impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = super::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.token_trees.into_iter() - } - } - - type LexError = String; - - /// Attempts to break the string into tokens and parse those tokens into a token stream. - /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters - /// or characters not existing in the language. - /// All tokens in the parsed stream get `Span::call_site()` spans. - /// - /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to - /// change these errors into `LexError`s later. 
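`IdentInterner` above stores each distinct identifier once and hands out a small `u32` index for it, which is what lets the server expose a `Copy` identifier handle. A self-contained sketch of the same interning scheme over plain strings (the `Interner` type here is illustrative):

```rust
use std::collections::HashMap;

// Same interning scheme as `IdentInterner` above, over plain strings: each
// distinct value is stored once and addressed by a small `u32` handle.
#[derive(Default)]
struct Interner {
    indices: HashMap<String, u32>,
    data: Vec<String>,
}

impl Interner {
    fn intern(&mut self, value: &str) -> u32 {
        if let Some(&index) = self.indices.get(value) {
            return index;
        }
        let index = self.data.len() as u32;
        self.data.push(value.to_owned());
        self.indices.insert(value.to_owned(), index);
        index
    }

    fn get(&self, index: u32) -> &str {
        &self.data[index as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("foo");
    let b = interner.intern("bar");
    let c = interner.intern("foo");
    assert_eq!(a, c); // interning the same string yields the same handle
    assert_ne!(a, b);
    assert_eq!(interner.get(b), "bar");
}
```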
- impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - let (subtree, _token_map) = - mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?; - - let subtree = subtree_replace_token_ids_with_unspecified(subtree); - Ok(TokenStream::with_subtree(subtree)) - } - } - - impl ToString for TokenStream { - fn to_string(&self) -> String { - tt::pretty(&self.token_trees) - } - } - - fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree { - tt::Subtree { - delimiter: subtree - .delimiter - .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }), - token_trees: subtree - .token_trees - .into_iter() - .map(token_tree_replace_token_ids_with_unspecified) - .collect(), - } - } - - fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree { - match tt { - tt::TokenTree::Leaf(leaf) => { - tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf)) - } - tt::TokenTree::Subtree(subtree) => { - tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree)) - } - } - } - - fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf { - match leaf { - tt::Leaf::Literal(lit) => { - tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit }) - } - tt::Leaf::Punct(punct) => { - tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct }) - } - tt::Leaf::Ident(ident) => { - tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident }) - } - } - } -} - -impl TokenStreamBuilder { - fn new() -> TokenStreamBuilder { - TokenStreamBuilder { acc: TokenStream::new() } - } - - fn push(&mut self, stream: TokenStream) { - self.acc.extend(stream.into_iter()) - } - - fn build(self) -> TokenStream { - self.acc - } -} - -pub struct FreeFunctions; - -#[derive(Clone)] -pub struct TokenStreamIter { - trees: IntoIter, -} - -#[derive(Default)] -pub struct RustAnalyzer { - ident_interner: IdentInterner, - // FIXME: store span information here. 
-} - -impl server::Types for RustAnalyzer { - type FreeFunctions = FreeFunctions; - type TokenStream = TokenStream; - type TokenStreamBuilder = TokenStreamBuilder; - type TokenStreamIter = TokenStreamIter; - type Group = Group; - type Punct = Punct; - type Ident = IdentId; - type Literal = Literal; - type SourceFile = SourceFile; - type Diagnostic = Diagnostic; - type Span = Span; - type MultiSpan = Vec; -} - -impl server::FreeFunctions for RustAnalyzer { - fn track_env_var(&mut self, _var: &str, _value: Option<&str>) { - // FIXME: track env var accesses - // https://github.com/rust-lang/rust/pull/71858 - } - fn track_path(&mut self, _path: &str) {} -} - -impl server::TokenStream for RustAnalyzer { - fn new(&mut self) -> Self::TokenStream { - Self::TokenStream::new() - } - - fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { - stream.is_empty() - } - fn from_str(&mut self, src: &str) -> Self::TokenStream { - use std::str::FromStr; - - Self::TokenStream::from_str(src).expect("cannot parse string") - } - fn to_string(&mut self, stream: &Self::TokenStream) -> String { - stream.to_string() - } - fn from_token_tree( - &mut self, - tree: bridge::TokenTree, - ) -> Self::TokenStream { - match tree { - bridge::TokenTree::Group(group) => { - let tree = TokenTree::from(group); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Ident(IdentId(index)) => { - let IdentData(ident) = self.ident_interner.get(index).clone(); - let ident: tt::Ident = ident; - let leaf = tt::Leaf::from(ident); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Literal(literal) => { - let leaf = tt::Leaf::from(literal); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - - bridge::TokenTree::Punct(p) => { - let leaf = tt::Leaf::from(p); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(vec![tree]) - } - } - } - - fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter { - let trees: Vec = stream.into_iter().collect(); - TokenStreamIter { trees: trees.into_iter() } - } - - fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result { - Ok(self_.clone()) - } -} - -impl server::TokenStreamBuilder for RustAnalyzer { - fn new(&mut self) -> Self::TokenStreamBuilder { - Self::TokenStreamBuilder::new() - } - fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) { - builder.push(stream) - } - fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream { - builder.build() - } -} - -impl server::TokenStreamIter for RustAnalyzer { - fn next( - &mut self, - iter: &mut Self::TokenStreamIter, - ) -> Option> { - iter.trees.next().map(|tree| match tree { - TokenTree::Subtree(group) => bridge::TokenTree::Group(group), - TokenTree::Leaf(tt::Leaf::Ident(ident)) => { - bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident)))) - } - TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal), - TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct), - }) - } -} - -fn delim_to_internal(d: bridge::Delimiter) -> Option { - let kind = match d { - bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, - bridge::Delimiter::Brace => tt::DelimiterKind::Brace, - bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket, - bridge::Delimiter::None => return None, - }; - Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }) -} - -fn delim_to_external(d: Option) -> 
bridge::Delimiter { - match d.map(|it| it.kind) { - Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis, - Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace, - Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket, - None => bridge::Delimiter::None, - } -} - -fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing { - match spacing { - bridge::Spacing::Alone => Spacing::Alone, - bridge::Spacing::Joint => Spacing::Joint, - } -} - -fn spacing_to_external(spacing: Spacing) -> bridge::Spacing { - match spacing { - Spacing::Alone => bridge::Spacing::Alone, - Spacing::Joint => bridge::Spacing::Joint, - } -} - -impl server::Group for RustAnalyzer { - fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group { - Self::Group { delimiter: delim_to_internal(delimiter), token_trees: stream.token_trees } - } - fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter { - delim_to_external(group.delimiter) - } - - // NOTE: Return value of do not include delimiter - fn stream(&mut self, group: &Self::Group) -> Self::TokenStream { - TokenStream { token_trees: group.token_trees.clone() } - } - - fn span(&mut self, group: &Self::Group) -> Self::Span { - group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified) - } - - fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) { - if let Some(delim) = &mut group.delimiter { - delim.id = span; - } - } - - fn span_open(&mut self, group: &Self::Group) -> Self::Span { - // FIXME we only store one `TokenId` for the delimiters - group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified) - } - - fn span_close(&mut self, group: &Self::Group) -> Self::Span { - // FIXME we only store one `TokenId` for the delimiters - group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified) - } -} - -impl server::Punct for RustAnalyzer { - fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct { - tt::Punct { - char: ch, - spacing: spacing_to_internal(spacing), - id: tt::TokenId::unspecified(), - } - } - fn as_char(&mut self, punct: Self::Punct) -> char { - punct.char - } - fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing { - spacing_to_external(punct.spacing) - } - fn span(&mut self, punct: Self::Punct) -> Self::Span { - punct.id - } - fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct { - tt::Punct { id: span, ..punct } - } -} - -impl server::Ident for RustAnalyzer { - fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident { - IdentId(self.ident_interner.intern(&IdentData(tt::Ident::new_with_is_raw( - string.into(), - span, - is_raw, - )))) - } - - fn span(&mut self, ident: Self::Ident) -> Self::Span { - self.ident_interner.get(ident.0).0.id - } - fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident { - let data = self.ident_interner.get(ident.0); - let new = IdentData(tt::Ident { id: span, ..data.0.clone() }); - IdentId(self.ident_interner.intern(&new)) - } -} - -impl server::Literal for RustAnalyzer { - fn debug_kind(&mut self, _literal: &Self::Literal) -> String { - // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these. - // They must still be present to be ABI-compatible and work with upstream proc_macro. 
- "".to_owned() - } - fn from_str(&mut self, s: &str) -> Result { - Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() }) - } - fn symbol(&mut self, literal: &Self::Literal) -> String { - literal.text.to_string() - } - fn suffix(&mut self, _literal: &Self::Literal) -> Option { - None - } - - fn to_string(&mut self, literal: &Self::Literal) -> String { - literal.to_string() - } - - fn integer(&mut self, n: &str) -> Self::Literal { - let n = match n.parse::() { - Ok(n) => n.to_string(), - Err(_) => n.parse::().unwrap().to_string(), - }; - Literal { text: n.into(), id: tt::TokenId::unspecified() } - } - - fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal { - macro_rules! def_suffixed_integer { - ($kind:ident, $($ty:ty),*) => { - match $kind { - $( - stringify!($ty) => { - let n: $ty = n.parse().unwrap(); - format!(concat!("{}", stringify!($ty)), n) - } - )* - _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind), - } - } - } - - let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize}; - - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn float(&mut self, n: &str) -> Self::Literal { - let n: f64 = n.parse().unwrap(); - let mut text = f64::to_string(&n); - if !text.contains('.') { - text += ".0" - } - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn f32(&mut self, n: &str) -> Self::Literal { - let n: f32 = n.parse().unwrap(); - let text = format!("{n}f32"); - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn f64(&mut self, n: &str) -> Self::Literal { - let n: f64 = n.parse().unwrap(); - let text = format!("{n}f64"); - Literal { text: text.into(), id: tt::TokenId::unspecified() } - } - - fn string(&mut self, string: &str) -> Self::Literal { - let mut escaped = String::new(); - for ch in string.chars() { - escaped.extend(ch.escape_debug()); - } - Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() } - } - - fn character(&mut self, ch: char) -> Self::Literal { - Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() } - } - - fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { - let string = bytes - .iter() - .cloned() - .flat_map(ascii::escape_default) - .map(Into::::into) - .collect::(); - - Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() } - } - - fn span(&mut self, literal: &Self::Literal) -> Self::Span { - literal.id - } - - fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) { - literal.id = span; - } - - fn subspan( - &mut self, - _literal: &Self::Literal, - _start: Bound, - _end: Bound, - ) -> Option { - // FIXME handle span - None - } -} - -impl server::SourceFile for RustAnalyzer { - // FIXME these are all stubs - fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { - true - } - fn path(&mut self, _file: &Self::SourceFile) -> String { - String::new() - } - fn is_real(&mut self, _file: &Self::SourceFile) -> bool { - true - } -} - -impl server::Diagnostic for RustAnalyzer { - fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic { - let mut diag = Diagnostic::new(level, msg); - diag.spans = spans; - diag - } - - fn sub( - &mut self, - _diag: &mut Self::Diagnostic, - _level: Level, - _msg: &str, - _spans: Self::MultiSpan, - ) { - // FIXME handle diagnostic - // - } - - fn emit(&mut self, _diag: Self::Diagnostic) { - // FIXME handle diagnostic - // diag.emit() - } 
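The server's `string` and `byte_string` constructors above build the literal text by escaping each character with `char::escape_debug` and each byte with `std::ascii::escape_default`, respectively. A standalone sketch of that escaping (the helper names are illustrative):

```rust
use std::ascii;

// String literals escape each `char` with `escape_debug`; byte string
// literals escape each byte with `ascii::escape_default`, as above.
fn string_literal(s: &str) -> String {
    let mut escaped = String::new();
    for ch in s.chars() {
        escaped.extend(ch.escape_debug());
    }
    format!("\"{escaped}\"")
}

fn byte_string_literal(bytes: &[u8]) -> String {
    let inner = bytes
        .iter()
        .copied()
        .flat_map(ascii::escape_default)
        .map(char::from)
        .collect::<String>();
    format!("b\"{inner}\"")
}

fn main() {
    assert_eq!(string_literal("a\"b\n"), "\"a\\\"b\\n\"");
    assert_eq!(byte_string_literal(b"1234\x88"), "b\"1234\\x88\"");
}
```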
-} - -impl server::Span for RustAnalyzer { - fn debug(&mut self, span: Self::Span) -> String { - format!("{:?}", span.0) - } - fn def_site(&mut self) -> Self::Span { - // MySpan(self.span_interner.intern(&MySpanData(Span::def_site()))) - // FIXME handle span - tt::TokenId::unspecified() - } - fn call_site(&mut self) -> Self::Span { - // MySpan(self.span_interner.intern(&MySpanData(Span::call_site()))) - // FIXME handle span - tt::TokenId::unspecified() - } - fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { - SourceFile {} - } - fn save_span(&mut self, _span: Self::Span) -> usize { - // FIXME stub - 0 - } - fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { - // FIXME stub - tt::TokenId::unspecified() - } - /// Recent feature, not yet in the proc_macro - /// - /// See PR: - /// https://github.com/rust-lang/rust/pull/55780 - fn source_text(&mut self, _span: Self::Span) -> Option { - None - } - - fn parent(&mut self, _span: Self::Span) -> Option { - // FIXME handle span - None - } - fn source(&mut self, span: Self::Span) -> Self::Span { - // FIXME handle span - span - } - fn start(&mut self, _span: Self::Span) -> LineColumn { - // FIXME handle span - LineColumn { line: 0, column: 0 } - } - fn end(&mut self, _span: Self::Span) -> LineColumn { - // FIXME handle span - LineColumn { line: 0, column: 0 } - } - fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option { - // Just return the first span again, because some macros will unwrap the result. - Some(first) - } - fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { - // FIXME handle span - tt::TokenId::unspecified() - } - - fn mixed_site(&mut self) -> Self::Span { - // FIXME handle span - tt::TokenId::unspecified() - } - - fn after(&mut self, _self_: Self::Span) -> Self::Span { - tt::TokenId::unspecified() - } - - fn before(&mut self, _self_: Self::Span) -> Self::Span { - tt::TokenId::unspecified() - } -} - -impl server::MultiSpan for RustAnalyzer { - fn new(&mut self) -> Self::MultiSpan { - // FIXME handle span - vec![] - } - - fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) { - //TODP - other.push(span) - } -} - -#[cfg(test)] -mod tests { - use super::super::proc_macro::bridge::server::Literal; - use super::*; - - #[test] - fn test_ra_server_literals() { - let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() }; - assert_eq!(srv.integer("1234").text, "1234"); - - assert_eq!(srv.typed_integer("12", "u8").text, "12u8"); - assert_eq!(srv.typed_integer("255", "u16").text, "255u16"); - assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32"); - assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64"); - assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128"); - assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize"); - assert_eq!(srv.typed_integer("127", "i8").text, "127i8"); - assert_eq!(srv.typed_integer("255", "i16").text, "255i16"); - assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32"); - assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64"); - assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128"); - assert_eq!(srv.float("0").text, "0.0"); - assert_eq!(srv.float("15684.5867").text, "15684.5867"); - assert_eq!(srv.f32("15684.58").text, "15684.58f32"); - assert_eq!(srv.f64("15684.58").text, "15684.58f64"); - - assert_eq!(srv.string("hello_world").text, "\"hello_world\""); - assert_eq!(srv.character('c').text, "'c'"); - 
assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\""); - - // u128::max - assert_eq!( - srv.integer("340282366920938463463374607431768211455").text, - "340282366920938463463374607431768211455" - ); - // i128::min - assert_eq!( - srv.integer("-170141183460469231731687303715884105728").text, - "-170141183460469231731687303715884105728" - ); - } - - #[test] - fn test_ra_server_to_string() { - let s = TokenStream { - token_trees: vec![ - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "struct".into(), - id: tt::TokenId::unspecified(), - })), - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "T".into(), - id: tt::TokenId::unspecified(), - })), - tt::TokenTree::Subtree(tt::Subtree { - delimiter: Some(tt::Delimiter { - id: tt::TokenId::unspecified(), - kind: tt::DelimiterKind::Brace, - }), - token_trees: vec![], - }), - ], - }; - - assert_eq!(s.to_string(), "struct T {}"); - } - - #[test] - fn test_ra_server_from_str() { - use std::str::FromStr; - let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { - delimiter: Some(tt::Delimiter { - id: tt::TokenId::unspecified(), - kind: tt::DelimiterKind::Parenthesis, - }), - token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "a".into(), - id: tt::TokenId::unspecified(), - }))], - }); - - let t1 = TokenStream::from_str("(a)").unwrap(); - assert_eq!(t1.token_trees.len(), 1); - assert_eq!(t1.token_trees[0], subtree_paren_a); - - let t2 = TokenStream::from_str("(a);").unwrap(); - assert_eq!(t2.token_trees.len(), 2); - assert_eq!(t2.token_trees[0], subtree_paren_a); - - let underscore = TokenStream::from_str("_").unwrap(); - assert_eq!( - underscore.token_trees[0], - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "_".into(), - id: tt::TokenId::unspecified(), - })) - ); - } -} diff --git a/crates/proc-macro-srv/src/abis/mod.rs b/crates/proc-macro-srv/src/abis/mod.rs index 5b8aca4d8164e..9f874fb02b250 100644 --- a/crates/proc-macro-srv/src/abis/mod.rs +++ b/crates/proc-macro-srv/src/abis/mod.rs @@ -23,7 +23,6 @@ //! for the relevant versions of the rust compiler //! 
-mod abi_1_58; mod abi_1_63; #[cfg(feature = "sysroot-abi")] mod abi_sysroot; @@ -36,7 +35,6 @@ include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); pub(crate) use abi_sysroot::TokenStream as TestTokenStream; use super::dylib::LoadProcMacroDylibError; -pub(crate) use abi_1_58::Abi as Abi_1_58; pub(crate) use abi_1_63::Abi as Abi_1_63; #[cfg(feature = "sysroot-abi")] pub(crate) use abi_sysroot::Abi as Abi_Sysroot; @@ -54,7 +52,6 @@ impl PanicMessage { } pub(crate) enum Abi { - Abi1_58(Abi_1_58), Abi1_63(Abi_1_63), #[cfg(feature = "sysroot-abi")] AbiSysroot(Abi_Sysroot), @@ -109,10 +106,6 @@ impl Abi { // FIXME: this should use exclusive ranges when they're stable // https://github.com/rust-lang/rust/issues/37854 match (info.version.0, info.version.1) { - (1, 58..=62) => { - let inner = unsafe { Abi_1_58::from_lib(lib, symbol_name) }?; - Ok(Abi::Abi1_58(inner)) - } (1, 63) => { let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?; Ok(Abi::Abi1_63(inner)) @@ -128,7 +121,6 @@ impl Abi { attributes: Option<&tt::Subtree>, ) -> Result { match self { - Self::Abi1_58(abi) => abi.expand(macro_name, macro_body, attributes), Self::Abi1_63(abi) => abi.expand(macro_name, macro_body, attributes), #[cfg(feature = "sysroot-abi")] Self::AbiSysroot(abi) => abi.expand(macro_name, macro_body, attributes), @@ -137,7 +129,6 @@ impl Abi { pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { match self { - Self::Abi1_58(abi) => abi.list_macros(), Self::Abi1_63(abi) => abi.list_macros(), #[cfg(feature = "sysroot-abi")] Self::AbiSysroot(abi) => abi.list_macros(), From 41fda7615244349076e7cfe07b7b742acbe8e647 Mon Sep 17 00:00:00 2001 From: Maybe Waffle Date: Tue, 31 Jan 2023 11:01:01 +0000 Subject: [PATCH 218/501] simplify `is_exit` check --- lib/lsp-server/src/stdio.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/lib/lsp-server/src/stdio.rs b/lib/lsp-server/src/stdio.rs index cdee6432df847..49a825e579b5e 100644 --- a/lib/lsp-server/src/stdio.rs +++ b/lib/lsp-server/src/stdio.rs @@ -21,10 +21,7 @@ pub(crate) fn stdio_transport() -> (Sender, Receiver, IoThread let stdin = stdin(); let mut stdin = stdin.lock(); while let Some(msg) = Message::read(&mut stdin)? { - let is_exit = match &msg { - Message::Notification(n) => n.is_exit(), - _ => false, - }; + let is_exit = matches!(&msg, Message::Notification(n) if n.is_exit()); reader_sender.send(msg).unwrap(); From 0df70d37fc60195a85eab7f650f221fe74d64a33 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Mon, 30 Jan 2023 19:44:51 +0900 Subject: [PATCH 219/501] Minor refactoring --- crates/hir/src/lib.rs | 8 ++++---- crates/hir/src/semantics.rs | 7 ++----- .../src/handlers/generate_function.rs | 18 +++++++++--------- 3 files changed, 15 insertions(+), 18 deletions(-) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 4415bef4bb1c7..fc6a398ed3277 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -3139,15 +3139,15 @@ impl Type { } pub fn is_closure(&self) -> bool { - matches!(&self.ty.kind(Interner), TyKind::Closure { .. }) + matches!(self.ty.kind(Interner), TyKind::Closure { .. }) } pub fn is_fn(&self) -> bool { - matches!(&self.ty.kind(Interner), TyKind::FnDef(..) | TyKind::Function { .. }) + matches!(self.ty.kind(Interner), TyKind::FnDef(..) | TyKind::Function { .. 
}) } pub fn is_array(&self) -> bool { - matches!(&self.ty.kind(Interner), TyKind::Array(..)) + matches!(self.ty.kind(Interner), TyKind::Array(..)) } pub fn is_packed(&self, db: &dyn HirDatabase) -> bool { @@ -3164,7 +3164,7 @@ impl Type { } pub fn is_raw_ptr(&self) -> bool { - matches!(&self.ty.kind(Interner), TyKind::Raw(..)) + matches!(self.ty.kind(Interner), TyKind::Raw(..)) } pub fn contains_unknown(&self) -> bool { diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs index b801cd785e008..486b7ee62ed3a 100644 --- a/crates/hir/src/semantics.rs +++ b/crates/hir/src/semantics.rs @@ -1319,10 +1319,7 @@ impl<'db> SemanticsImpl<'db> { let _p = profile::span("Semantics::analyze_impl"); let node = self.find_file(node); - let container = match self.with_ctx(|ctx| ctx.find_container(node)) { - Some(it) => it, - None => return None, - }; + let container = self.with_ctx(|ctx| ctx.find_container(node))?; let resolver = match container { ChildContainer::DefWithBodyId(def) => { @@ -1582,7 +1579,7 @@ fn find_root(node: &SyntaxNode) -> SyntaxNode { node.ancestors().last().unwrap() } -/// `SemanticScope` encapsulates the notion of a scope (the set of visible +/// `SemanticsScope` encapsulates the notion of a scope (the set of visible /// names) at a particular program point. /// /// It is a bit tricky, as scopes do not really exist inside the compiler. diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index da9b0cda5b594..b7c8df5867fc0 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -16,8 +16,7 @@ use syntax::{ }; use crate::{ - utils::convert_reference_type, - utils::{find_struct_impl, render_snippet, Cursor}, + utils::{convert_reference_type, find_struct_impl, render_snippet, Cursor}, AssistContext, AssistId, AssistKind, Assists, }; @@ -107,7 +106,7 @@ fn fn_target_info( match path.qualifier() { Some(qualifier) => match ctx.sema.resolve_path(&qualifier) { Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => { - get_fn_target_info(ctx, &Some(module), call.clone()) + get_fn_target_info(ctx, Some(module), call.clone()) } Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => { if let hir::Adt::Enum(_) = adt { @@ -125,7 +124,7 @@ fn fn_target_info( } _ => None, }, - _ => get_fn_target_info(ctx, &None, call.clone()), + _ => get_fn_target_info(ctx, None, call.clone()), } } @@ -396,16 +395,16 @@ fn make_return_type( fn get_fn_target_info( ctx: &AssistContext<'_>, - target_module: &Option, + target_module: Option, call: CallExpr, ) -> Option { let (target, file, insert_offset) = get_fn_target(ctx, target_module, call)?; - Some(TargetInfo::new(*target_module, None, target, file, insert_offset)) + Some(TargetInfo::new(target_module, None, target, file, insert_offset)) } fn get_fn_target( ctx: &AssistContext<'_>, - target_module: &Option, + target_module: Option, call: CallExpr, ) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> { let mut file = ctx.file_id(); @@ -640,10 +639,11 @@ fn next_space_for_fn_in_module( } fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option { - if let Some(last_item) = impl_.assoc_item_list().and_then(|it| it.assoc_items().last()) { + let assoc_item_list = impl_.assoc_item_list()?; + if let Some(last_item) = assoc_item_list.assoc_items().last() { Some(GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())) } else { - 
Some(GeneratedFunctionTarget::InEmptyItemList(impl_.assoc_item_list()?.syntax().clone())) + Some(GeneratedFunctionTarget::InEmptyItemList(assoc_item_list.syntax().clone())) } } From 32955c30cd745ec1f9191590de0e8314b77a40a0 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Tue, 31 Jan 2023 19:26:08 +0900 Subject: [PATCH 220/501] Add method to get generic parameters in a type --- crates/hir-ty/src/lib.rs | 69 +++++++++++++++++++++++++++++++++++++++- crates/hir/src/lib.rs | 8 +++++ 2 files changed, 76 insertions(+), 1 deletion(-) diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index a1182445ede58..cbeb61067dfbe 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -39,11 +39,13 @@ use std::sync::Arc; use chalk_ir::{ fold::{Shift, TypeFoldable}, interner::HasInterner, - NoSolution, + visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor}, + NoSolution, TyData, }; use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId}; use hir_expand::name; use itertools::Either; +use rustc_hash::FxHashSet; use traits::FnTrait; use utils::Generics; @@ -562,3 +564,68 @@ pub fn callable_sig_from_fnonce( Some(CallableSig::from_params_and_return(params, ret_ty, false, Safety::Safe)) } + +struct PlaceholderCollector<'db> { + db: &'db dyn HirDatabase, + placeholders: FxHashSet, +} + +impl PlaceholderCollector<'_> { + fn collect(&mut self, idx: PlaceholderIndex) { + let id = from_placeholder_idx(self.db, idx); + self.placeholders.insert(id); + } +} + +impl TypeVisitor for PlaceholderCollector<'_> { + type BreakTy = (); + + fn as_dyn(&mut self) -> &mut dyn TypeVisitor { + self + } + + fn interner(&self) -> Interner { + Interner + } + + fn visit_ty( + &mut self, + ty: &Ty, + outer_binder: DebruijnIndex, + ) -> std::ops::ControlFlow { + let has_placeholder_bits = TypeFlags::HAS_TY_PLACEHOLDER | TypeFlags::HAS_CT_PLACEHOLDER; + let TyData { kind, flags } = ty.data(Interner); + + if let TyKind::Placeholder(idx) = kind { + self.collect(*idx); + } else if flags.intersects(has_placeholder_bits) { + return ty.super_visit_with(self, outer_binder); + } else { + // Fast path: don't visit inner types (e.g. generic arguments) when `flags` indicate + // that there are no placeholders. + } + + std::ops::ControlFlow::Continue(()) + } + + fn visit_const( + &mut self, + constant: &chalk_ir::Const, + _outer_binder: DebruijnIndex, + ) -> std::ops::ControlFlow { + if let chalk_ir::ConstValue::Placeholder(idx) = constant.data(Interner).value { + self.collect(idx); + } + std::ops::ControlFlow::Continue(()) + } +} + +/// Returns unique placeholders for types and consts contained in `value`. +pub fn collect_placeholders(value: &T, db: &dyn HirDatabase) -> Vec +where + T: ?Sized + TypeVisitable, +{ + let mut collector = PlaceholderCollector { db, placeholders: FxHashSet::default() }; + value.visit_with(&mut collector, DebruijnIndex::INNERMOST); + collector.placeholders.into_iter().collect() +} diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index fc6a398ed3277..24faa127e4f0a 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -3599,6 +3599,14 @@ impl Type { _ => None, } } + + /// Returns unique `GenericParam`s contained in this type. 
+ pub fn generic_params(&self, db: &dyn HirDatabase) -> FxHashSet { + hir_ty::collect_placeholders(&self.ty, db) + .into_iter() + .map(|id| TypeOrConstParam { id }.split(db).either_into()) + .collect() + } } #[derive(Debug)] From 3edde6fcc1506d5c1bf7c74e42defaa44423595c Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Tue, 31 Jan 2023 19:49:18 +0900 Subject: [PATCH 221/501] Support generic function in `generate_function` assist --- crates/hir/src/lib.rs | 18 + .../src/handlers/generate_delegate_methods.rs | 2 +- .../src/handlers/generate_function.rs | 882 +++++++++++++++++- crates/ide-db/src/path_transform.rs | 24 +- crates/syntax/src/ast/make.rs | 7 +- 5 files changed, 888 insertions(+), 45 deletions(-) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 24faa127e4f0a..4190707884516 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2165,6 +2165,16 @@ impl AsAssocItem for ModuleDef { } } } +impl AsAssocItem for DefWithBody { + fn as_assoc_item(self, db: &dyn HirDatabase) -> Option { + match self { + DefWithBody::Function(it) => it.as_assoc_item(db), + DefWithBody::Const(it) => it.as_assoc_item(db), + DefWithBody::Static(_) | DefWithBody::Variant(_) => None, + } + } +} + fn as_assoc_item(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option where ID: Lookup>, @@ -2560,6 +2570,14 @@ impl GenericParam { GenericParam::LifetimeParam(it) => it.name(db), } } + + pub fn parent(self) -> GenericDef { + match self { + GenericParam::TypeParam(it) => it.id.parent().into(), + GenericParam::ConstParam(it) => it.id.parent().into(), + GenericParam::LifetimeParam(it) => it.id.parent.into(), + } + } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs index c8d0493d097c5..ed1b8f4e28d30 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -109,7 +109,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' let tail_expr_finished = if is_async { make::expr_await(tail_expr) } else { tail_expr }; let body = make::block_expr([], Some(tail_expr_finished)); - let f = make::fn_(vis, name, type_params, params, body, ret_type, is_async) + let f = make::fn_(vis, name, type_params, None, params, body, ret_type, is_async) .indent(ast::edit::IndentLevel(1)) .clone_for_update(); diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index b7c8df5867fc0..fa93a4c887621 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -1,8 +1,11 @@ -use hir::{Adt, HasSource, HirDisplay, Module, Semantics, TypeInfo}; +use hir::{ + Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo, +}; use ide_db::{ base_db::FileId, defs::{Definition, NameRefClass}, famous_defs::FamousDefs, + path_transform::PathTransform, FxHashMap, FxHashSet, RootDatabase, SnippetCap, }; use stdx::to_lower_snake_case; @@ -10,7 +13,7 @@ use syntax::{ ast::{ self, edit::{AstNodeEdit, IndentLevel}, - make, AstNode, CallExpr, HasArgList, HasModuleItem, + make, AstNode, CallExpr, HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds, }, SyntaxKind, SyntaxNode, TextRange, TextSize, }; @@ -135,7 +138,8 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { } let fn_name = call.name_ref()?; - 
let adt = ctx.sema.type_of_expr(&call.receiver()?)?.original().strip_references().as_adt()?; + let receiver_ty = ctx.sema.type_of_expr(&call.receiver()?)?.original().strip_references(); + let adt = receiver_ty.as_adt()?; let current_module = ctx.sema.scope(call.syntax())?.module(); let target_module = adt.module(ctx.sema.db); @@ -146,8 +150,14 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?; let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?; - let function_builder = - FunctionBuilder::from_method_call(ctx, &call, &fn_name, target_module, target)?; + let function_builder = FunctionBuilder::from_method_call( + ctx, + &call, + &fn_name, + receiver_ty, + target_module, + target, + )?; let text_range = call.syntax().text_range(); let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; let label = format!("Generate {} method", function_builder.fn_name); @@ -178,6 +188,7 @@ fn add_func_to_accumulator( let function_template = function_builder.render(adt_name.is_some()); let mut func = function_template.to_string(ctx.config.snippet_cap); if let Some(name) = adt_name { + // FIXME: adt may have generic params. func = format!("\n{indent}impl {name} {{\n{func}\n{indent}}}"); } builder.edit_file(file); @@ -237,7 +248,8 @@ impl FunctionTemplate { struct FunctionBuilder { target: GeneratedFunctionTarget, fn_name: ast::Name, - type_params: Option, + generic_param_list: Option, + where_clause: Option, params: ast::ParamList, ret_type: Option, should_focus_return_type: bool, @@ -259,19 +271,32 @@ impl FunctionBuilder { let target_module = target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?; let fn_name = make::name(fn_name); - let (type_params, params) = - fn_args(ctx, target_module, ast::CallableExpr::Call(call.clone()))?; + let mut necessary_generic_params = FxHashSet::default(); + let params = fn_args( + ctx, + target_module, + ast::CallableExpr::Call(call.clone()), + &mut necessary_generic_params, + )?; let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast); let is_async = await_expr.is_some(); - let (ret_type, should_focus_return_type) = - make_return_type(ctx, &ast::Expr::CallExpr(call.clone()), target_module); + let (ret_type, should_focus_return_type) = make_return_type( + ctx, + &ast::Expr::CallExpr(call.clone()), + target_module, + &mut necessary_generic_params, + ); + + let (generic_param_list, where_clause) = + fn_generic_params(ctx, necessary_generic_params, &target); Some(Self { target, fn_name, - type_params, + generic_param_list, + where_clause, params, ret_type, should_focus_return_type, @@ -284,25 +309,40 @@ impl FunctionBuilder { ctx: &AssistContext<'_>, call: &ast::MethodCallExpr, name: &ast::NameRef, + receiver_ty: Type, target_module: Module, target: GeneratedFunctionTarget, ) -> Option { let needs_pub = !module_is_descendant(&ctx.sema.scope(call.syntax())?.module(), &target_module, ctx); let fn_name = make::name(&name.text()); - let (type_params, params) = - fn_args(ctx, target_module, ast::CallableExpr::MethodCall(call.clone()))?; + let mut necessary_generic_params = FxHashSet::default(); + necessary_generic_params.extend(receiver_ty.generic_params(ctx.db())); + let params = fn_args( + ctx, + target_module, + ast::CallableExpr::MethodCall(call.clone()), + &mut necessary_generic_params, + )?; let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast); let is_async = await_expr.is_some(); 
- let (ret_type, should_focus_return_type) = - make_return_type(ctx, &ast::Expr::MethodCallExpr(call.clone()), target_module); + let (ret_type, should_focus_return_type) = make_return_type( + ctx, + &ast::Expr::MethodCallExpr(call.clone()), + target_module, + &mut necessary_generic_params, + ); + + let (generic_param_list, where_clause) = + fn_generic_params(ctx, necessary_generic_params, &target); Some(Self { target, fn_name, - type_params, + generic_param_list, + where_clause, params, ret_type, should_focus_return_type, @@ -318,7 +358,8 @@ impl FunctionBuilder { let mut fn_def = make::fn_( visibility, self.fn_name, - self.type_params, + self.generic_param_list, + self.where_clause, self.params, fn_body, self.ret_type, @@ -374,6 +415,7 @@ fn make_return_type( ctx: &AssistContext<'_>, call: &ast::Expr, target_module: Module, + necessary_generic_params: &mut FxHashSet, ) -> (Option, bool) { let (ret_ty, should_focus_return_type) = { match ctx.sema.type_of_expr(call).map(TypeInfo::original) { @@ -381,6 +423,7 @@ fn make_return_type( None => (Some(make::ty_placeholder()), true), Some(ty) if ty.is_unit() => (None, false), Some(ty) => { + necessary_generic_params.extend(ty.generic_params(ctx.db())); let rendered = ty.display_source_code(ctx.db(), target_module.into()); match rendered { Ok(rendered) => (Some(make::ty(&rendered)), false), @@ -472,37 +515,385 @@ impl GeneratedFunctionTarget { GeneratedFunctionTarget::InEmptyItemList(it) => it, } } + + fn parent(&self) -> SyntaxNode { + match self { + GeneratedFunctionTarget::BehindItem(it) => it.parent().expect("item without parent"), + GeneratedFunctionTarget::InEmptyItemList(it) => it.clone(), + } + } } -/// Computes the type variables and arguments required for the generated function +/// Computes parameter list for the generated function. fn fn_args( ctx: &AssistContext<'_>, target_module: hir::Module, call: ast::CallableExpr, -) -> Option<(Option, ast::ParamList)> { + necessary_generic_params: &mut FxHashSet, +) -> Option { let mut arg_names = Vec::new(); let mut arg_types = Vec::new(); for arg in call.arg_list()?.args() { arg_names.push(fn_arg_name(&ctx.sema, &arg)); - arg_types.push(fn_arg_type(ctx, target_module, &arg)); + arg_types.push(fn_arg_type(ctx, target_module, &arg, necessary_generic_params)); } deduplicate_arg_names(&mut arg_names); let params = arg_names.into_iter().zip(arg_types).map(|(name, ty)| { make::param(make::ext::simple_ident_pat(make::name(&name)).into(), make::ty(&ty)) }); - Some(( - None, - make::param_list( - match call { - ast::CallableExpr::Call(_) => None, - ast::CallableExpr::MethodCall(_) => Some(make::self_param()), - }, - params, - ), + Some(make::param_list( + match call { + ast::CallableExpr::Call(_) => None, + ast::CallableExpr::MethodCall(_) => Some(make::self_param()), + }, + params, )) } +/// Gets parameter bounds and where predicates in scope and filters out irrelevant ones. +/// +/// See comment on `filter_unnecessary_bounds()` for what bounds we consider relevant. +/// +/// NOTE: Generic parameters returned from this function may cause name clash at `target`. We don't +/// currently do anything about it because it's actually easy to resolve it after the assist: just +/// use the Rename functionality. +fn fn_generic_params( + ctx: &AssistContext<'_>, + necessary_params: FxHashSet, + target: &GeneratedFunctionTarget, +) -> (Option, Option) { + if necessary_params.is_empty() { + // Not really needed but fast path. + return (None, None); + } + + // 1. 
Get generic parameters (with bounds) and where predicates in scope. + let (generic_params, where_preds) = params_and_where_preds_in_scope(ctx); + + // 2. Extract type parameters included in each bound. + let mut generic_params = generic_params + .into_iter() + .filter_map(|it| compute_contained_params_in_generic_param(ctx, it)) + .collect(); + let mut where_preds = where_preds + .into_iter() + .filter_map(|it| compute_contained_params_in_where_pred(ctx, it)) + .collect(); + + // 3. Filter out unnecessary bounds. + filter_unnecessary_bounds(&mut generic_params, &mut where_preds, necessary_params); + filter_bounds_in_scope(&mut generic_params, &mut where_preds, ctx, target); + + let generic_params: Vec<_> = + generic_params.into_iter().map(|it| it.node.clone_for_update()).collect(); + let where_preds: Vec<_> = + where_preds.into_iter().map(|it| it.node.clone_for_update()).collect(); + + // 4. Rewrite paths + if let Some(param) = generic_params.first() { + let source_scope = ctx.sema.scope(param.syntax()).unwrap(); + let target_scope = ctx.sema.scope(&target.parent()).unwrap(); + if source_scope.module() != target_scope.module() { + let transform = PathTransform::generic_transformation(&target_scope, &source_scope); + let generic_params = generic_params.iter().map(|it| it.syntax()); + let where_preds = where_preds.iter().map(|it| it.syntax()); + transform.apply_all(generic_params.chain(where_preds)); + } + } + + let generic_param_list = make::generic_param_list(generic_params); + let where_clause = + if where_preds.is_empty() { None } else { Some(make::where_clause(where_preds)) }; + + (Some(generic_param_list), where_clause) +} + +fn params_and_where_preds_in_scope( + ctx: &AssistContext<'_>, +) -> (Vec, Vec) { + let Some(body) = containing_body(ctx) else { return Default::default(); }; + + let mut generic_params = Vec::new(); + let mut where_clauses = Vec::new(); + + // There are two items where generic parameters currently in scope may be declared: the item + // the cursor is at, and its parent (if any). + // + // We handle parent first so that their generic parameters appear first in the generic + // parameter list of the function we're generating. + let db = ctx.db(); + if let Some(parent) = body.as_assoc_item(db).map(|it| it.container(db)) { + match parent { + hir::AssocItemContainer::Impl(it) => { + let (params, clauses) = get_bounds_in_scope(ctx, it); + generic_params.extend(params); + where_clauses.extend(clauses); + } + hir::AssocItemContainer::Trait(it) => { + let (params, clauses) = get_bounds_in_scope(ctx, it); + generic_params.extend(params); + where_clauses.extend(clauses); + } + } + } + + // Other defs with body may inherit generic parameters from its parent, but never have their + // own generic parameters. 
+ if let hir::DefWithBody::Function(it) = body { + let (params, clauses) = get_bounds_in_scope(ctx, it); + generic_params.extend(params); + where_clauses.extend(clauses); + } + + (generic_params, where_clauses) +} + +fn containing_body(ctx: &AssistContext<'_>) -> Option { + let item: ast::Item = ctx.find_node_at_offset()?; + let def = match item { + ast::Item::Fn(it) => ctx.sema.to_def(&it)?.into(), + ast::Item::Const(it) => ctx.sema.to_def(&it)?.into(), + ast::Item::Static(it) => ctx.sema.to_def(&it)?.into(), + _ => return None, + }; + Some(def) +} + +fn get_bounds_in_scope( + ctx: &AssistContext<'_>, + def: D, +) -> (impl Iterator, impl Iterator) +where + D: HasSource, + D::Ast: HasGenericParams, +{ + // This function should be only called with `Impl`, `Trait`, or `Function`, for which it's + // infallible to get source ast. + let node = ctx.sema.source(def).unwrap().value; + let generic_params = node.generic_param_list().into_iter().flat_map(|it| it.generic_params()); + let where_clauses = node.where_clause().into_iter().flat_map(|it| it.predicates()); + (generic_params, where_clauses) +} + +#[derive(Debug)] +struct ParamBoundWithParams { + node: ast::GenericParam, + /// Generic parameter `node` introduces. + /// + /// ```text + /// impl S { + /// fn f>() {} + /// ^ this + /// } + /// ``` + /// + /// `U` in this example. + self_ty_param: hir::GenericParam, + /// Generic parameters contained in the trait reference of this bound. + /// + /// ```text + /// impl S { + /// fn f>() {} + /// ^^^^^^^^ params in this part + /// } + /// ``` + /// + /// `T` in this example. + other_params: FxHashSet, +} + +#[derive(Debug)] +struct WherePredWithParams { + node: ast::WherePred, + /// Generic parameters contained in the "self type" of this where predicate. + /// + /// ```text + /// Struct: Trait, + /// ^^^^^^^^^^^^ params in this part + /// ``` + /// + /// `T` and `U` in this example. + self_ty_params: FxHashSet, + /// Generic parameters contained in the trait reference of this where predicate. + /// + /// ```text + /// Struct: Trait, + /// ^^^^^^^^^^^^^^^^^^^ params in this part + /// ``` + /// + /// `T` and `V` in this example. + other_params: FxHashSet, +} + +fn compute_contained_params_in_generic_param( + ctx: &AssistContext<'_>, + node: ast::GenericParam, +) -> Option { + match &node { + ast::GenericParam::TypeParam(ty) => { + let self_ty_param = ctx.sema.to_def(ty)?.into(); + + let other_params = ty + .type_bound_list() + .into_iter() + .flat_map(|it| it.bounds()) + .flat_map(|bound| bound.syntax().descendants()) + .filter_map(|node| filter_generic_params(ctx, node)) + .collect(); + + Some(ParamBoundWithParams { node, self_ty_param, other_params }) + } + ast::GenericParam::ConstParam(ct) => { + let self_ty_param = ctx.sema.to_def(ct)?.into(); + Some(ParamBoundWithParams { node, self_ty_param, other_params: FxHashSet::default() }) + } + ast::GenericParam::LifetimeParam(_) => { + // FIXME: It might be a good idea to handle lifetime parameters too. 
+ None + } + } +} + +fn compute_contained_params_in_where_pred( + ctx: &AssistContext<'_>, + node: ast::WherePred, +) -> Option { + let self_ty = node.ty()?; + let bound_list = node.type_bound_list()?; + + let self_ty_params = self_ty + .syntax() + .descendants() + .filter_map(|node| filter_generic_params(ctx, node)) + .collect(); + + let other_params = bound_list + .bounds() + .flat_map(|bound| bound.syntax().descendants()) + .filter_map(|node| filter_generic_params(ctx, node)) + .collect(); + + Some(WherePredWithParams { node, self_ty_params, other_params }) +} + +fn filter_generic_params(ctx: &AssistContext<'_>, node: SyntaxNode) -> Option { + let path = ast::Path::cast(node)?; + match ctx.sema.resolve_path(&path)? { + PathResolution::TypeParam(it) => Some(it.into()), + PathResolution::ConstParam(it) => Some(it.into()), + _ => None, + } +} + +/// Filters out irrelevant bounds from `generic_params` and `where_preds`. +/// +/// Say we have a trait bound `Struct: Trait`. Given `necessary_params`, when is it relevant +/// and when not? Some observations: +/// - When `necessary_params` contains `T`, it's likely that we want this bound, but now we have +/// an extra param to consider: `U`. +/// - On the other hand, when `necessary_params` contains `U` (but not `T`), then it's unlikely +/// that we want this bound because it doesn't really constrain `U`. +/// +/// (FIXME?: The latter clause might be overstating. We may want to include the bound if the self +/// type does *not* include generic params at all - like `Option: From`) +/// +/// Can we make this a bit more formal? Let's define "dependency" between generic parameters and +/// trait bounds: +/// - A generic parameter `T` depends on a trait bound if `T` appears in the self type (i.e. left +/// part) of the bound. +/// - A trait bound depends on a generic parameter `T` if `T` appears in the bound. +/// +/// Using the notion, what we want is all the bounds that params in `necessary_params` +/// *transitively* depend on! +/// +/// Now it's not hard to solve: we build a dependency graph and compute all reachable nodes from +/// nodes that represent params in `necessary_params` by usual and boring DFS. +/// +/// The time complexity is O(|generic_params| + |where_preds| + |necessary_params|). +fn filter_unnecessary_bounds( + generic_params: &mut Vec, + where_preds: &mut Vec, + necessary_params: FxHashSet, +) { + // All `self_ty_param` should be unique as they were collected from `ast::GenericParamList`s. + let param_map: FxHashMap = + generic_params.iter().map(|it| it.self_ty_param).zip(0..).collect(); + let param_count = param_map.len(); + let generic_params_upper_bound = param_count + generic_params.len(); + let node_count = generic_params_upper_bound + where_preds.len(); + + // | node index range | what the node represents | + // |-----------------------------------------|--------------------------| + // | 0..param_count | generic parameter | + // | param_count..generic_params_upper_bound | `ast::GenericParam` | + // | generic_params_upper_bound..node_count | `ast::WherePred` | + let mut graph = Graph::new(node_count); + for (pred, pred_idx) in generic_params.iter().zip(param_count..) { + let param_idx = param_map[&pred.self_ty_param]; + graph.add_edge(param_idx, pred_idx); + graph.add_edge(pred_idx, param_idx); + + for param in &pred.other_params { + let param_idx = param_map[param]; + graph.add_edge(pred_idx, param_idx); + } + } + for (pred, pred_idx) in where_preds.iter().zip(generic_params_upper_bound..) 
{ + for param in &pred.self_ty_params { + let param_idx = param_map[param]; + graph.add_edge(param_idx, pred_idx); + graph.add_edge(pred_idx, param_idx); + } + for param in &pred.other_params { + let param_idx = param_map[param]; + graph.add_edge(pred_idx, param_idx); + } + } + + let starting_nodes = necessary_params.iter().map(|param| param_map[param]); + let reachable = graph.compute_reachable_nodes(starting_nodes); + + // Not pretty, but effective. If only there were `Vec::retain_index()`... + let mut idx = param_count; + generic_params.retain(|_| { + idx += 1; + reachable[idx - 1] + }); + stdx::always!(idx == generic_params_upper_bound, "inconsistent index"); + where_preds.retain(|_| { + idx += 1; + reachable[idx - 1] + }); +} + +/// Filters out bounds from impl if we're generating the function into the same impl we're +/// generating from. +fn filter_bounds_in_scope( + generic_params: &mut Vec, + where_preds: &mut Vec, + ctx: &AssistContext<'_>, + target: &GeneratedFunctionTarget, +) -> Option<()> { + let target_impl = target.parent().ancestors().find_map(ast::Impl::cast)?; + let target_impl = ctx.sema.to_def(&target_impl)?; + // It's sufficient to test only the first element of `generic_params` because of the order of + // insertion (see `params_and_where_preds_in_scope()`). + let def = generic_params.first()?.self_ty_param.parent(); + if def != hir::GenericDef::Impl(target_impl) { + return None; + } + + // Now that we know every element that belongs to an impl would be in scope at `target`, we can + // filter them out just by looking at their parent. + generic_params.retain(|it| !matches!(it.self_ty_param.parent(), hir::GenericDef::Impl(_))); + where_preds.retain(|it| { + it.node.syntax().parent().and_then(|it| it.parent()).and_then(ast::Impl::cast).is_none() + }); + + Some(()) +} + /// Makes duplicate argument names unique by appending incrementing numbers. /// /// ``` @@ -563,17 +954,25 @@ fn fn_arg_name(sema: &Semantics<'_, RootDatabase>, arg_expr: &ast::Expr) -> Stri } } -fn fn_arg_type(ctx: &AssistContext<'_>, target_module: hir::Module, fn_arg: &ast::Expr) -> String { +fn fn_arg_type( + ctx: &AssistContext<'_>, + target_module: hir::Module, + fn_arg: &ast::Expr, + generic_params: &mut FxHashSet, +) -> String { fn maybe_displayed_type( ctx: &AssistContext<'_>, target_module: hir::Module, fn_arg: &ast::Expr, + generic_params: &mut FxHashSet, ) -> Option { let ty = ctx.sema.type_of_expr(fn_arg)?.adjusted(); if ty.is_unknown() { return None; } + generic_params.extend(ty.generic_params(ctx.db())); + if ty.is_reference() || ty.is_mutable_reference() { let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate()); convert_reference_type(ty.strip_references(), ctx.db(), famous_defs) @@ -584,7 +983,8 @@ fn fn_arg_type(ctx: &AssistContext<'_>, target_module: hir::Module, fn_arg: &ast } } - maybe_displayed_type(ctx, target_module, fn_arg).unwrap_or_else(|| String::from("_")) + maybe_displayed_type(ctx, target_module, fn_arg, generic_params) + .unwrap_or_else(|| String::from("_")) } /// Returns the position inside the current mod or file @@ -659,6 +1059,73 @@ fn module_is_descendant(module: &hir::Module, ans: &hir::Module, ctx: &AssistCon false } +// This is never intended to be used as a generic graph structure. If there's ever another need for +// a graph algorithm, consider adding a library for that (and replace the following). +/// Minimally implemented directed graph structure represented by adjacency list. 
+struct Graph { + edges: Vec>, +} + +impl Graph { + fn new(node_count: usize) -> Self { + Self { edges: vec![Vec::new(); node_count] } + } + + fn add_edge(&mut self, from: usize, to: usize) { + self.edges[from].push(to); + } + + fn edges_for(&self, node_idx: usize) -> &[usize] { + &self.edges[node_idx] + } + + fn len(&self) -> usize { + self.edges.len() + } + + fn compute_reachable_nodes( + &self, + starting_nodes: impl IntoIterator, + ) -> Vec { + let mut visitor = Visitor::new(self); + for idx in starting_nodes { + visitor.mark_reachable(idx); + } + visitor.visited + } +} + +struct Visitor<'g> { + graph: &'g Graph, + visited: Vec, + // Stack is held in this struct so we can reuse its buffer. + stack: Vec, +} + +impl<'g> Visitor<'g> { + fn new(graph: &'g Graph) -> Self { + let visited = vec![false; graph.len()]; + Self { graph, visited, stack: Vec::new() } + } + + fn mark_reachable(&mut self, start_idx: usize) { + // non-recursive DFS + stdx::always!(self.stack.is_empty()); + + self.stack.push(start_idx); + while let Some(idx) = self.stack.pop() { + if !self.visited[idx] { + self.visited[idx] = true; + for &neighbor in self.graph.edges_for(idx) { + if !self.visited[neighbor] { + self.stack.push(neighbor); + } + } + } + } + } +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; @@ -1087,27 +1554,302 @@ fn bar(baz: Baz::Bof) { } #[test] - fn add_function_with_generic_arg() { - // FIXME: This is wrong, generated `bar` should include generic parameter. + fn generate_function_with_generic_param() { + check_assist( + generate_function, + r" +fn foo(t: [T; N]) { $0bar(t) } +", + r" +fn foo(t: [T; N]) { bar(t) } + +fn bar(t: [T; N]) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn generate_function_with_parent_generic_param() { + check_assist( + generate_function, + r" +struct S(T); +impl S { + fn foo(t: T, u: U) { $0bar(t, u) } +} +", + r" +struct S(T); +impl S { + fn foo(t: T, u: U) { bar(t, u) } +} + +fn bar(t: T, u: U) { + ${0:todo!()} +} +", + ) + } + + #[test] + fn generic_param_in_receiver_type() { + // FIXME: Generic parameter `T` should be part of impl, not method. + check_assist( + generate_function, + r" +struct S(T); +fn foo(s: S, u: U) { s.$0foo(u) } +", + r" +struct S(T); +impl S { + fn foo(&self, u: U) { + ${0:todo!()} + } +} +fn foo(s: S, u: U) { s.foo(u) } +", + ) + } + + #[test] + fn generic_param_in_return_type() { + check_assist( + generate_function, + r" +fn foo() -> [T; N] { $0bar() } +", + r" +fn foo() -> [T; N] { bar() } + +fn bar() -> [T; N] { + ${0:todo!()} +} +", + ) + } + + #[test] + fn generate_fn_with_bounds() { + // FIXME: where predicates should be on next lines. + check_assist( + generate_function, + r" +trait A {} +struct S(T); +impl> S +where + T: A, +{ + fn foo(t: T, u: U) + where + T: A<()>, + U: A + A, + { + $0bar(t, u) + } +} +", + r" +trait A {} +struct S(T); +impl> S +where + T: A, +{ + fn foo(t: T, u: U) + where + T: A<()>, + U: A + A, + { + bar(t, u) + } +} + +fn bar, U>(t: T, u: U) where T: A, T: A<()>, U: A + A { + ${0:todo!()} +} +", + ) + } + + #[test] + fn include_transitive_param_dependency() { + // FIXME: where predicates should be on next lines. 
+ check_assist( + generate_function, + r" +trait A { type Assoc; } +trait B { type Item; } +struct S(T); +impl S<(T, U, V, W)> +where + T: A, + S: A, +{ + fn foo(t: T, u: U) + where + U: A, + { + $0bar(u) + } +} +", + r" +trait A { type Assoc; } +trait B { type Item; } +struct S(T); +impl S<(T, U, V, W)> +where + T: A, + S: A, +{ + fn foo(t: T, u: U) + where + U: A, + { + bar(u) + } +} + +fn bar(u: U) where T: A, S: A, U: A { + ${0:todo!()} +} +", + ) + } + + #[test] + fn irrelevant_bounds_are_filtered_out() { + check_assist( + generate_function, + r" +trait A {} +struct S(T); +impl S<(T, U, V, W)> +where + T: A, + V: A, +{ + fn foo(t: T, u: U) + where + U: A + A, + { + $0bar(u) + } +} +", + r" +trait A {} +struct S(T); +impl S<(T, U, V, W)> +where + T: A, + V: A, +{ + fn foo(t: T, u: U) + where + U: A + A, + { + bar(u) + } +} + +fn bar(u: U) where T: A, U: A + A { + ${0:todo!()} +} +", + ) + } + + #[test] + fn params_in_trait_arg_are_not_dependency() { + // Even though `bar` depends on `U` and `I`, we don't have to copy these bounds: + // `T: A` and `T: A`. check_assist( generate_function, r" -fn foo(t: T) { - $0bar(t) +trait A {} +struct S(T); +impl S<(T, U)> +where + T: A, +{ + fn foo(t: T, u: U) + where + T: A, + U: A, + { + $0bar(u) + } } ", r" -fn foo(t: T) { - bar(t) +trait A {} +struct S(T); +impl S<(T, U)> +where + T: A, +{ + fn foo(t: T, u: U) + where + T: A, + U: A, + { + bar(u) + } } -fn bar(t: T) { +fn bar(u: U) where U: A { ${0:todo!()} } ", ) } + #[test] + fn dont_copy_bounds_already_in_scope() { + check_assist( + generate_function, + r" +trait A {} +struct S(T); +impl> S +where + T: A, +{ + fn foo>(t: T, u: U) + where + T: A>, + { + Self::$0bar(t, u); + } +} +", + r" +trait A {} +struct S(T); +impl> S +where + T: A, +{ + fn foo>(t: T, u: U) + where + T: A>, + { + Self::bar(t, u); + } + + fn bar>(t: T, u: U) ${0:-> _} where T: A> { + todo!() + } +} +", + ) + } + #[test] fn add_function_with_fn_arg() { // FIXME: The argument in `bar` is wrong. 
@@ -1289,6 +2031,50 @@ fn baz(foo: foo::Foo) { ) } + #[test] + fn qualified_path_in_generic_bounds_uses_correct_scope() { + check_assist( + generate_function, + r" +mod a { + pub trait A {}; +} +pub mod b { + pub struct S(T); +} +struct S(T); +impl S +where + T: a::A, +{ + fn foo(t: b::S, u: S) { + a::$0bar(t, u); + } +} +", + r" +mod a { + pub trait A {} + + pub(crate) fn bar(t: crate::b::S, u: crate::S) ${0:-> _} where T: self::A { + todo!() + }; +} +pub mod b { + pub struct S(T); +} +struct S(T); +impl S +where + T: a::A, +{ + fn foo(t: b::S, u: S) { + a::bar(t, u); + } +} +", + ) + } #[test] fn add_function_in_module_containing_other_items() { check_assist( @@ -1606,6 +2392,26 @@ fn foo() {S::bar();} ) } + #[test] + fn create_generic_static_method() { + check_assist( + generate_function, + r" +struct S; +fn foo(t: [T; N]) { S::bar$0(t); } +", + r" +struct S; +impl S { + fn bar(t: [T; N]) ${0:-> _} { + todo!() + } +} +fn foo(t: [T; N]) { S::bar(t); } +", + ) + } + #[test] fn create_static_method_within_an_impl() { check_assist( diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs index 12d873b4a0aa8..6402a84a68bb3 100644 --- a/crates/ide-db/src/path_transform.rs +++ b/crates/ide-db/src/path_transform.rs @@ -33,7 +33,7 @@ use syntax::{ /// } /// ``` pub struct PathTransform<'a> { - generic_def: hir::GenericDef, + generic_def: Option, substs: Vec, target_scope: &'a SemanticsScope<'a>, source_scope: &'a SemanticsScope<'a>, @@ -49,7 +49,7 @@ impl<'a> PathTransform<'a> { PathTransform { source_scope, target_scope, - generic_def: trait_.into(), + generic_def: Some(trait_.into()), substs: get_syntactic_substs(impl_).unwrap_or_default(), } } @@ -63,28 +63,42 @@ impl<'a> PathTransform<'a> { PathTransform { source_scope, target_scope, - generic_def: function.into(), + generic_def: Some(function.into()), substs: get_type_args_from_arg_list(generic_arg_list).unwrap_or_default(), } } + pub fn generic_transformation( + target_scope: &'a SemanticsScope<'a>, + source_scope: &'a SemanticsScope<'a>, + ) -> PathTransform<'a> { + PathTransform { source_scope, target_scope, generic_def: None, substs: Vec::new() } + } + pub fn apply(&self, syntax: &SyntaxNode) { self.build_ctx().apply(syntax) } + pub fn apply_all<'b>(&self, nodes: impl IntoIterator) { + let ctx = self.build_ctx(); + for node in nodes { + ctx.apply(node); + } + } + fn build_ctx(&self) -> Ctx<'a> { let db = self.source_scope.db; let target_module = self.target_scope.module(); let source_module = self.source_scope.module(); let skip = match self.generic_def { // this is a trait impl, so we need to skip the first type parameter -- this is a bit hacky - hir::GenericDef::Trait(_) => 1, + Some(hir::GenericDef::Trait(_)) => 1, _ => 0, }; let substs_by_param: FxHashMap<_, _> = self .generic_def - .type_params(db) .into_iter() + .flat_map(|it| it.type_params(db)) .skip(skip) // The actual list of trait type parameters may be longer than the one // used in the `impl` block due to trailing default type parameters. 
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index a35983435c7b4..78ed2a73e581a 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -823,6 +823,7 @@ pub fn fn_( visibility: Option, fn_name: ast::Name, type_params: Option, + where_clause: Option, params: ast::ParamList, body: ast::BlockExpr, ret_type: Option, @@ -832,6 +833,10 @@ pub fn fn_( Some(type_params) => format!("{type_params}"), None => "".into(), }; + let where_clause = match where_clause { + Some(it) => format!("{it} "), + None => "".into(), + }; let ret_type = match ret_type { Some(ret_type) => format!("{ret_type} "), None => "".into(), @@ -844,7 +849,7 @@ pub fn fn_( let async_literal = if is_async { "async " } else { "" }; ast_from_text(&format!( - "{visibility}{async_literal}fn {fn_name}{type_params}{params} {ret_type}{body}", + "{visibility}{async_literal}fn {fn_name}{type_params}{params} {ret_type}{where_clause}{body}", )) } From 41a46a78f2410653a7c38ce240c439993b97be61 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 31 Jan 2023 11:49:49 +0100 Subject: [PATCH 222/501] Make tt generic over the span data --- crates/base-db/src/fixture.rs | 9 +- crates/base-db/src/input.rs | 2 +- crates/cfg/src/cfg_expr.rs | 4 +- crates/hir-def/src/adt.rs | 4 +- crates/hir-def/src/attr.rs | 5 +- crates/hir-def/src/data.rs | 2 +- crates/hir-def/src/lib.rs | 23 +- crates/hir-def/src/macro_expansion_tests.rs | 8 +- crates/hir-def/src/nameres/collector.rs | 12 +- crates/hir-def/src/nameres/proc_macro.rs | 2 +- crates/hir-expand/src/attrs.rs | 9 +- crates/hir-expand/src/builtin_attr_macro.rs | 6 +- crates/hir-expand/src/builtin_derive_macro.rs | 10 +- crates/hir-expand/src/builtin_fn_macro.rs | 66 +++-- crates/hir-expand/src/db.rs | 14 +- crates/hir-expand/src/eager.rs | 6 +- crates/hir-expand/src/fixup.rs | 18 +- crates/hir-expand/src/hygiene.rs | 2 +- crates/hir-expand/src/lib.rs | 2 + crates/hir-expand/src/name.rs | 2 +- crates/hir-expand/src/proc_macro.rs | 24 +- crates/hir-expand/src/quote.rs | 112 ++++----- crates/mbe/src/benchmark.rs | 28 ++- crates/mbe/src/expander.rs | 7 +- crates/mbe/src/expander/matcher.rs | 41 ++-- crates/mbe/src/expander/transcriber.rs | 70 ++++-- crates/mbe/src/lib.rs | 55 +++-- crates/mbe/src/parser.rs | 20 +- crates/mbe/src/syntax_bridge.rs | 126 ++++++---- crates/mbe/src/syntax_bridge/tests.rs | 4 +- crates/mbe/src/to_parser_input.rs | 29 ++- crates/mbe/src/tt_iter.rs | 7 +- crates/proc-macro-api/src/lib.rs | 9 +- crates/proc-macro-api/src/msg.rs | 18 +- crates/proc-macro-api/src/msg/flat.rs | 43 ++-- .../proc-macro-srv/src/abis/abi_1_63/mod.rs | 1 + .../src/abis/abi_1_63/ra_server.rs | 119 ++++----- .../src/abis/abi_sysroot/mod.rs | 2 +- .../src/abis/abi_sysroot/ra_server.rs | 65 ++--- .../abi_sysroot/ra_server/token_stream.rs | 28 ++- crates/proc-macro-srv/src/abis/mod.rs | 2 + crates/proc-macro-srv/src/dylib.rs | 2 + crates/proc-macro-srv/src/lib.rs | 2 + crates/proc-macro-srv/src/tests/mod.rs | 26 +- crates/rust-analyzer/src/reload.rs | 4 +- crates/stdx/src/macros.rs | 9 + crates/tt/src/buffer.rs | 84 ++++--- crates/tt/src/lib.rs | 228 +++++++++++------- 48 files changed, 804 insertions(+), 567 deletions(-) diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 5b0ed1648db5b..60d1e488d8a43 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -6,7 +6,7 @@ use rustc_hash::FxHashMap; use test_utils::{ extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER, ESCAPED_CURSOR_MARKER, }; -use 
tt::Subtree; +use tt::token_id::Subtree; use vfs::{file_set::FileSet, VfsPath}; use crate::{ @@ -495,16 +495,15 @@ impl ProcMacroExpander for MirrorProcMacroExpander { _: &Env, ) -> Result { fn traverse(input: &Subtree) -> Subtree { - let mut res = Subtree::default(); - res.delimiter = input.delimiter; + let mut token_trees = vec![]; for tt in input.token_trees.iter().rev() { let tt = match tt { tt::TokenTree::Leaf(leaf) => tt::TokenTree::Leaf(leaf.clone()), tt::TokenTree::Subtree(sub) => tt::TokenTree::Subtree(traverse(sub)), }; - res.token_trees.push(tt); + token_trees.push(tt); } - res + Subtree { delimiter: input.delimiter, token_trees } } Ok(traverse(input)) } diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index db9589ca186ec..43388e915b5d3 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -12,7 +12,7 @@ use cfg::CfgOptions; use rustc_hash::FxHashMap; use stdx::hash::{NoHashHashMap, NoHashHashSet}; use syntax::SmolStr; -use tt::Subtree; +use tt::token_id::Subtree; use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath}; /// Files are grouped into source roots. A source root is a directory on the diff --git a/crates/cfg/src/cfg_expr.rs b/crates/cfg/src/cfg_expr.rs index 5f4eefa836619..fb7505ba2dd5b 100644 --- a/crates/cfg/src/cfg_expr.rs +++ b/crates/cfg/src/cfg_expr.rs @@ -66,7 +66,7 @@ impl From for CfgExpr { } impl CfgExpr { - pub fn parse(tt: &tt::Subtree) -> CfgExpr { + pub fn parse(tt: &tt::Subtree) -> CfgExpr { next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid) } /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates. @@ -85,7 +85,7 @@ impl CfgExpr { } } -fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option { +fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option { let name = match it.next() { None => return None, Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(), diff --git a/crates/hir-def/src/adt.rs b/crates/hir-def/src/adt.rs index cd35ba00f6f52..dcea679567a53 100644 --- a/crates/hir-def/src/adt.rs +++ b/crates/hir-def/src/adt.rs @@ -2,6 +2,7 @@ use std::sync::Arc; +use crate::tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}; use base_db::CrateId; use either::Either; use hir_expand::{ @@ -12,7 +13,6 @@ use intern::Interned; use la_arena::{Arena, ArenaMap}; use rustc_abi::{Integer, IntegerType}; use syntax::ast::{self, HasName, HasVisibility}; -use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}; use crate::{ body::{CfgExpander, LowerCtx}, @@ -82,7 +82,7 @@ fn repr_from_value( fn parse_repr_tt(tt: &Subtree) -> Option { match tt.delimiter { - Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {} + Delimiter { kind: DelimiterKind::Parenthesis, .. 
} => {} _ => return None, } diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index 4e49217329697..fcd92ad338583 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -16,7 +16,6 @@ use syntax::{ ast::{self, HasAttrs, IsString}, AstPtr, AstToken, SmolStr, TextRange, TextSize, }; -use tt::Subtree; use crate::{ db::DefDatabase, @@ -234,7 +233,7 @@ impl Attrs { pub fn has_doc_hidden(&self) -> bool { self.by_key("doc").tt_values().any(|tt| { - tt.delimiter_kind() == Some(DelimiterKind::Parenthesis) && + tt.delimiter.kind == DelimiterKind::Parenthesis && matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "hidden") }) } @@ -628,7 +627,7 @@ pub struct AttrQuery<'attr> { } impl<'attr> AttrQuery<'attr> { - pub fn tt_values(self) -> impl Iterator { + pub fn tt_values(self) -> impl Iterator { self.attrs().filter_map(|attr| attr.token_tree_value()) } diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index f461e85b0182c..c3c1dfd39ac0b 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -142,7 +142,7 @@ impl FunctionData { } } -fn parse_rustc_legacy_const_generics(tt: &tt::Subtree) -> Box<[u32]> { +fn parse_rustc_legacy_const_generics(tt: &crate::tt::Subtree) -> Box<[u32]> { let mut indices = Vec::new(); for args in tt.token_trees.chunks(2) { match &args[0] { diff --git a/crates/hir-def/src/lib.rs b/crates/hir-def/src/lib.rs index aabd694820813..d07c5fb67c6f6 100644 --- a/crates/hir-def/src/lib.rs +++ b/crates/hir-def/src/lib.rs @@ -79,6 +79,8 @@ use nameres::DefMap; use stdx::impl_from; use syntax::ast; +use ::tt::token_id as tt; + use crate::{ adt::VariantData, builtin_type::BuiltinType, @@ -973,15 +975,19 @@ fn attr_macro_as_call_id( def: MacroDefId, is_derive: bool, ) -> MacroCallId { - let mut arg = match macro_attr.input.as_deref() { - Some(AttrInput::TokenTree(tt, map)) => (tt.clone(), map.clone()), - _ => Default::default(), + let arg = match macro_attr.input.as_deref() { + Some(AttrInput::TokenTree(tt, map)) => ( + { + let mut tt = tt.clone(); + tt.delimiter = tt::Delimiter::UNSPECIFIED; + tt + }, + map.clone(), + ), + _ => (tt::Subtree::empty(), Default::default()), }; - // The parentheses are always disposed here. 
- arg.0.delimiter = None; - - let res = def.as_lazy_macro( + def.as_lazy_macro( db.upcast(), krate, MacroCallKind::Attr { @@ -990,8 +996,7 @@ fn attr_macro_as_call_id( invoc_attr_index: macro_attr.id, is_derive, }, - ); - res + ) } intern::impl_internable!( crate::type_ref::TypeRef, diff --git a/crates/hir-def/src/macro_expansion_tests.rs b/crates/hir-def/src/macro_expansion_tests.rs index 4907f237f6b8a..5ab90d92d9bd1 100644 --- a/crates/hir-def/src/macro_expansion_tests.rs +++ b/crates/hir-def/src/macro_expansion_tests.rs @@ -30,7 +30,7 @@ use syntax::{ SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT}, SyntaxNode, TextRange, T, }; -use tt::{Subtree, TokenId}; +use tt::token_id::{Subtree, TokenId}; use crate::{ db::DefDatabase, macro_id_to_def_id, nameres::ModuleSource, resolver::HasResolver, @@ -253,9 +253,9 @@ fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tre tree.token_trees.iter().for_each(|tree| match tree { tt::TokenTree::Leaf(leaf) => { let id = match leaf { - tt::Leaf::Literal(it) => it.id, - tt::Leaf::Punct(it) => it.id, - tt::Leaf::Ident(it) => it.id, + tt::Leaf::Literal(it) => it.span, + tt::Leaf::Punct(it) => it.span, + tt::Leaf::Ident(it) => it.span, }; ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id))); } diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 6c2bbc74d60ba..4b39a20d86c6e 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -46,6 +46,7 @@ use crate::{ }, path::{ImportAlias, ModPath, PathKind}, per_ns::PerNs, + tt, visibility::{RawVisibility, Visibility}, AdtId, AstId, AstIdWithPath, ConstLoc, EnumLoc, EnumVariantId, ExternBlockLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc, @@ -83,7 +84,8 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T .enumerate() .map(|(idx, it)| { // FIXME: a hacky way to create a Name from string. - let name = tt::Ident { text: it.name.clone(), id: tt::TokenId::unspecified() }; + let name = + tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() }; ( name.as_name(), ProcMacroExpander::new(def_map.krate, base_db::ProcMacroId(idx as u32)), @@ -451,7 +453,10 @@ impl DefCollector<'_> { directive.module_id, MacroCallKind::Attr { ast_id: ast_id.ast_id, - attr_args: Default::default(), + attr_args: std::sync::Arc::new(( + tt::Subtree::empty(), + Default::default(), + )), invoc_attr_index: attr.id, is_derive: false, }, @@ -1947,7 +1952,8 @@ impl ModCollector<'_, '_> { let name = match attrs.by_key("rustc_builtin_macro").string_value() { Some(it) => { // FIXME: a hacky way to create a Name from string. - name = tt::Ident { text: it.clone(), id: tt::TokenId::unspecified() }.as_name(); + name = + tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name(); &name } None => { diff --git a/crates/hir-def/src/nameres/proc_macro.rs b/crates/hir-def/src/nameres/proc_macro.rs index 06b23392cfe46..caad4a1f38172 100644 --- a/crates/hir-def/src/nameres/proc_macro.rs +++ b/crates/hir-def/src/nameres/proc_macro.rs @@ -1,9 +1,9 @@ //! Nameres-specific procedural macro data and helpers. 
use hir_expand::name::{AsName, Name}; -use tt::{Leaf, TokenTree}; use crate::attr::Attrs; +use crate::tt::{Leaf, TokenTree}; #[derive(Debug, PartialEq, Eq)] pub struct ProcMacroDef { diff --git a/crates/hir-expand/src/attrs.rs b/crates/hir-expand/src/attrs.rs index c7135732b8a2a..5c04f8e8b8f37 100644 --- a/crates/hir-expand/src/attrs.rs +++ b/crates/hir-expand/src/attrs.rs @@ -8,13 +8,13 @@ use intern::Interned; use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; use smallvec::{smallvec, SmallVec}; use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode}; -use tt::Subtree; use crate::{ db::AstDatabase, hygiene::Hygiene, mod_path::{ModPath, PathKind}, name::AsName, + tt::{self, Subtree}, InFile, }; @@ -117,7 +117,10 @@ impl RawAttrs { let index = attr.id; let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| { - let tree = Subtree { delimiter: None, token_trees: attr.to_vec() }; + let tree = Subtree { + delimiter: tt::Delimiter::unspecified(), + token_trees: attr.to_vec(), + }; // FIXME hygiene let hygiene = Hygiene::new_unhygienic(); Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx)) @@ -266,7 +269,7 @@ impl Attr { pub fn parse_path_comma_token_tree(&self) -> Option + '_> { let args = self.token_tree_value()?; - if args.delimiter_kind() != Some(DelimiterKind::Parenthesis) { + if args.delimiter.kind != DelimiterKind::Parenthesis { return None; } let paths = args diff --git a/crates/hir-expand/src/builtin_attr_macro.rs b/crates/hir-expand/src/builtin_attr_macro.rs index 58d192f9fe008..906ca991d73be 100644 --- a/crates/hir-expand/src/builtin_attr_macro.rs +++ b/crates/hir-expand/src/builtin_attr_macro.rs @@ -1,6 +1,6 @@ //! Builtin attributes. -use crate::{db::AstDatabase, name, ExpandResult, MacroCallId, MacroCallKind}; +use crate::{db::AstDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind}; macro_rules! register_builtin { ( $(($name:ident, $variant:ident) => $expand:ident),* ) => { @@ -97,7 +97,7 @@ fn derive_attr_expand( let loc = db.lookup_intern_macro_call(id); let derives = match &loc.kind { MacroCallKind::Attr { attr_args, is_derive: true, .. 
} => &attr_args.0, - _ => return ExpandResult::ok(Default::default()), + _ => return ExpandResult::ok(tt::Subtree::empty()), }; pseudo_derive_attr_expansion(tt, derives) } @@ -110,7 +110,7 @@ pub fn pseudo_derive_attr_expansion( tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char, spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), })) }; diff --git a/crates/hir-expand/src/builtin_derive_macro.rs b/crates/hir-expand/src/builtin_derive_macro.rs index 8966047c9b259..060a680542fd9 100644 --- a/crates/hir-expand/src/builtin_derive_macro.rs +++ b/crates/hir-expand/src/builtin_derive_macro.rs @@ -3,11 +3,11 @@ use base_db::{CrateOrigin, LangCrateOrigin}; use tracing::debug; +use crate::tt::{self, TokenId}; use syntax::{ ast::{self, AstNode, HasGenericParams, HasModuleItem, HasName}, match_ast, }; -use tt::TokenId; use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId}; @@ -92,7 +92,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result { })?; let name_token_id = token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified); - let name_token = tt::Ident { id: name_token_id, text: name.text().into() }; + let name_token = tt::Ident { span: name_token_id, text: name.text().into() }; let param_types = params .into_iter() .flat_map(|param_list| param_list.type_or_const_params()) @@ -101,7 +101,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result { let ty = param .ty() .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0) - .unwrap_or_default(); + .unwrap_or_else(tt::Subtree::empty); Some(ty) } else { None @@ -114,7 +114,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result { fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResult { let info = match parse_adt(tt) { Ok(info) => info, - Err(e) => return ExpandResult::only_err(e), + Err(e) => return ExpandResult::with_err(tt::Subtree::empty(), e), }; let (params, args): (Vec<_>, Vec<_>) = info .param_types @@ -122,7 +122,7 @@ fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResu .enumerate() .map(|(idx, param_ty)| { let ident = tt::Leaf::Ident(tt::Ident { - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), text: format!("T{idx}").into(), }); let ident_ = ident.clone(); diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 5522bdf3b3fe2..9f3fa73d4e60e 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -9,7 +9,9 @@ use syntax::{ SmolStr, }; -use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId, MacroCallLoc}; +use crate::{ + db::AstDatabase, name, quote, tt, ExpandError, ExpandResult, MacroCallId, MacroCallLoc, +}; macro_rules! register_builtin { ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),* ) => { @@ -61,7 +63,7 @@ macro_rules! register_builtin { }; } -#[derive(Debug, Default)] +#[derive(Debug)] pub struct ExpandedEager { pub(crate) subtree: tt::Subtree, /// The included file ID of the include macro. @@ -116,7 +118,7 @@ register_builtin! 
{ } const DOLLAR_CRATE: tt::Ident = - tt::Ident { text: SmolStr::new_inline("$crate"), id: tt::TokenId::unspecified() }; + tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() }; fn module_path_expand( _db: &dyn AstDatabase, @@ -162,7 +164,7 @@ fn stringify_expand( _id: MacroCallId, tt: &tt::Subtree, ) -> ExpandResult { - let pretty = tt::pretty(&tt.token_trees); + let pretty = ::tt::pretty(&tt.token_trees); let expanded = quote! { #pretty @@ -194,11 +196,11 @@ fn assert_expand( let expanded = match &*args { [cond, panic_args @ ..] => { let comma = tt::Subtree { - delimiter: None, + delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), }))], }; let cond = cond.clone(); @@ -247,7 +249,10 @@ fn format_args_expand( let mut args = parse_exprs_with_sep(tt, ','); if args.is_empty() { - return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule.into()); + return ExpandResult::with_err( + tt::Subtree::empty(), + mbe::ExpandError::NoMatchingRule.into(), + ); } for arg in &mut args { // Remove `key =`. @@ -282,7 +287,7 @@ fn asm_expand( for tt in tt.token_trees.chunks(2) { match tt { [tt::TokenTree::Leaf(tt::Leaf::Literal(lit))] - | [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', id: _, spacing: _ }))] => + | [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] => { let krate = DOLLAR_CRATE.clone(); literals.push(quote!(#krate::format_args!(#lit);)); @@ -400,7 +405,7 @@ fn concat_expand( // FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses // to ensure the right parsing order, so skip the parentheses here. Ideally we'd // implement rustc's model. 
cc https://github.com/rust-lang/rust-analyzer/pull/10623 - if let tt::TokenTree::Subtree(tt::Subtree { delimiter: Some(delim), token_trees }) = t { + if let tt::TokenTree::Subtree(tt::Subtree { delimiter: delim, token_trees }) = t { if let [tt] = &**token_trees { if delim.kind == tt::DelimiterKind::Parenthesis { t = tt; @@ -459,9 +464,7 @@ fn concat_bytes_expand( } } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (), - tt::TokenTree::Subtree(tree) - if tree.delimiter_kind() == Some(tt::DelimiterKind::Bracket) => - { + tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => { if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes) { err.get_or_insert(e); break; @@ -473,7 +476,7 @@ fn concat_bytes_expand( } } } - let ident = tt::Ident { text: bytes.join(", ").into(), id: tt::TokenId::unspecified() }; + let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() }; ExpandResult { value: ExpandedEager::new(quote!([#ident])), err } } @@ -521,7 +524,7 @@ fn concat_idents_expand( } } } - let ident = tt::Ident { text: ident.into(), id: tt::TokenId::unspecified() }; + let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() }; ExpandResult { value: ExpandedEager::new(quote!(#ident)), err } } @@ -572,7 +575,10 @@ fn include_expand( Ok((subtree, file_id)) => { ExpandResult::ok(ExpandedEager { subtree, included_file: Some(file_id) }) } - Err(e) => ExpandResult::only_err(e), + Err(e) => ExpandResult::with_err( + ExpandedEager { subtree: tt::Subtree::empty(), included_file: None }, + e, + ), } } @@ -582,15 +588,18 @@ fn include_bytes_expand( tt: &tt::Subtree, ) -> ExpandResult { if let Err(e) = parse_string(tt) { - return ExpandResult::only_err(e); + return ExpandResult::with_err( + ExpandedEager { subtree: tt::Subtree::empty(), included_file: None }, + e, + ); } // FIXME: actually read the file here if the user asked for macro expansion let res = tt::Subtree { - delimiter: None, + delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { text: r#"b"""#.into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), }))], }; ExpandResult::ok(ExpandedEager::new(res)) @@ -603,7 +612,12 @@ fn include_str_expand( ) -> ExpandResult { let path = match parse_string(tt) { Ok(it) => it, - Err(e) => return ExpandResult::only_err(e), + Err(e) => { + return ExpandResult::with_err( + ExpandedEager { subtree: tt::Subtree::empty(), included_file: None }, + e, + ) + } }; // FIXME: we're not able to read excluded files (which is most of them because @@ -635,7 +649,12 @@ fn env_expand( ) -> ExpandResult { let key = match parse_string(tt) { Ok(it) => it, - Err(e) => return ExpandResult::only_err(e), + Err(e) => { + return ExpandResult::with_err( + ExpandedEager { subtree: tt::Subtree::empty(), included_file: None }, + e, + ) + } }; let mut err = None; @@ -666,7 +685,12 @@ fn option_env_expand( ) -> ExpandResult { let key = match parse_string(tt) { Ok(it) => it, - Err(e) => return ExpandResult::only_err(e), + Err(e) => { + return ExpandResult::with_err( + ExpandedEager { subtree: tt::Subtree::empty(), included_file: None }, + e, + ) + } }; let expanded = match get_env_inner(db, arg_id, &key) { diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 2ab78c32871ca..76016274f0e85 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -14,7 +14,7 @@ use syntax::{ use crate::{ ast_id_map::AstIdMap, 
builtin_attr_macro::pseudo_derive_attr_expansion, fixup, - hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, + hygiene::HygieneFrame, tt, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, }; @@ -175,7 +175,7 @@ pub fn expand_speculative( match attr.token_tree() { Some(token_tree) => { let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax()); - tree.delimiter = None; + tree.delimiter = tt::Delimiter::unspecified(); let shift = mbe::Shift::new(&tt); shift.shift_all(&mut tree); @@ -210,7 +210,7 @@ pub fn expand_speculative( // Otherwise the expand query will fetch the non speculative attribute args and pass those instead. let mut speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) => { - tt.delimiter = None; + tt.delimiter = tt::Delimiter::unspecified(); expander.expand(db, loc.krate, &tt, attr_arg.as_ref()) } MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { @@ -316,9 +316,8 @@ fn macro_arg( if loc.def.is_proc_macro() { // proc macros expect their inputs without parentheses, MBEs expect it with them included - tt.delimiter = None; + tt.delimiter = tt::Delimiter::unspecified(); } - Some(Arc::new((tt, tmap, fixups.undo_info))) } @@ -479,7 +478,10 @@ fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult it, None => { - return ExpandResult::only_err(ExpandError::Other("No arguments for proc-macro".into())) + return ExpandResult::with_err( + tt::Subtree::empty(), + ExpandError::Other("No arguments for proc-macro".into()), + ) } }; diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs index a1474c44e6c6f..dfab7ec92c763 100644 --- a/crates/hir-expand/src/eager.rs +++ b/crates/hir-expand/src/eager.rs @@ -108,7 +108,7 @@ pub fn expand_eager_macro( .value .token_tree() .map(|tt| mbe::syntax_node_to_token_tree(tt.syntax()).0) - .unwrap_or_default(); + .unwrap_or_else(tt::Subtree::empty); let ast_map = db.ast_id_map(macro_call.file_id); let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(¯o_call.value)); @@ -165,9 +165,9 @@ pub fn expand_eager_macro( } } -fn to_subtree(node: &SyntaxNode) -> tt::Subtree { +fn to_subtree(node: &SyntaxNode) -> crate::tt::Subtree { let mut subtree = mbe::syntax_node_to_token_tree(node).0; - subtree.delimiter = None; + subtree.delimiter = crate::tt::Delimiter::unspecified(); subtree } diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index 75d364d5f846b..c811d1c66a82d 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -9,7 +9,7 @@ use syntax::{ ast::{self, AstNode, HasLoopBody}, match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, }; -use tt::Subtree; +use tt::token_id::Subtree; /// The result of calculating fixes for a syntax node -- a bunch of changes /// (appending to and replacing nodes), the information that is needed to @@ -297,9 +297,11 @@ pub(crate) fn reverse_fixups( tt.token_trees = tts .into_iter() .filter(|tt| match tt { - tt::TokenTree::Leaf(leaf) => token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID), + tt::TokenTree::Leaf(leaf) => { + token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID) + } tt::TokenTree::Subtree(st) => { - st.delimiter.map_or(true, |d| token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)) + 
token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID) } }) .flat_map(|tt| match tt { @@ -308,9 +310,9 @@ pub(crate) fn reverse_fixups( SmallVec::from_const([tt.into()]) } tt::TokenTree::Leaf(leaf) => { - if let Some(id) = token_map.synthetic_token_id(leaf.id()) { + if let Some(id) = token_map.synthetic_token_id(*leaf.span()) { let original = undo_info.original[id.0 as usize].clone(); - if original.delimiter.is_none() { + if original.delimiter.kind == tt::DelimiterKind::Invisible { original.token_trees.into() } else { SmallVec::from_const([original.into()]) @@ -327,6 +329,8 @@ pub(crate) fn reverse_fixups( mod tests { use expect_test::{expect, Expect}; + use crate::tt; + use super::reverse_fixups; // The following three functions are only meant to check partial structural equivalence of @@ -341,7 +345,7 @@ mod tests { } fn check_subtree_eq(a: &tt::Subtree, b: &tt::Subtree) -> bool { - a.delimiter.map(|it| it.kind) == b.delimiter.map(|it| it.kind) + a.delimiter.kind == b.delimiter.kind && a.token_trees.len() == b.token_trees.len() && a.token_trees.iter().zip(&b.token_trees).all(|(a, b)| check_tt_eq(a, b)) } @@ -386,7 +390,7 @@ mod tests { let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node()); assert!( check_subtree_eq(&tt, &original_as_tt), - "different token tree: {tt:?}, {original_as_tt:?}" + "different token tree: {tt:?},\n{original_as_tt:?}" ); } diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs index 5a55dc5c9e25c..2300ee9d08998 100644 --- a/crates/hir-expand/src/hygiene.rs +++ b/crates/hir-expand/src/hygiene.rs @@ -128,7 +128,7 @@ struct HygieneInfo { attr_input_or_mac_def_start: Option>, macro_def: Arc, - macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>, + macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>, macro_arg_shift: mbe::Shift, exp_map: Arc, } diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index 967a8fedbd108..bc941b5417242 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -22,6 +22,8 @@ mod fixup; pub use mbe::{Origin, ValueResult}; +use ::tt::token_id as tt; + use std::{fmt, hash::Hash, iter, sync::Arc}; use base_db::{ diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index b62f4fe770160..b515472501345 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -191,7 +191,7 @@ impl AsName for ast::NameOrNameRef { } } -impl AsName for tt::Ident { +impl AsName for tt::Ident { fn as_name(&self) -> Name { Name::resolve(&self.text) } diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index 5afdcc0e66dbc..3f4d2540c099a 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -3,7 +3,7 @@ use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; use stdx::never; -use crate::{db::AstDatabase, ExpandError, ExpandResult}; +use crate::{db::AstDatabase, tt, ExpandError, ExpandResult}; #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] pub struct ProcMacroExpander { @@ -39,7 +39,10 @@ impl ProcMacroExpander { Ok(proc_macros) => proc_macros, Err(_) => { never!("Non-dummy expander even though there are no proc macros"); - return ExpandResult::only_err(ExpandError::Other("Internal error".into())); + return ExpandResult::with_err( + tt::Subtree::empty(), + ExpandError::Other("Internal error".into()), + ); } }; let proc_macro = match proc_macros.get(id.0 as usize) { @@ -50,7 
+53,10 @@ impl ProcMacroExpander { proc_macros.len(), id.0 ); - return ExpandResult::only_err(ExpandError::Other("Internal error".into())); + return ExpandResult::with_err( + tt::Subtree::empty(), + ExpandError::Other("Internal error".into()), + ); } }; @@ -69,13 +75,17 @@ impl ProcMacroExpander { } } ProcMacroExpansionError::System(text) - | ProcMacroExpansionError::Panic(text) => { - ExpandResult::only_err(ExpandError::Other(text.into())) - } + | ProcMacroExpansionError::Panic(text) => ExpandResult::with_err( + tt::Subtree::empty(), + ExpandError::Other(text.into()), + ), }, } } - None => ExpandResult::only_err(ExpandError::UnresolvedProcMacro(self.krate)), + None => ExpandResult::with_err( + tt::Subtree::empty(), + ExpandError::UnresolvedProcMacro(self.krate), + ), } } } diff --git a/crates/hir-expand/src/quote.rs b/crates/hir-expand/src/quote.rs index c0a7bc7ca8815..63586f9daf069 100644 --- a/crates/hir-expand/src/quote.rs +++ b/crates/hir-expand/src/quote.rs @@ -9,17 +9,18 @@ #[macro_export] macro_rules! __quote { () => { - Vec::::new() + Vec::::new() }; ( @SUBTREE $delim:ident $($tt:tt)* ) => { { let children = $crate::__quote!($($tt)*); - tt::Subtree { - delimiter: Some(tt::Delimiter { - kind: tt::DelimiterKind::$delim, - id: tt::TokenId::unspecified(), - }), + crate::tt::Subtree { + delimiter: crate::tt::Delimiter { + kind: crate::tt::DelimiterKind::$delim, + open: crate::tt::TokenId::unspecified(), + close: crate::tt::TokenId::unspecified(), + }, token_trees: $crate::quote::IntoTt::to_tokens(children), } } @@ -28,10 +29,10 @@ macro_rules! __quote { ( @PUNCT $first:literal ) => { { vec![ - tt::Leaf::Punct(tt::Punct { + crate::tt::Leaf::Punct(crate::tt::Punct { char: $first, - spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), + spacing: crate::tt::Spacing::Alone, + span: crate::tt::TokenId::unspecified(), }).into() ] } @@ -40,15 +41,15 @@ macro_rules! __quote { ( @PUNCT $first:literal, $sec:literal ) => { { vec![ - tt::Leaf::Punct(tt::Punct { + crate::tt::Leaf::Punct(crate::tt::Punct { char: $first, - spacing: tt::Spacing::Joint, - id: tt::TokenId::unspecified(), + spacing: crate::tt::Spacing::Joint, + span: crate::tt::TokenId::unspecified(), }).into(), - tt::Leaf::Punct(tt::Punct { + crate::tt::Leaf::Punct(crate::tt::Punct { char: $sec, - spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), + spacing: crate::tt::Spacing::Alone, + span: crate::tt::TokenId::unspecified(), }).into() ] } @@ -67,7 +68,7 @@ macro_rules! __quote { ( ## $first:ident $($tail:tt)* ) => { { - let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::>(); + let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::>(); let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*)); tokens.append(&mut tail_tokens); tokens @@ -86,9 +87,9 @@ macro_rules! __quote { // Ident ( $tt:ident ) => { vec![ { - tt::Leaf::Ident(tt::Ident { + crate::tt::Leaf::Ident(crate::tt::Ident { text: stringify!($tt).into(), - id: tt::TokenId::unspecified(), + span: crate::tt::TokenId::unspecified(), }).into() }] }; @@ -127,42 +128,42 @@ macro_rules! 
quote { } pub(crate) trait IntoTt { - fn to_subtree(self) -> tt::Subtree; - fn to_tokens(self) -> Vec; + fn to_subtree(self) -> crate::tt::Subtree; + fn to_tokens(self) -> Vec; } -impl IntoTt for Vec { - fn to_subtree(self) -> tt::Subtree { - tt::Subtree { delimiter: None, token_trees: self } +impl IntoTt for Vec { + fn to_subtree(self) -> crate::tt::Subtree { + crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self } } - fn to_tokens(self) -> Vec { + fn to_tokens(self) -> Vec { self } } -impl IntoTt for tt::Subtree { - fn to_subtree(self) -> tt::Subtree { +impl IntoTt for crate::tt::Subtree { + fn to_subtree(self) -> crate::tt::Subtree { self } - fn to_tokens(self) -> Vec { - vec![tt::TokenTree::Subtree(self)] + fn to_tokens(self) -> Vec { + vec![crate::tt::TokenTree::Subtree(self)] } } pub(crate) trait ToTokenTree { - fn to_token(self) -> tt::TokenTree; + fn to_token(self) -> crate::tt::TokenTree; } -impl ToTokenTree for tt::TokenTree { - fn to_token(self) -> tt::TokenTree { +impl ToTokenTree for crate::tt::TokenTree { + fn to_token(self) -> crate::tt::TokenTree { self } } -impl ToTokenTree for tt::Subtree { - fn to_token(self) -> tt::TokenTree { +impl ToTokenTree for crate::tt::Subtree { + fn to_token(self) -> crate::tt::TokenTree { self.into() } } @@ -171,15 +172,15 @@ macro_rules! impl_to_to_tokentrees { ($($ty:ty => $this:ident $im:block);*) => { $( impl ToTokenTree for $ty { - fn to_token($this) -> tt::TokenTree { - let leaf: tt::Leaf = $im.into(); + fn to_token($this) -> crate::tt::TokenTree { + let leaf: crate::tt::Leaf = $im.into(); leaf.into() } } impl ToTokenTree for &$ty { - fn to_token($this) -> tt::TokenTree { - let leaf: tt::Leaf = $im.clone().into(); + fn to_token($this) -> crate::tt::TokenTree { + let leaf: crate::tt::Leaf = $im.clone().into(); leaf.into() } } @@ -188,16 +189,16 @@ macro_rules! impl_to_to_tokentrees { } impl_to_to_tokentrees! 
{ - u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} }; - usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} }; - i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} }; - bool => self { tt::Ident{text: self.to_string().into(), id: tt::TokenId::unspecified()} }; - tt::Leaf => self { self }; - tt::Literal => self { self }; - tt::Ident => self { self }; - tt::Punct => self { self }; - &str => self { tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), id: tt::TokenId::unspecified()}}; - String => self { tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), id: tt::TokenId::unspecified()}} + u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; + usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; + i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; + bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} }; + crate::tt::Leaf => self { self }; + crate::tt::Literal => self { self }; + crate::tt::Ident => self { self }; + crate::tt::Punct => self { self }; + &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}; + String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}} } #[cfg(test)] @@ -223,8 +224,8 @@ mod tests { assert_eq!(quote!(#s).to_string(), "\"hello\""); } - fn mk_ident(name: &str) -> tt::Ident { - tt::Ident { text: name.into(), id: tt::TokenId::unspecified() } + fn mk_ident(name: &str) -> crate::tt::Ident { + crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() } } #[test] @@ -234,7 +235,7 @@ mod tests { let quoted = quote!(#a); assert_eq!(quoted.to_string(), "hello"); let t = format!("{quoted:?}"); - assert_eq!(t, "SUBTREE $\n IDENT hello 4294967295"); + assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n IDENT hello 4294967295"); } #[test] @@ -263,11 +264,12 @@ mod tests { let fields = [mk_ident("name"), mk_ident("id")]; let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees); - let list = tt::Subtree { - delimiter: Some(tt::Delimiter { - kind: tt::DelimiterKind::Brace, - id: tt::TokenId::unspecified(), - }), + let list = crate::tt::Subtree { + delimiter: crate::tt::Delimiter { + kind: crate::tt::DelimiterKind::Brace, + open: crate::tt::TokenId::unspecified(), + close: crate::tt::TokenId::unspecified(), + }, token_trees: fields.collect(), }; diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index 0fee6dfe43336..894355fcbc9b3 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -9,7 +9,7 @@ use test_utils::{bench, bench_fixture, skip_slow_tests}; use crate::{ parser::{MetaVarKind, Op, RepeatKind, Separator}, - syntax_node_to_token_tree, DeclarativeMacro, + syntax_node_to_token_tree, tt, DeclarativeMacro, }; #[test] @@ -91,7 +91,14 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri // So we just skip any error cases and try again let mut try_cnt = 0; loop { - let mut subtree = tt::Subtree::default(); + let mut subtree = tt::Subtree { + delimiter: tt::Delimiter { + open: tt::TokenId::UNSPECIFIED, + close: tt::TokenId::UNSPECIFIED, + kind: tt::DelimiterKind::Invisible, + }, + 
token_trees: vec![], + }; for op in rule.lhs.iter() { collect_from_op(op, &mut subtree, &mut seed); } @@ -196,12 +203,15 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri *seed } fn make_ident(ident: &str) -> tt::TokenTree { - tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) }) - .into() + tt::Leaf::Ident(tt::Ident { + span: tt::TokenId::unspecified(), + text: SmolStr::new(ident), + }) + .into() } fn make_punct(char: char) -> tt::TokenTree { tt::Leaf::Punct(tt::Punct { - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), char, spacing: tt::Spacing::Alone, }) @@ -209,7 +219,7 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri } fn make_literal(lit: &str) -> tt::TokenTree { tt::Leaf::Literal(tt::Literal { - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), text: SmolStr::new(lit), }) .into() @@ -219,7 +229,11 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri token_trees: Option>, ) -> tt::TokenTree { tt::Subtree { - delimiter: Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }), + delimiter: tt::Delimiter { + open: tt::TokenId::unspecified(), + close: tt::TokenId::unspecified(), + kind, + }, token_trees: token_trees.unwrap_or_default(), } .into() diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs index 100ec6bfb93ac..7537dc3226149 100644 --- a/crates/mbe/src/expander.rs +++ b/crates/mbe/src/expander.rs @@ -8,7 +8,7 @@ mod transcriber; use rustc_hash::FxHashMap; use syntax::SmolStr; -use crate::{parser::MetaVarKind, ExpandError, ExpandResult}; +use crate::{parser::MetaVarKind, tt, ExpandError, ExpandResult}; pub(crate) fn expand_rules( rules: &[crate::Rule], @@ -45,7 +45,10 @@ pub(crate) fn expand_rules( transcriber::transcribe(&rule.rhs, &match_.bindings); ExpandResult { value, err: match_.err.or(transcribe_err) } } else { - ExpandResult::only_err(ExpandError::NoMatchingRule) + ExpandResult::with_err( + tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }, + ExpandError::NoMatchingRule, + ) } } diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 88eae136f7329..f4ea9e5c81658 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -67,6 +67,7 @@ use syntax::SmolStr; use crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, parser::{MetaVarKind, Op, RepeatKind, Separator}, + tt, tt_iter::TtIter, ExpandError, MetaTemplate, ValueResult, }; @@ -75,7 +76,8 @@ impl Bindings { fn push_optional(&mut self, name: &SmolStr) { // FIXME: Do we have a better way to represent an empty token ? 
// Insert an empty subtree for empty token - let tt = tt::Subtree::default().into(); + let tt = + tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }.into(); self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt))); } @@ -462,9 +464,9 @@ fn match_loop_inner<'t>( } OpDelimited::Op(Op::Subtree { tokens, delimiter }) => { if let Ok(subtree) = src.clone().expect_subtree() { - if subtree.delimiter_kind() == delimiter.map(|it| it.kind) { + if subtree.delimiter.kind == delimiter.kind { item.stack.push(item.dot); - item.dot = tokens.iter_delimited(delimiter.as_ref()); + item.dot = tokens.iter_delimited(Some(delimiter)); cur_items.push(item); } } @@ -663,8 +665,8 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match { } res.add_err(ExpandError::LeftoverTokens); - if let Some(error_reover_item) = error_recover_item { - res.bindings = bindings_builder.build(&error_reover_item); + if let Some(error_recover_item) = error_recover_item { + res.bindings = bindings_builder.build(&error_recover_item); } return res; } @@ -782,7 +784,7 @@ fn match_meta_var(kind: MetaVarKind, input: &mut TtIter<'_>) -> ExpandResult lit.into(), Some(neg) => tt::TokenTree::Subtree(tt::Subtree { - delimiter: None, + delimiter: tt::Delimiter::unspecified(), token_trees: vec![neg, lit.into()], }), } @@ -810,7 +812,11 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) } impl MetaTemplate { fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> { - OpDelimitedIter { inner: &self.0, idx: 0, delimited } + OpDelimitedIter { + inner: &self.0, + idx: 0, + delimited: delimited.unwrap_or(&tt::Delimiter::UNSPECIFIED), + } } } @@ -824,20 +830,21 @@ enum OpDelimited<'a> { #[derive(Debug, Clone, Copy)] struct OpDelimitedIter<'a> { inner: &'a [Op], - delimited: Option<&'a tt::Delimiter>, + delimited: &'a tt::Delimiter, idx: usize, } impl<'a> OpDelimitedIter<'a> { fn is_eof(&self) -> bool { - let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 }; + let len = self.inner.len() + + if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 }; self.idx >= len } fn peek(&self) -> Option> { - match self.delimited { - None => self.inner.get(self.idx).map(OpDelimited::Op), - Some(_) => match self.idx { + match self.delimited.kind { + tt::DelimiterKind::Invisible => self.inner.get(self.idx).map(OpDelimited::Op), + _ => match self.idx { 0 => Some(OpDelimited::Open), i if i == self.inner.len() + 1 => Some(OpDelimited::Close), i => self.inner.get(i - 1).map(OpDelimited::Op), @@ -860,7 +867,8 @@ impl<'a> Iterator for OpDelimitedIter<'a> { } fn size_hint(&self) -> (usize, Option) { - let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 }; + let len = self.inner.len() + + if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 }; let remain = len.saturating_sub(self.idx); (remain, Some(remain)) } @@ -904,7 +912,10 @@ impl<'a> TtIter<'a> { } else { let puncts = self.expect_glued_punct()?; let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect(); - Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter: None, token_trees })) + Ok(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter::unspecified(), + token_trees, + })) } } else { self.next().ok_or(()).cloned() @@ -919,7 +930,7 @@ impl<'a> TtIter<'a> { let ident = self.expect_ident_or_underscore()?; Ok(tt::Subtree { - delimiter: None, + delimiter: tt::Delimiter::unspecified(), token_trees: vec![ 
tt::Leaf::Punct(*punct).into(), tt::Leaf::Ident(ident.clone()).into(), diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index c956770896260..dffb40d4bc886 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -2,11 +2,11 @@ //! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}` use syntax::SmolStr; -use tt::{Delimiter, Subtree}; use crate::{ expander::{Binding, Bindings, Fragment}, parser::{MetaVarKind, Op, RepeatKind, Separator}, + tt::{self, Delimiter}, ExpandError, ExpandResult, MetaTemplate, }; @@ -44,22 +44,23 @@ impl Bindings { Binding::Missing(it) => Ok(match it { MetaVarKind::Stmt => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), char: ';', spacing: tt::Spacing::Alone, }))) } MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { - delimiter: Some(tt::Delimiter { - id: tt::TokenId::unspecified(), + delimiter: tt::Delimiter { + open: tt::TokenId::unspecified(), + close: tt::TokenId::unspecified(), kind: tt::DelimiterKind::Brace, - }), + }, token_trees: vec![], })), // FIXME: Meta and Item should get proper defaults MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => { Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { - delimiter: None, + delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![], })) } @@ -71,19 +72,19 @@ impl Bindings { | MetaVarKind::Ident => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: SmolStr::new_inline("missing"), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), }))) } MetaVarKind::Lifetime => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: SmolStr::new_inline("'missing"), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), }))) } MetaVarKind::Literal => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: SmolStr::new_inline("\"missing\""), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), }))) } }), @@ -143,7 +144,7 @@ fn expand_subtree( } Op::Subtree { tokens, delimiter } => { let ExpandResult { value: tt, err: e } = - expand_subtree(ctx, tokens, *delimiter, arena); + expand_subtree(ctx, tokens, Some(*delimiter), arena); err = err.or(e); arena.push(tt.into()); } @@ -170,7 +171,7 @@ fn expand_subtree( arena.push( tt::Leaf::Literal(tt::Literal { text: index.to_string().into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), }) .into(), ); @@ -179,7 +180,13 @@ fn expand_subtree( } // drain the elements added in this instance of expand_subtree let tts = arena.drain(start_elements..).collect(); - ExpandResult { value: tt::Subtree { delimiter, token_trees: tts }, err } + ExpandResult { + value: tt::Subtree { + delimiter: delimiter.unwrap_or_else(tt::Delimiter::unspecified), + token_trees: tts, + }, + err, + } } fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandResult { @@ -201,17 +208,24 @@ fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandRe // ``` // We just treat it a normal tokens let tt = tt::Subtree { - delimiter: None, + delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![ - tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, id }).into(), - tt::Leaf::from(tt::Ident { text: v.clone(), id }).into(), + tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id }) 
+ .into(), + tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(), ], } .into(); ExpandResult::ok(Fragment::Tokens(tt)) } else { ctx.bindings.get(v, &mut ctx.nesting).map_or_else( - |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) }, + |e| ExpandResult { + value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter::unspecified(), + token_trees: vec![], + })), + err: Some(e), + }, ExpandResult::ok, ) } @@ -249,7 +263,10 @@ fn expand_repeat( ctx ); return ExpandResult { - value: Fragment::Tokens(Subtree::default().into()), + value: Fragment::Tokens( + tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] } + .into(), + ), err: Some(ExpandError::LimitExceeded), }; } @@ -258,7 +275,7 @@ fn expand_repeat( continue; } - t.delimiter = None; + t.delimiter = tt::Delimiter::unspecified(); push_subtree(&mut buf, t); if let Some(sep) = separator { @@ -292,7 +309,7 @@ fn expand_repeat( // Check if it is a single token subtree without any delimiter // e.g {Delimiter:None> ['>'] /Delimiter:None>} - let tt = tt::Subtree { delimiter: None, token_trees: buf }.into(); + let tt = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: buf }.into(); if RepeatKind::OneOrMore == kind && counter == 0 { return ExpandResult { @@ -307,11 +324,12 @@ fn push_fragment(buf: &mut Vec, fragment: Fragment) { match fragment { Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt), Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => { - if tt.delimiter.is_none() { - tt.delimiter = Some(tt::Delimiter { - id: tt::TokenId::unspecified(), + if tt.delimiter.kind == tt::DelimiterKind::Invisible { + tt.delimiter = tt::Delimiter { + open: tt::TokenId::UNSPECIFIED, + close: tt::TokenId::UNSPECIFIED, kind: tt::DelimiterKind::Parenthesis, - }) + }; } buf.push(tt.into()) } @@ -320,8 +338,8 @@ fn push_fragment(buf: &mut Vec, fragment: Fragment) { } fn push_subtree(buf: &mut Vec, tt: tt::Subtree) { - match tt.delimiter { - None => buf.extend(tt.token_trees), - Some(_) => buf.push(tt.into()), + match tt.delimiter.kind { + tt::DelimiterKind::Invisible => buf.extend(tt.token_trees), + _ => buf.push(tt.into()), } } diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 2373db97a3e41..ac107a0d6d6d1 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -18,6 +18,8 @@ mod to_parser_input; mod benchmark; mod token_map; +use ::tt::token_id as tt; + use std::fmt; use crate::{ @@ -26,8 +28,8 @@ use crate::{ }; // FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces +pub use self::tt::{Delimiter, DelimiterKind, Punct}; pub use ::parser::TopEntryPoint; -pub use tt::{Delimiter, DelimiterKind, Punct}; pub use crate::{ syntax_bridge::{ @@ -125,24 +127,26 @@ impl Shift { // Find the max token id inside a subtree fn max_id(subtree: &tt::Subtree) -> Option { - let filter = |tt: &_| match tt { - tt::TokenTree::Subtree(subtree) => { - let tree_id = max_id(subtree); - match subtree.delimiter { - Some(it) if it.id != tt::TokenId::unspecified() => { - Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0))) + let filter = + |tt: &_| match tt { + tt::TokenTree::Subtree(subtree) => { + let tree_id = max_id(subtree); + if subtree.delimiter.open != tt::TokenId::unspecified() { + Some(tree_id.map_or(subtree.delimiter.open.0, |t| { + t.max(subtree.delimiter.open.0) + })) + } else { + tree_id } - _ => tree_id, } - } - tt::TokenTree::Leaf(leaf) => { - let &(tt::Leaf::Ident(tt::Ident { id, .. 
}) - | tt::Leaf::Punct(tt::Punct { id, .. }) - | tt::Leaf::Literal(tt::Literal { id, .. })) = leaf; + tt::TokenTree::Leaf(leaf) => { + let &(tt::Leaf::Ident(tt::Ident { span, .. }) + | tt::Leaf::Punct(tt::Punct { span, .. }) + | tt::Leaf::Literal(tt::Literal { span, .. })) = leaf; - (id != tt::TokenId::unspecified()).then_some(id.0) - } - }; + (span != tt::TokenId::unspecified()).then_some(span.0) + } + }; subtree.token_trees.iter().filter_map(filter).max() } } @@ -152,14 +156,13 @@ impl Shift { for t in &mut tt.token_trees { match t { tt::TokenTree::Leaf( - tt::Leaf::Ident(tt::Ident { id, .. }) - | tt::Leaf::Punct(tt::Punct { id, .. }) - | tt::Leaf::Literal(tt::Literal { id, .. }), - ) => *id = self.shift(*id), + tt::Leaf::Ident(tt::Ident { span, .. }) + | tt::Leaf::Punct(tt::Punct { span, .. }) + | tt::Leaf::Literal(tt::Literal { span, .. }), + ) => *span = self.shift(*span), tt::TokenTree::Subtree(tt) => { - if let Some(it) = tt.delimiter.as_mut() { - it.id = self.shift(it.id); - } + tt.delimiter.open = self.shift(tt.delimiter.open); + tt.delimiter.close = self.shift(tt.delimiter.close); self.shift_all(tt) } } @@ -216,7 +219,7 @@ impl DeclarativeMacro { let mut src = TtIter::new(tt); let mut rules = Vec::new(); - if Some(tt::DelimiterKind::Brace) == tt.delimiter_kind() { + if tt::DelimiterKind::Brace == tt.delimiter.kind { cov_mark::hit!(parse_macro_def_rules); while src.len() > 0 { let rule = Rule::parse(&mut src, true)?; @@ -325,6 +328,10 @@ impl ValueResult { Self { value, err: None } } + pub fn with_err(value: T, err: E) -> Self { + Self { value, err: Some(err) } + } + pub fn only_err(err: E) -> Self where T: Default, diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index 875dc1ad87bd0..fd3d64719ac9a 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -4,7 +4,7 @@ use smallvec::{smallvec, SmallVec}; use syntax::SmolStr; -use crate::{tt_iter::TtIter, ParseError}; +use crate::{tt, tt_iter::TtIter, ParseError}; /// Consider /// @@ -54,7 +54,7 @@ pub(crate) enum Op { Ignore { name: SmolStr, id: tt::TokenId }, Index { depth: u32 }, Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option }, - Subtree { tokens: MetaTemplate, delimiter: Option }, + Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter }, Literal(tt::Literal), Punct(SmallVec<[tt::Punct; 3]>), Ident(tt::Ident), @@ -130,13 +130,13 @@ fn next_op( Some(it) => it, }; match second { - tt::TokenTree::Subtree(subtree) => match subtree.delimiter_kind() { - Some(tt::DelimiterKind::Parenthesis) => { + tt::TokenTree::Subtree(subtree) => match subtree.delimiter.kind { + tt::DelimiterKind::Parenthesis => { let (separator, kind) = parse_repeat(src)?; let tokens = MetaTemplate::parse(subtree, mode)?; Op::Repeat { tokens, separator, kind } } - Some(tt::DelimiterKind::Brace) => match mode { + tt::DelimiterKind::Brace => match mode { Mode::Template => { parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| { ParseError::unexpected("invalid metavariable expression") @@ -157,18 +157,18 @@ fn next_op( tt::TokenTree::Leaf(leaf) => match leaf { tt::Leaf::Ident(ident) if ident.text == "crate" => { // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path. 
- Op::Ident(tt::Ident { text: "$crate".into(), id: ident.id }) + Op::Ident(tt::Ident { text: "$crate".into(), span: ident.span }) } tt::Leaf::Ident(ident) => { let kind = eat_fragment_kind(src, mode)?; let name = ident.text.clone(); - let id = ident.id; + let id = ident.span; Op::Var { name, kind, id } } tt::Leaf::Literal(lit) if is_boolean_literal(lit) => { let kind = eat_fragment_kind(src, mode)?; let name = lit.text.clone(); - let id = lit.id; + let id = lit.span; Op::Var { name, kind, id } } tt::Leaf::Punct(punct @ tt::Punct { char: '$', .. }) => match mode { @@ -284,7 +284,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result { let func = src.expect_ident()?; let args = src.expect_subtree()?; - if args.delimiter_kind() != Some(tt::DelimiterKind::Parenthesis) { + if args.delimiter.kind != tt::DelimiterKind::Parenthesis { return Err(()); } @@ -293,7 +293,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result { let op = match &*func.text { "ignore" => { let ident = args.expect_ident()?; - Op::Ignore { name: ident.text.clone(), id: ident.id } + Op::Ignore { name: ident.text.clone(), id: ident.span } } "index" => { let depth = if args.len() == 0 { 0 } else { args.expect_u32_literal()? }; diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 5c965055634eb..fbf6b53006ad1 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -8,9 +8,16 @@ use syntax::{ SyntaxKind::*, SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T, }; -use tt::buffer::{Cursor, TokenBuffer}; -use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap}; +use crate::{ + to_parser_input::to_parser_input, + tt::{ + self, + buffer::{Cursor, TokenBuffer}, + }, + tt_iter::TtIter, + TokenMap, +}; #[cfg(test)] mod tests; @@ -74,9 +81,10 @@ pub fn token_tree_to_syntax_node( entry_point: parser::TopEntryPoint, ) -> (Parse, TokenMap) { let buffer = match tt { - tt::Subtree { delimiter: None, token_trees } => { - TokenBuffer::from_tokens(token_trees.as_slice()) - } + tt::Subtree { + delimiter: tt::Delimiter { kind: tt::DelimiterKind::Invisible, .. 
}, + token_trees, + } => TokenBuffer::from_tokens(token_trees.as_slice()), _ => TokenBuffer::from_subtree(tt), }; let parser_input = to_parser_input(&buffer); @@ -92,8 +100,7 @@ pub fn token_tree_to_syntax_node( parser::Step::Error { msg } => tree_sink.error(msg.to_string()), } } - let (parse, range_map) = tree_sink.finish(); - (parse, range_map) + tree_sink.finish() } /// Convert a string to a `TokenTree` @@ -132,7 +139,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec { res.push(match expanded.value { None => break, Some(tt @ tt::TokenTree::Leaf(_)) => { - tt::Subtree { delimiter: None, token_trees: vec![tt] } + tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt] } } Some(tt::TokenTree::Subtree(tt)) => tt, }); @@ -145,7 +152,10 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec { } if iter.peek_n(0).is_some() { - res.push(tt::Subtree { delimiter: None, token_trees: iter.cloned().collect() }); + res.push(tt::Subtree { + delimiter: tt::Delimiter::unspecified(), + token_trees: iter.cloned().collect(), + }); } res @@ -159,7 +169,7 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { } let entry = StackEntry { - subtree: tt::Subtree { delimiter: None, ..Default::default() }, + subtree: tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }, // never used (delimiter is `None`) idx: !0, open_range: TextRange::empty(TextSize::of('.')), @@ -186,7 +196,7 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) = sub.token_trees.get_mut(2) { - lit.id = id + lit.span = id } } tt @@ -199,13 +209,14 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { assert_eq!(range.len(), TextSize::of('.')); } - if let Some(delim) = subtree.delimiter { - let expected = match delim.kind { - tt::DelimiterKind::Parenthesis => T![')'], - tt::DelimiterKind::Brace => T!['}'], - tt::DelimiterKind::Bracket => T![']'], - }; + let expected = match subtree.delimiter.kind { + tt::DelimiterKind::Parenthesis => Some(T![')']), + tt::DelimiterKind::Brace => Some(T!['}']), + tt::DelimiterKind::Bracket => Some(T![']']), + tt::DelimiterKind::Invisible => None, + }; + if let Some(expected) = expected { if kind == expected { if let Some(entry) = stack.pop() { conv.id_alloc().close_delim(entry.idx, Some(range)); @@ -223,9 +234,11 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { }; if let Some(kind) = delim { - let mut subtree = tt::Subtree::default(); let (id, idx) = conv.id_alloc().open_delim(range, synth_id); - subtree.delimiter = Some(tt::Delimiter { id, kind }); + let subtree = tt::Subtree { + delimiter: tt::Delimiter { open: id, close: tt::TokenId::UNSPECIFIED, kind }, + token_trees: vec![], + }; stack.push(StackEntry { subtree, idx, open_range: range }); continue; } @@ -240,13 +253,20 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { panic!("Token from lexer must be single char: token = {token:#?}"); } }; - tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) }) - .into() + tt::Leaf::from(tt::Punct { + char, + spacing, + span: conv.id_alloc().alloc(range, synth_id), + }) + .into() } else { macro_rules! 
make_leaf { ($i:ident) => { - tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) } - .into() + tt::$i { + span: conv.id_alloc().alloc(range, synth_id), + text: token.to_text(conv), + } + .into() }; } let leaf: tt::Leaf = match kind { @@ -261,14 +281,14 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { let apostrophe = tt::Leaf::from(tt::Punct { char: '\'', spacing: tt::Spacing::Joint, - id: conv.id_alloc().alloc(r, synth_id), + span: conv.id_alloc().alloc(r, synth_id), }); result.push(apostrophe.into()); let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); let ident = tt::Leaf::from(tt::Ident { text: SmolStr::new(&token.to_text(conv)[1..]), - id: conv.id_alloc().alloc(r, synth_id), + span: conv.id_alloc().alloc(r, synth_id), }); result.push(ident.into()); continue; @@ -289,11 +309,12 @@ fn convert_tokens(conv: &mut C) -> tt::Subtree { conv.id_alloc().close_delim(entry.idx, None); let leaf: tt::Leaf = tt::Punct { - id: conv.id_alloc().alloc(entry.open_range, None), - char: match entry.subtree.delimiter.unwrap().kind { + span: conv.id_alloc().alloc(entry.open_range, None), + char: match entry.subtree.delimiter.kind { tt::DelimiterKind::Parenthesis => '(', tt::DelimiterKind::Brace => '{', tt::DelimiterKind::Bracket => '[', + tt::DelimiterKind::Invisible => '$', }, spacing: tt::Spacing::Alone, } @@ -373,10 +394,11 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option token_trees.push(mk_punct('!')); } token_trees.push(tt::TokenTree::from(tt::Subtree { - delimiter: Some(tt::Delimiter { + delimiter: tt::Delimiter { + open: tt::TokenId::UNSPECIFIED, + close: tt::TokenId::UNSPECIFIED, kind: tt::DelimiterKind::Bracket, - id: tt::TokenId::unspecified(), - }), + }, token_trees: meta_tkns, })); @@ -386,7 +408,7 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option fn mk_ident(s: &str) -> tt::TokenTree { tt::TokenTree::from(tt::Leaf::from(tt::Ident { text: s.into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), })) } @@ -394,12 +416,12 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone, - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), })) } fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree { - let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() }; + let lit = tt::Literal { text: doc_comment_text(comment), span: tt::TokenId::unspecified() }; tt::TokenTree::from(tt::Leaf::from(lit)) } @@ -761,15 +783,16 @@ impl<'a> TtTreeSink<'a> { } } -fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str { +fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { let texts = match d { tt::DelimiterKind::Parenthesis => "()", tt::DelimiterKind::Brace => "{}", tt::DelimiterKind::Bracket => "[]", + tt::DelimiterKind::Invisible => return None, }; let idx = closing as usize; - &texts[idx..texts.len() - (1 - idx)] + Some(&texts[idx..texts.len() - (1 - idx)]) } impl<'a> TtTreeSink<'a> { @@ -790,13 +813,16 @@ impl<'a> TtTreeSink<'a> { Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => { // Mark the range if needed let (text, id) = match leaf { - tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.id), + tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.span), tt::Leaf::Punct(punct) => { assert!(punct.char.is_ascii()); tmp = punct.char as u8; - (std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), punct.id) + ( + 
std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), + punct.span, + ) } - tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.id), + tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.span), }; let range = TextRange::at(self.text_pos, TextSize::of(text)); self.token_map.insert(id, range); @@ -805,10 +831,10 @@ impl<'a> TtTreeSink<'a> { } Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => { self.cursor = self.cursor.subtree().unwrap(); - match subtree.delimiter { - Some(d) => { - self.open_delims.insert(d.id, self.text_pos); - delim_to_str(d.kind, false) + match delim_to_str(subtree.delimiter.kind, false) { + Some(it) => { + self.open_delims.insert(subtree.delimiter.open, self.text_pos); + it } None => continue, } @@ -816,15 +842,21 @@ impl<'a> TtTreeSink<'a> { None => { let parent = self.cursor.end().unwrap(); self.cursor = self.cursor.bump(); - match parent.delimiter { - Some(d) => { - if let Some(open_delim) = self.open_delims.get(&d.id) { + match delim_to_str(parent.delimiter.kind, true) { + Some(it) => { + if let Some(open_delim) = + self.open_delims.get(&parent.delimiter.open) + { let open_range = TextRange::at(*open_delim, TextSize::of('(')); let close_range = TextRange::at(self.text_pos, TextSize::of('(')); - self.token_map.insert_delim(d.id, open_range, close_range); + self.token_map.insert_delim( + parent.delimiter.open, + open_range, + close_range, + ); } - delim_to_str(d.kind, true) + it } None => continue, } diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs index c1a6083655822..fa0125f3e9e04 100644 --- a/crates/mbe/src/syntax_bridge/tests.rs +++ b/crates/mbe/src/syntax_bridge/tests.rs @@ -29,8 +29,8 @@ fn check_punct_spacing(fixture: &str) { let mut cursor = buf.begin(); while !cursor.eof() { while let Some(token_tree) = cursor.token_tree() { - if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, id, .. }), _) = token_tree { - if let Some(expected) = annotations.remove(id) { + if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, span, .. }), _) = token_tree { + if let Some(expected) = annotations.remove(span) { assert_eq!(expected, *spacing); } } diff --git a/crates/mbe/src/to_parser_input.rs b/crates/mbe/src/to_parser_input.rs index 7013aa58b55dc..d4c19b3ab8cd1 100644 --- a/crates/mbe/src/to_parser_input.rs +++ b/crates/mbe/src/to_parser_input.rs @@ -2,7 +2,8 @@ //! format that works for our parser. 
use syntax::{SyntaxKind, SyntaxKind::*, T}; -use tt::buffer::TokenBuffer; + +use crate::tt::buffer::TokenBuffer; pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input { let mut res = parser::Input::default(); @@ -70,23 +71,25 @@ pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input { cursor.bump() } Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => { - if let Some(d) = subtree.delimiter_kind() { - res.push(match d { - tt::DelimiterKind::Parenthesis => T!['('], - tt::DelimiterKind::Brace => T!['{'], - tt::DelimiterKind::Bracket => T!['['], - }); + if let Some(kind) = match subtree.delimiter.kind { + tt::DelimiterKind::Parenthesis => Some(T!['(']), + tt::DelimiterKind::Brace => Some(T!['{']), + tt::DelimiterKind::Bracket => Some(T!['[']), + tt::DelimiterKind::Invisible => None, + } { + res.push(kind); } cursor.subtree().unwrap() } None => match cursor.end() { Some(subtree) => { - if let Some(d) = subtree.delimiter_kind() { - res.push(match d { - tt::DelimiterKind::Parenthesis => T![')'], - tt::DelimiterKind::Brace => T!['}'], - tt::DelimiterKind::Bracket => T![']'], - }) + if let Some(kind) = match subtree.delimiter.kind { + tt::DelimiterKind::Parenthesis => Some(T![')']), + tt::DelimiterKind::Brace => Some(T!['}']), + tt::DelimiterKind::Bracket => Some(T![']']), + tt::DelimiterKind::Invisible => None, + } { + res.push(kind); } cursor.bump() } diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index 7787c74da8837..e5f6b1372209f 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -3,9 +3,8 @@ use smallvec::{smallvec, SmallVec}; use syntax::SyntaxKind; -use tt::buffer::TokenBuffer; -use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult}; +use crate::{to_parser_input::to_parser_input, tt, ExpandError, ExpandResult}; #[derive(Debug, Clone)] pub(crate) struct TtIter<'a> { @@ -135,7 +134,7 @@ impl<'a> TtIter<'a> { &mut self, entry_point: parser::PrefixEntryPoint, ) -> ExpandResult> { - let buffer = TokenBuffer::from_tokens(self.inner.as_slice()); + let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice()); let parser_input = to_parser_input(&buffer); let tree_traversal = entry_point.parse(&parser_input); @@ -178,7 +177,7 @@ impl<'a> TtIter<'a> { 1 => Some(res[0].cloned()), 0 => None, _ => Some(tt::TokenTree::Subtree(tt::Subtree { - delimiter: None, + delimiter: tt::Delimiter::unspecified(), token_trees: res.into_iter().map(|it| it.cloned()).collect(), })), }; diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 52f976e4576af..bb381c4d44e9d 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -19,7 +19,8 @@ use std::{ }; use serde::{Deserialize, Serialize}; -use tt::Subtree; + +use ::tt::token_id as tt; use crate::{ msg::{ExpandMacro, FlatTree, PanicMessage}, @@ -151,10 +152,10 @@ impl ProcMacro { pub fn expand( &self, - subtree: &Subtree, - attr: Option<&Subtree>, + subtree: &tt::Subtree, + attr: Option<&tt::Subtree>, env: Vec<(String, String)>, - ) -> Result, ServerError> { + ) -> Result, ServerError> { let current_dir = env .iter() .find(|(name, _)| name == "CARGO_MANIFEST_DIR") diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index f9c2b9fda3aee..9b7bcaeffe7f7 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -107,27 +107,31 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { #[cfg(test)] mod tests { use super::*; - use tt::*; + 
use crate::tt::*; fn fixture_token_tree() -> Subtree { - let mut subtree = Subtree::default(); + let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() }; subtree .token_trees - .push(TokenTree::Leaf(Ident { text: "struct".into(), id: TokenId(0) }.into())); + .push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into())); subtree .token_trees - .push(TokenTree::Leaf(Ident { text: "Foo".into(), id: TokenId(1) }.into())); + .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into())); subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal { text: "Foo".into(), - id: TokenId::unspecified(), + span: TokenId::unspecified(), }))); subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct { char: '@', - id: TokenId::unspecified(), + span: TokenId::unspecified(), spacing: Spacing::Joint, }))); subtree.token_trees.push(TokenTree::Subtree(Subtree { - delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }), + delimiter: Delimiter { + open: TokenId(2), + close: TokenId::UNSPECIFIED, + kind: DelimiterKind::Brace, + }, token_trees: vec![], })); subtree diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index b178c46263e0a..fd3202e0b284c 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -38,7 +38,8 @@ use std::collections::{HashMap, VecDeque}; use serde::{Deserialize, Serialize}; -use tt::TokenId; + +use crate::tt::{self, TokenId}; #[derive(Serialize, Deserialize, Debug)] pub struct FlatTree { @@ -52,7 +53,7 @@ pub struct FlatTree { struct SubtreeRepr { id: tt::TokenId, - kind: Option, + kind: tt::DelimiterKind, tt: [u32; 2], } @@ -124,19 +125,19 @@ impl FlatTree { impl SubtreeRepr { fn write(self) -> [u32; 4] { let kind = match self.kind { - None => 0, - Some(tt::DelimiterKind::Parenthesis) => 1, - Some(tt::DelimiterKind::Brace) => 2, - Some(tt::DelimiterKind::Bracket) => 3, + tt::DelimiterKind::Invisible => 0, + tt::DelimiterKind::Parenthesis => 1, + tt::DelimiterKind::Brace => 2, + tt::DelimiterKind::Bracket => 3, }; [self.id.0, kind, self.tt[0], self.tt[1]] } fn read([id, kind, lo, len]: [u32; 4]) -> SubtreeRepr { let kind = match kind { - 0 => None, - 1 => Some(tt::DelimiterKind::Parenthesis), - 2 => Some(tt::DelimiterKind::Brace), - 3 => Some(tt::DelimiterKind::Bracket), + 0 => tt::DelimiterKind::Invisible, + 1 => tt::DelimiterKind::Parenthesis, + 2 => tt::DelimiterKind::Brace, + 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; SubtreeRepr { id: TokenId(id), kind, tt: [lo, len] } @@ -216,7 +217,7 @@ impl<'a> Writer<'a> { tt::Leaf::Literal(lit) => { let idx = self.literal.len() as u32; let text = self.intern(&lit.text); - self.literal.push(LiteralRepr { id: lit.id, text }); + self.literal.push(LiteralRepr { id: lit.span, text }); idx << 2 | 0b01 } tt::Leaf::Punct(punct) => { @@ -224,14 +225,14 @@ impl<'a> Writer<'a> { self.punct.push(PunctRepr { char: punct.char, spacing: punct.spacing, - id: punct.id, + id: punct.span, }); idx << 2 | 0b10 } tt::Leaf::Ident(ident) => { let idx = self.ident.len() as u32; let text = self.intern(&ident.text); - self.ident.push(IdentRepr { id: ident.id, text }); + self.ident.push(IdentRepr { id: ident.span, text }); idx << 2 | 0b11 } }, @@ -243,8 +244,8 @@ impl<'a> Writer<'a> { fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 { let idx = self.subtree.len(); - let delimiter_id = subtree.delimiter.map_or(TokenId::unspecified(), |it| it.id); - let delimiter_kind 
= subtree.delimiter.map(|it| it.kind); + let delimiter_id = subtree.delimiter.open; + let delimiter_kind = subtree.delimiter.kind; self.subtree.push(SubtreeRepr { id: delimiter_id, kind: delimiter_kind, tt: [!0, !0] }); self.work.push_back((idx, subtree)); idx as u32 @@ -276,7 +277,11 @@ impl Reader { let repr = &self.subtree[i]; let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize]; let s = tt::Subtree { - delimiter: repr.kind.map(|kind| tt::Delimiter { id: repr.id, kind }), + delimiter: tt::Delimiter { + open: repr.id, + close: TokenId::UNSPECIFIED, + kind: repr.kind, + }, token_trees: token_trees .iter() .copied() @@ -291,7 +296,7 @@ impl Reader { let repr = &self.literal[idx]; tt::Leaf::Literal(tt::Literal { text: self.text[repr.text as usize].as_str().into(), - id: repr.id, + span: repr.id, }) .into() } @@ -300,7 +305,7 @@ impl Reader { tt::Leaf::Punct(tt::Punct { char: repr.char, spacing: repr.spacing, - id: repr.id, + span: repr.id, }) .into() } @@ -308,7 +313,7 @@ impl Reader { let repr = &self.ident[idx]; tt::Leaf::Ident(tt::Ident { text: self.text[repr.text as usize].as_str().into(), - id: repr.id, + span: repr.id, }) .into() } diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs b/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs index 76e89e3191a67..93805c89354a5 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs @@ -11,6 +11,7 @@ mod ra_server; use libloading::Library; use proc_macro_api::ProcMacroKind; +use super::tt; use super::PanicMessage; pub use ra_server::TokenStream; diff --git a/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs index f82f20c37bc3d..30baf3a13f57f 100644 --- a/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs @@ -15,6 +15,8 @@ use std::hash::Hash; use std::ops::Bound; use std::{ascii, vec::IntoIter}; +use crate::tt; + type Group = tt::Subtree; type TokenTree = tt::TokenTree; type Punct = tt::Punct; @@ -33,7 +35,7 @@ impl TokenStream { } pub fn with_subtree(subtree: tt::Subtree) -> Self { - if subtree.delimiter.is_some() { + if subtree.delimiter.kind != tt::DelimiterKind::Invisible { TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] } } else { TokenStream { token_trees: subtree.token_trees } @@ -41,7 +43,7 @@ impl TokenStream { } pub fn into_subtree(self) -> tt::Subtree { - tt::Subtree { delimiter: None, token_trees: self.token_trees } + tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees } } pub fn is_empty(&self) -> bool { @@ -84,7 +86,9 @@ impl Extend for TokenStream { for item in streams { for tkn in item { match tkn { - tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => { + tt::TokenTree::Subtree(subtree) + if subtree.delimiter.kind == tt::DelimiterKind::Invisible => + { self.token_trees.extend(subtree.token_trees); } _ => { @@ -165,7 +169,7 @@ pub struct TokenStreamBuilder { pub mod token_stream { use std::str::FromStr; - use super::{TokenStream, TokenTree}; + use super::{tt, TokenStream, TokenTree}; /// An iterator over `TokenStream`'s `TokenTree`s. 
/// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, @@ -202,15 +206,17 @@ pub mod token_stream { impl ToString for TokenStream { fn to_string(&self) -> String { - tt::pretty(&self.token_trees) + ::tt::pretty(&self.token_trees) } } fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree { tt::Subtree { - delimiter: subtree - .delimiter - .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }), + delimiter: tt::Delimiter { + open: tt::TokenId::UNSPECIFIED, + close: tt::TokenId::UNSPECIFIED, + ..subtree.delimiter + }, token_trees: subtree .token_trees .into_iter() @@ -233,13 +239,13 @@ pub mod token_stream { fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf { match leaf { tt::Leaf::Literal(lit) => { - tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit }) + tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit }) } tt::Leaf::Punct(punct) => { - tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct }) + tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct }) } tt::Leaf::Ident(ident) => { - tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident }) + tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident }) } } } @@ -389,22 +395,22 @@ impl server::TokenStream for RustAnalyzer { } } -fn delim_to_internal(d: bridge::Delimiter) -> Option { +fn delim_to_internal(d: bridge::Delimiter) -> tt::Delimiter { let kind = match d { bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, bridge::Delimiter::Brace => tt::DelimiterKind::Brace, bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket, - bridge::Delimiter::None => return None, + bridge::Delimiter::None => tt::DelimiterKind::Invisible, }; - Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }) + tt::Delimiter { open: tt::TokenId::unspecified(), close: tt::TokenId::unspecified(), kind } } -fn delim_to_external(d: Option) -> bridge::Delimiter { - match d.map(|it| it.kind) { - Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis, - Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace, - Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket, - None => bridge::Delimiter::None, +fn delim_to_external(d: tt::Delimiter) -> bridge::Delimiter { + match d.kind { + tt::DelimiterKind::Parenthesis => bridge::Delimiter::Parenthesis, + tt::DelimiterKind::Brace => bridge::Delimiter::Brace, + tt::DelimiterKind::Bracket => bridge::Delimiter::Bracket, + tt::DelimiterKind::Invisible => bridge::Delimiter::None, } } @@ -443,23 +449,19 @@ impl server::Group for RustAnalyzer { } fn span(&mut self, group: &Self::Group) -> Self::Span { - group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified) + group.delimiter.open } fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) { - if let Some(delim) = &mut group.delimiter { - delim.id = span; - } + group.delimiter.open = span; } fn span_open(&mut self, group: &Self::Group) -> Self::Span { - // FIXME we only store one `TokenId` for the delimiters - group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified) + group.delimiter.open } fn span_close(&mut self, group: &Self::Group) -> Self::Span { - // FIXME we only store one `TokenId` for the delimiters - group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified) + group.delimiter.close } } @@ -468,7 +470,7 @@ impl server::Punct for RustAnalyzer { tt::Punct { char: ch, spacing: 
spacing_to_internal(spacing), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), } } fn as_char(&mut self, punct: Self::Punct) -> char { @@ -478,28 +480,27 @@ impl server::Punct for RustAnalyzer { spacing_to_external(punct.spacing) } fn span(&mut self, punct: Self::Punct) -> Self::Span { - punct.id + punct.span } fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct { - tt::Punct { id: span, ..punct } + tt::Punct { span: span, ..punct } } } impl server::Ident for RustAnalyzer { fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident { - IdentId(self.ident_interner.intern(&IdentData(tt::Ident::new_with_is_raw( - string.into(), + IdentId(self.ident_interner.intern(&IdentData(tt::Ident { + text: if is_raw { ::tt::SmolStr::from_iter(["r#", string]) } else { string.into() }, span, - is_raw, - )))) + }))) } fn span(&mut self, ident: Self::Ident) -> Self::Span { - self.ident_interner.get(ident.0).0.id + self.ident_interner.get(ident.0).0.span } fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident { let data = self.ident_interner.get(ident.0); - let new = IdentData(tt::Ident { id: span, ..data.0.clone() }); + let new = IdentData(tt::Ident { span: span, ..data.0.clone() }); IdentId(self.ident_interner.intern(&new)) } } @@ -511,7 +512,7 @@ impl server::Literal for RustAnalyzer { "".to_owned() } fn from_str(&mut self, s: &str) -> Result { - Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() }) + Ok(Literal { text: s.into(), span: tt::TokenId::unspecified() }) } fn symbol(&mut self, literal: &Self::Literal) -> String { literal.text.to_string() @@ -529,7 +530,7 @@ impl server::Literal for RustAnalyzer { Ok(n) => n.to_string(), Err(_) => n.parse::().unwrap().to_string(), }; - Literal { text: n.into(), id: tt::TokenId::unspecified() } + Literal { text: n.into(), span: tt::TokenId::unspecified() } } fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal { @@ -549,7 +550,7 @@ impl server::Literal for RustAnalyzer { let text = def_suffixed_integer! 
{kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize}; - Literal { text: text.into(), id: tt::TokenId::unspecified() } + Literal { text: text.into(), span: tt::TokenId::unspecified() } } fn float(&mut self, n: &str) -> Self::Literal { @@ -558,19 +559,19 @@ impl server::Literal for RustAnalyzer { if !text.contains('.') { text += ".0" } - Literal { text: text.into(), id: tt::TokenId::unspecified() } + Literal { text: text.into(), span: tt::TokenId::unspecified() } } fn f32(&mut self, n: &str) -> Self::Literal { let n: f32 = n.parse().unwrap(); let text = format!("{n}f32"); - Literal { text: text.into(), id: tt::TokenId::unspecified() } + Literal { text: text.into(), span: tt::TokenId::unspecified() } } fn f64(&mut self, n: &str) -> Self::Literal { let n: f64 = n.parse().unwrap(); let text = format!("{n}f64"); - Literal { text: text.into(), id: tt::TokenId::unspecified() } + Literal { text: text.into(), span: tt::TokenId::unspecified() } } fn string(&mut self, string: &str) -> Self::Literal { @@ -578,11 +579,11 @@ impl server::Literal for RustAnalyzer { for ch in string.chars() { escaped.extend(ch.escape_debug()); } - Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() } + Literal { text: format!("\"{escaped}\"").into(), span: tt::TokenId::unspecified() } } fn character(&mut self, ch: char) -> Self::Literal { - Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() } + Literal { text: format!("'{ch}'").into(), span: tt::TokenId::unspecified() } } fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal { @@ -593,15 +594,15 @@ impl server::Literal for RustAnalyzer { .map(Into::::into) .collect::(); - Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() } + Literal { text: format!("b\"{string}\"").into(), span: tt::TokenId::unspecified() } } fn span(&mut self, literal: &Self::Literal) -> Self::Span { - literal.id + literal.span } fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) { - literal.id = span; + literal.span = span; } fn subspan( @@ -784,17 +785,18 @@ mod tests { token_trees: vec![ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "struct".into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), })), tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "T".into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), })), tt::TokenTree::Subtree(tt::Subtree { - delimiter: Some(tt::Delimiter { - id: tt::TokenId::unspecified(), + delimiter: tt::Delimiter { + open: tt::TokenId::unspecified(), + close: tt::TokenId::unspecified(), kind: tt::DelimiterKind::Brace, - }), + }, token_trees: vec![], }), ], @@ -807,13 +809,14 @@ mod tests { fn test_ra_server_from_str() { use std::str::FromStr; let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { - delimiter: Some(tt::Delimiter { - id: tt::TokenId::unspecified(), + delimiter: tt::Delimiter { + open: tt::TokenId::unspecified(), + close: tt::TokenId::unspecified(), kind: tt::DelimiterKind::Parenthesis, - }), + }, token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "a".into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), }))], }); @@ -830,7 +833,7 @@ mod tests { underscore.token_trees[0], tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "_".into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), })) ); } diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs index 
243972b04997c..0a3b8866a7fd5 100644 --- a/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs +++ b/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs @@ -9,7 +9,7 @@ mod ra_server; use libloading::Library; use proc_macro_api::ProcMacroKind; -use super::PanicMessage; +use super::{tt, PanicMessage}; pub use ra_server::TokenStream; diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs index 068f79f824dca..d258a02472909 100644 --- a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs +++ b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs @@ -22,6 +22,8 @@ pub use symbol::*; use std::ops::Bound; +use crate::tt; + type Group = tt::Subtree; type TokenTree = tt::TokenTree; type Punct = tt::Punct; @@ -108,8 +110,9 @@ impl server::TokenStream for RustAnalyzer { bridge::TokenTree::Ident(ident) => { let text = ident.sym.text(); - let text = if ident.is_raw { tt::SmolStr::from_iter(["r#", &text]) } else { text }; - let ident: tt::Ident = tt::Ident { text, id: ident.span }; + let text = + if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; + let ident: tt::Ident = tt::Ident { text, span: ident.span }; let leaf = tt::Leaf::from(ident); let tree = TokenTree::from(leaf); Self::TokenStream::from_iter(vec![tree]) @@ -118,9 +121,9 @@ impl server::TokenStream for RustAnalyzer { bridge::TokenTree::Literal(literal) => { let literal = LiteralFormatter(literal); let text = literal - .with_stringify_parts(|parts| tt::SmolStr::from_iter(parts.iter().copied())); + .with_stringify_parts(|parts| ::tt::SmolStr::from_iter(parts.iter().copied())); - let literal = tt::Literal { text, id: literal.0.span }; + let literal = tt::Literal { text, span: literal.0.span }; let leaf = tt::Leaf::from(literal); let tree = TokenTree::from(leaf); Self::TokenStream::from_iter(vec![tree]) @@ -130,7 +133,7 @@ impl server::TokenStream for RustAnalyzer { let punct = tt::Punct { char: p.ch as char, spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, - id: p.span, + span: p.span, }; let leaf = tt::Leaf::from(punct); let tree = TokenTree::from(leaf); @@ -184,7 +187,7 @@ impl server::TokenStream for RustAnalyzer { bridge::TokenTree::Ident(bridge::Ident { sym: Symbol::intern(ident.text.trim_start_matches("r#")), is_raw: ident.text.starts_with("r#"), - span: ident.id, + span: ident.span, }) } tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { @@ -194,14 +197,14 @@ impl server::TokenStream for RustAnalyzer { symbol: Symbol::intern(&lit.text), // FIXME: handle suffixes suffix: None, - span: lit.id, + span: lit.span, }) } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { bridge::TokenTree::Punct(bridge::Punct { ch: punct.char as u8, joint: punct.spacing == Spacing::Joint, - span: punct.id, + span: punct.span, }) } tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { @@ -211,31 +214,29 @@ impl server::TokenStream for RustAnalyzer { } else { Some(subtree.token_trees.into_iter().collect()) }, - span: bridge::DelimSpan::from_single( - subtree.delimiter.map_or(Span::unspecified(), |del| del.id), - ), + span: bridge::DelimSpan::from_single(subtree.delimiter.open), }), }) .collect() } } -fn delim_to_internal(d: proc_macro::Delimiter) -> Option { +fn delim_to_internal(d: proc_macro::Delimiter) -> tt::Delimiter { let kind = match d { proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace, proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket, - 
proc_macro::Delimiter::None => return None, + proc_macro::Delimiter::None => tt::DelimiterKind::Invisible, }; - Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }) + tt::Delimiter { open: tt::TokenId::unspecified(), close: tt::TokenId::unspecified(), kind } } -fn delim_to_external(d: Option) -> proc_macro::Delimiter { - match d.map(|it| it.kind) { - Some(tt::DelimiterKind::Parenthesis) => proc_macro::Delimiter::Parenthesis, - Some(tt::DelimiterKind::Brace) => proc_macro::Delimiter::Brace, - Some(tt::DelimiterKind::Bracket) => proc_macro::Delimiter::Bracket, - None => proc_macro::Delimiter::None, +fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter { + match d.kind { + tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis, + tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace, + tt::DelimiterKind::Bracket => proc_macro::Delimiter::Bracket, + tt::DelimiterKind::Invisible => proc_macro::Delimiter::None, } } @@ -349,7 +350,7 @@ impl server::Server for RustAnalyzer { } fn intern_symbol(ident: &str) -> Self::Symbol { - Symbol::intern(&tt::SmolStr::from(ident)) + Symbol::intern(&::tt::SmolStr::from(ident)) } fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { @@ -413,17 +414,18 @@ mod tests { token_trees: vec![ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "struct".into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), })), tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "T".into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), })), tt::TokenTree::Subtree(tt::Subtree { - delimiter: Some(tt::Delimiter { - id: tt::TokenId::unspecified(), + delimiter: tt::Delimiter { + open: tt::TokenId::unspecified(), + close: tt::TokenId::unspecified(), kind: tt::DelimiterKind::Brace, - }), + }, token_trees: vec![], }), ], @@ -436,13 +438,14 @@ mod tests { fn test_ra_server_from_str() { use std::str::FromStr; let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { - delimiter: Some(tt::Delimiter { - id: tt::TokenId::unspecified(), + delimiter: tt::Delimiter { + open: tt::TokenId::unspecified(), + close: tt::TokenId::unspecified(), kind: tt::DelimiterKind::Parenthesis, - }), + }, token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "a".into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), }))], }); @@ -459,7 +462,7 @@ mod tests { underscore.token_trees[0], tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "_".into(), - id: tt::TokenId::unspecified(), + span: tt::TokenId::unspecified(), })) ); } diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs index 113bb52c1af53..7de30e73928a3 100644 --- a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs +++ b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs @@ -1,6 +1,6 @@ //! 
TokenStream implementation used by sysroot ABI -use tt::TokenTree; +use crate::tt::{self, TokenTree}; #[derive(Debug, Default, Clone)] pub struct TokenStream { @@ -13,7 +13,7 @@ impl TokenStream { } pub fn with_subtree(subtree: tt::Subtree) -> Self { - if subtree.delimiter.is_some() { + if subtree.delimiter.kind != tt::DelimiterKind::Invisible { TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] } } else { TokenStream { token_trees: subtree.token_trees } @@ -21,7 +21,7 @@ impl TokenStream { } pub fn into_subtree(self) -> tt::Subtree { - tt::Subtree { delimiter: None, token_trees: self.token_trees } + tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees } } pub fn is_empty(&self) -> bool { @@ -64,7 +64,9 @@ impl Extend for TokenStream { for item in streams { for tkn in item { match tkn { - tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => { + tt::TokenTree::Subtree(subtree) + if subtree.delimiter.kind != tt::DelimiterKind::Invisible => + { self.token_trees.extend(subtree.token_trees); } _ => { @@ -84,7 +86,7 @@ pub struct TokenStreamBuilder { pub mod token_stream { use std::str::FromStr; - use super::{TokenStream, TokenTree}; + use super::{tt, TokenStream, TokenTree}; /// An iterator over `TokenStream`'s `TokenTree`s. /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, @@ -121,15 +123,17 @@ pub mod token_stream { impl ToString for TokenStream { fn to_string(&self) -> String { - tt::pretty(&self.token_trees) + ::tt::pretty(&self.token_trees) } } fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree { tt::Subtree { - delimiter: subtree - .delimiter - .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }), + delimiter: tt::Delimiter { + open: tt::TokenId::UNSPECIFIED, + close: tt::TokenId::UNSPECIFIED, + ..subtree.delimiter + }, token_trees: subtree .token_trees .into_iter() @@ -152,13 +156,13 @@ pub mod token_stream { fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf { match leaf { tt::Leaf::Literal(lit) => { - tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit }) + tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit }) } tt::Leaf::Punct(punct) => { - tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct }) + tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct }) } tt::Leaf::Ident(ident) => { - tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident }) + tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident }) } } } diff --git a/crates/proc-macro-srv/src/abis/mod.rs b/crates/proc-macro-srv/src/abis/mod.rs index 9f874fb02b250..04be39cffa4ad 100644 --- a/crates/proc-macro-srv/src/abis/mod.rs +++ b/crates/proc-macro-srv/src/abis/mod.rs @@ -41,6 +41,8 @@ pub(crate) use abi_sysroot::Abi as Abi_Sysroot; use libloading::Library; use proc_macro_api::{ProcMacroKind, RustCInfo}; +use crate::tt; + pub struct PanicMessage { message: Option, } diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index 0722cd89d7297..89ffd1f493e29 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -13,6 +13,8 @@ use object::Object; use paths::AbsPath; use proc_macro_api::{read_dylib_info, ProcMacroKind}; +use crate::tt; + use super::abis::Abi; const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 
2eb939a7ce581..ee70fe7d4f541 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -37,6 +37,8 @@ use proc_macro_api::{ ProcMacroKind, }; +use ::tt::token_id as tt; + #[derive(Default)] pub(crate) struct ProcMacroSrv { expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>, diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index 1ccc170f422b0..04a0ae7bc7201 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -8,7 +8,7 @@ use expect_test::expect; #[test] fn test_derive_empty() { - assert_expand("DeriveEmpty", r#"struct S;"#, expect![[r#"SUBTREE $"#]]); + assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 4294967295 4294967295"]); } #[test] @@ -17,10 +17,10 @@ fn test_derive_error() { "DeriveError", r#"struct S;"#, expect![[r##" - SUBTREE $ + SUBTREE $$ 4294967295 4294967295 IDENT compile_error 4294967295 PUNCH ! [alone] 4294967295 - SUBTREE () 4294967295 + SUBTREE () 4294967295 4294967295 LITERAL "#[derive(DeriveError)] struct S ;" 4294967295 PUNCH ; [alone] 4294967295"##]], ); @@ -32,14 +32,14 @@ fn test_fn_like_macro_noop() { "fn_like_noop", r#"ident, 0, 1, []"#, expect![[r#" - SUBTREE $ + SUBTREE $$ 4294967295 4294967295 IDENT ident 4294967295 PUNCH , [alone] 4294967295 LITERAL 0 4294967295 PUNCH , [alone] 4294967295 LITERAL 1 4294967295 PUNCH , [alone] 4294967295 - SUBTREE [] 4294967295"#]], + SUBTREE [] 4294967295 4294967295"#]], ); } @@ -49,10 +49,10 @@ fn test_fn_like_macro_clone_ident_subtree() { "fn_like_clone_tokens", r#"ident, []"#, expect![[r#" - SUBTREE $ + SUBTREE $$ 4294967295 4294967295 IDENT ident 4294967295 PUNCH , [alone] 4294967295 - SUBTREE [] 4294967295"#]], + SUBTREE [] 4294967295 4294967295"#]], ); } @@ -62,7 +62,7 @@ fn test_fn_like_macro_clone_raw_ident() { "fn_like_clone_tokens", "r#async", expect![[r#" - SUBTREE $ + SUBTREE $$ 4294967295 4294967295 IDENT r#async 4294967295"#]], ); } @@ -73,7 +73,7 @@ fn test_fn_like_mk_literals() { "fn_like_mk_literals", r#""#, expect![[r#" - SUBTREE $ + SUBTREE $$ 4294967295 4294967295 LITERAL b"byte_string" 4294967295 LITERAL 'c' 4294967295 LITERAL "string" 4294967295 @@ -90,7 +90,7 @@ fn test_fn_like_mk_idents() { "fn_like_mk_idents", r#""#, expect![[r#" - SUBTREE $ + SUBTREE $$ 4294967295 4294967295 IDENT standard 4294967295 IDENT r#raw 4294967295"#]], ); @@ -102,7 +102,7 @@ fn test_fn_like_macro_clone_literals() { "fn_like_clone_tokens", r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#, expect![[r#" - SUBTREE $ + SUBTREE $$ 4294967295 4294967295 LITERAL 1u16 4294967295 PUNCH , [alone] 4294967295 LITERAL 2_u32 4294967295 @@ -126,10 +126,10 @@ fn test_attr_macro() { r#"mod m {}"#, r#"some arguments"#, expect![[r##" - SUBTREE $ + SUBTREE $$ 4294967295 4294967295 IDENT compile_error 4294967295 PUNCH ! 
[alone] 4294967295 - SUBTREE () 4294967295 + SUBTREE () 4294967295 4294967295 LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295 PUNCH ; [alone] 4294967295"##]], ); diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index a33b8e14cf3f0..5ac5af94f5aef 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -34,6 +34,8 @@ use crate::{ op_queue::Cause, }; +use ::tt::token_id as tt; + #[derive(Debug)] pub(crate) enum ProjectWorkspaceProgress { Begin, @@ -656,7 +658,7 @@ pub(crate) fn load_proc_macro( _: Option<&tt::Subtree>, _: &Env, ) -> Result { - Ok(tt::Subtree::default()) + Ok(tt::Subtree::empty()) } } } diff --git a/crates/stdx/src/macros.rs b/crates/stdx/src/macros.rs index d91fc690cb517..1a9982fa8b2a7 100644 --- a/crates/stdx/src/macros.rs +++ b/crates/stdx/src/macros.rs @@ -43,5 +43,14 @@ macro_rules! impl_from { } )*)? )* + }; + ($($variant:ident$(<$V:ident>)?),* for $enum:ident) => { + $( + impl$(<$V>)? From<$variant$(<$V>)?> for $enum$(<$V>)? { + fn from(it: $variant$(<$V>)?) -> $enum$(<$V>)? { + $enum::$variant(it) + } + } + )* } } diff --git a/crates/tt/src/buffer.rs b/crates/tt/src/buffer.rs index d27a7aa0d4d38..4484431124e13 100644 --- a/crates/tt/src/buffer.rs +++ b/crates/tt/src/buffer.rs @@ -12,10 +12,10 @@ struct EntryPtr(EntryId, usize); /// Internal type which is used instead of `TokenTree` to represent a token tree /// within a `TokenBuffer`. #[derive(Debug)] -enum Entry<'t> { +enum Entry<'t, Span> { // Mimicking types from proc-macro. - Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId), - Leaf(&'t TokenTree), + Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId), + Leaf(&'t TokenTree), // End entries contain a pointer to the entry from the containing // token tree, or None if this is the outermost level. 
End(Option), @@ -24,16 +24,21 @@ enum Entry<'t> { /// A token tree buffer /// The safe version of `syn` [`TokenBuffer`](https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L41) #[derive(Debug)] -pub struct TokenBuffer<'t> { - buffers: Vec]>>, +pub struct TokenBuffer<'t, Span> { + buffers: Vec]>>, } -trait TokenList<'a> { - fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec>); +trait TokenList<'a, Span> { + fn entries( + &self, + ) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec>); } -impl<'a> TokenList<'a> for &'a [TokenTree] { - fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec>) { +impl<'a, Span> TokenList<'a, Span> for &'a [TokenTree] { + fn entries( + &self, + ) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec>) + { // Must contain everything in tokens and then the Entry::End let start_capacity = self.len() + 1; let mut entries = Vec::with_capacity(start_capacity); @@ -53,8 +58,11 @@ impl<'a> TokenList<'a> for &'a [TokenTree] { } } -impl<'a> TokenList<'a> for &'a Subtree { - fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec>) { +impl<'a, Span> TokenList<'a, Span> for &'a Subtree { + fn entries( + &self, + ) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec>) + { // Must contain everything in tokens and then the Entry::End let mut entries = vec![]; let mut children = vec![]; @@ -64,25 +72,25 @@ impl<'a> TokenList<'a> for &'a Subtree { } } -impl<'t> TokenBuffer<'t> { - pub fn from_tokens(tokens: &'t [TokenTree]) -> TokenBuffer<'t> { +impl<'t, Span> TokenBuffer<'t, Span> { + pub fn from_tokens(tokens: &'t [TokenTree]) -> TokenBuffer<'t, Span> { Self::new(tokens) } - pub fn from_subtree(subtree: &'t Subtree) -> TokenBuffer<'t> { + pub fn from_subtree(subtree: &'t Subtree) -> TokenBuffer<'t, Span> { Self::new(subtree) } - fn new>(tokens: T) -> TokenBuffer<'t> { + fn new>(tokens: T) -> TokenBuffer<'t, Span> { let mut buffers = vec![]; let idx = TokenBuffer::new_inner(tokens, &mut buffers, None); assert_eq!(idx, 0); TokenBuffer { buffers } } - fn new_inner>( + fn new_inner>( tokens: T, - buffers: &mut Vec]>>, + buffers: &mut Vec]>>, next: Option, ) -> usize { let (children, mut entries) = tokens.entries(); @@ -105,25 +113,25 @@ impl<'t> TokenBuffer<'t> { /// Creates a cursor referencing the first token in the buffer and able to /// traverse until the end of the buffer. 
- pub fn begin(&self) -> Cursor<'_> { + pub fn begin(&self) -> Cursor<'_, Span> { Cursor::create(self, EntryPtr(EntryId(0), 0)) } - fn entry(&self, ptr: &EntryPtr) -> Option<&Entry<'_>> { + fn entry(&self, ptr: &EntryPtr) -> Option<&Entry<'_, Span>> { let id = ptr.0; self.buffers[id.0].get(ptr.1) } } #[derive(Debug)] -pub enum TokenTreeRef<'a> { - Subtree(&'a Subtree, Option<&'a TokenTree>), - Leaf(&'a Leaf, &'a TokenTree), +pub enum TokenTreeRef<'a, Span> { + Subtree(&'a Subtree, Option<&'a TokenTree>), + Leaf(&'a Leaf, &'a TokenTree), } -impl<'a> TokenTreeRef<'a> { - pub fn cloned(&self) -> TokenTree { - match &self { +impl<'a, Span: Clone> TokenTreeRef<'a, Span> { + pub fn cloned(&self) -> TokenTree { + match self { TokenTreeRef::Subtree(subtree, tt) => match tt { Some(it) => (*it).clone(), None => (*subtree).clone().into(), @@ -135,20 +143,20 @@ impl<'a> TokenTreeRef<'a> { /// A safe version of `Cursor` from `syn` crate #[derive(Copy, Clone, Debug)] -pub struct Cursor<'a> { - buffer: &'a TokenBuffer<'a>, +pub struct Cursor<'a, Span> { + buffer: &'a TokenBuffer<'a, Span>, ptr: EntryPtr, } -impl<'a> PartialEq for Cursor<'a> { - fn eq(&self, other: &Cursor<'_>) -> bool { +impl<'a, Span> PartialEq for Cursor<'a, Span> { + fn eq(&self, other: &Cursor<'_, Span>) -> bool { self.ptr == other.ptr && std::ptr::eq(self.buffer, other.buffer) } } -impl<'a> Eq for Cursor<'a> {} +impl<'a, Span> Eq for Cursor<'a, Span> {} -impl<'a> Cursor<'a> { +impl<'a, Span> Cursor<'a, Span> { /// Check whether it is eof pub fn eof(self) -> bool { matches!(self.buffer.entry(&self.ptr), None | Some(Entry::End(None))) @@ -156,7 +164,7 @@ impl<'a> Cursor<'a> { /// If the cursor is pointing at the end of a subtree, returns /// the parent subtree - pub fn end(self) -> Option<&'a Subtree> { + pub fn end(self) -> Option<&'a Subtree> { match self.entry() { Some(Entry::End(Some(ptr))) => { let idx = ptr.1; @@ -171,13 +179,13 @@ impl<'a> Cursor<'a> { } } - fn entry(self) -> Option<&'a Entry<'a>> { + fn entry(&self) -> Option<&'a Entry<'a, Span>> { self.buffer.entry(&self.ptr) } /// If the cursor is pointing at a `Subtree`, returns /// a cursor into that subtree - pub fn subtree(self) -> Option> { + pub fn subtree(self) -> Option> { match self.entry() { Some(Entry::Subtree(_, _, entry_id)) => { Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0))) @@ -187,7 +195,7 @@ impl<'a> Cursor<'a> { } /// If the cursor is pointing at a `TokenTree`, returns it - pub fn token_tree(self) -> Option> { + pub fn token_tree(self) -> Option> { match self.entry() { Some(Entry::Leaf(tt)) => match tt { TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, tt)), @@ -198,12 +206,12 @@ impl<'a> Cursor<'a> { } } - fn create(buffer: &'a TokenBuffer<'_>, ptr: EntryPtr) -> Cursor<'a> { + fn create(buffer: &'a TokenBuffer<'_, Span>, ptr: EntryPtr) -> Cursor<'a, Span> { Cursor { buffer, ptr } } /// Bump the cursor - pub fn bump(self) -> Cursor<'a> { + pub fn bump(self) -> Cursor<'a, Span> { if let Some(Entry::End(exit)) = self.buffer.entry(&self.ptr) { match exit { Some(exit) => Cursor::create(self.buffer, *exit), @@ -216,7 +224,7 @@ impl<'a> Cursor<'a> { /// Bump the cursor, if it is a subtree, returns /// a cursor into that subtree - pub fn bump_subtree(self) -> Cursor<'a> { + pub fn bump_subtree(self) -> Cursor<'a, Span> { match self.entry() { Some(Entry::Subtree(_, _, _)) => self.subtree().unwrap(), _ => self.bump(), diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 353b09fd8c1ed..b7dbc82e1d66e 100644 --- a/crates/tt/src/lib.rs +++ 
b/crates/tt/src/lib.rs @@ -16,45 +16,106 @@ pub use smol_str::SmolStr; /// which source tokens. We do it by assigning an distinct identity to each /// source token and making sure that identities are preserved during macro /// expansion. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct TokenId(pub u32); +impl fmt::Debug for TokenId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + impl TokenId { + pub const UNSPECIFIED: TokenId = TokenId(!0); pub const fn unspecified() -> TokenId { - TokenId(!0) + Self::UNSPECIFIED } } -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum TokenTree { - Leaf(Leaf), - Subtree(Subtree), -} -impl_from!(Leaf, Subtree for TokenTree); +pub mod token_id { + pub use crate::{DelimiterKind, Spacing, TokenId}; + pub type Span = crate::TokenId; + pub type Subtree = crate::Subtree; + pub type Punct = crate::Punct; + pub type Delimiter = crate::Delimiter; + pub type Leaf = crate::Leaf; + pub type Ident = crate::Ident; + pub type Literal = crate::Literal; + pub type TokenTree = crate::TokenTree; + pub mod buffer { + pub type TokenBuffer<'a> = crate::buffer::TokenBuffer<'a, super::Span>; + pub type Cursor<'a> = crate::buffer::Cursor<'a, super::Span>; + pub type TokenTreeRef<'a> = crate::buffer::TokenTreeRef<'a, super::Span>; + } -impl TokenTree { - pub fn empty() -> Self { - TokenTree::Subtree(Subtree::default()) + impl Delimiter { + pub const UNSPECIFIED: Self = Self { + open: TokenId::UNSPECIFIED, + close: TokenId::UNSPECIFIED, + kind: DelimiterKind::Invisible, + }; + pub const fn unspecified() -> Self { + Self::UNSPECIFIED + } + } + impl Subtree { + pub const fn empty() -> Self { + Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] } + } + } + impl TokenTree { + pub const fn empty() -> Self { + Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] }) + } } } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct SyntaxContext(pub u32); + +// #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +// pub struct Span { +// pub id: TokenId, +// pub ctx: SyntaxContext, +// } +// pub type Span = (TokenId, SyntaxContext); + #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Leaf { - Literal(Literal), - Punct(Punct), - Ident(Ident), +pub enum TokenTree { + Leaf(Leaf), + Subtree(Subtree), } -impl_from!(Literal, Punct, Ident for Leaf); +impl_from!(Leaf, Subtree for TokenTree); -#[derive(Clone, PartialEq, Eq, Hash, Default)] -pub struct Subtree { - pub delimiter: Option, - pub token_trees: Vec, +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub enum Leaf { + Literal(Literal), + Punct(Punct), + Ident(Ident), +} + +impl Leaf { + pub fn span(&self) -> &Span { + match self { + Leaf::Literal(it) => &it.span, + Leaf::Punct(it) => &it.span, + Leaf::Ident(it) => &it.span, + } + } +} +impl_from!(Literal, Punct, Ident for Leaf); + +#[derive(Clone, PartialEq, Eq, Hash)] +pub struct Subtree { + // FIXME, this should not be Option + pub delimiter: Delimiter, + pub token_trees: Vec>, } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] -pub struct Delimiter { - pub id: TokenId, +pub struct Delimiter { + pub open: Span, + pub close: Span, pub kind: DelimiterKind, } @@ -63,19 +124,20 @@ pub enum DelimiterKind { Parenthesis, Brace, Bracket, + Invisible, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Literal { +pub struct Literal { pub text: SmolStr, - pub id: TokenId, + pub span: Span, } #[derive(Debug, Clone, Copy, PartialEq, Eq, 
Hash)] -pub struct Punct { +pub struct Punct { pub char: char, pub spacing: Spacing, - pub id: TokenId, + pub span: Span, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -85,39 +147,25 @@ pub enum Spacing { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Ident { - /// Identifier or keyword. Unlike rustc, we keep "r#" prefix when it represents a raw identifier. +/// Identifier or keyword. Unlike rustc, we keep "r#" prefix when it represents a raw identifier. +pub struct Ident { pub text: SmolStr, - pub id: TokenId, + pub span: Span, } -impl Ident { - /// Constructor intended to be used only by proc macro server. `text` should not contain raw - /// identifier prefix. - pub fn new_with_is_raw(text: SmolStr, id: TokenId, is_raw: bool) -> Self { - let text = if is_raw { SmolStr::from_iter(["r#", &text]) } else { text }; - Ident { text, id } - } -} - -impl Leaf { - pub fn id(&self) -> TokenId { - match self { - Leaf::Literal(l) => l.id, - Leaf::Punct(p) => p.id, - Leaf::Ident(i) => i.id, - } - } -} - -fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usize) -> fmt::Result { +fn print_debug_subtree( + f: &mut fmt::Formatter<'_>, + subtree: &Subtree, + level: usize, +) -> fmt::Result { let align = " ".repeat(level); - let aux = match subtree.delimiter.map(|it| (it.kind, it.id.0)) { - None => "$".to_string(), - Some((DelimiterKind::Parenthesis, id)) => format!("() {id}"), - Some((DelimiterKind::Brace, id)) => format!("{{}} {id}"), - Some((DelimiterKind::Bracket, id)) => format!("[] {id}"), + let Delimiter { kind, open, close } = &subtree.delimiter; + let aux = match kind { + DelimiterKind::Invisible => format!("$$ {:?} {:?}", open, close), + DelimiterKind::Parenthesis => format!("() {:?} {:?}", open, close), + DelimiterKind::Brace => format!("{{}} {:?} {:?}", open, close), + DelimiterKind::Bracket => format!("[] {:?} {:?}", open, close), }; if subtree.token_trees.is_empty() { @@ -135,21 +183,25 @@ fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usi Ok(()) } -fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) -> fmt::Result { +fn print_debug_token( + f: &mut fmt::Formatter<'_>, + tkn: &TokenTree, + level: usize, +) -> fmt::Result { let align = " ".repeat(level); match tkn { TokenTree::Leaf(leaf) => match leaf { - Leaf::Literal(lit) => write!(f, "{align}LITERAL {} {}", lit.text, lit.id.0)?, + Leaf::Literal(lit) => write!(f, "{}LITERAL {} {:?}", align, lit.text, lit.span)?, Leaf::Punct(punct) => write!( f, - "{}PUNCH {} [{}] {}", + "{}PUNCH {} [{}] {:?}", align, punct.char, if punct.spacing == Spacing::Alone { "alone" } else { "joint" }, - punct.id.0 + punct.span )?, - Leaf::Ident(ident) => write!(f, "{align}IDENT {} {}", ident.text, ident.id.0)?, + Leaf::Ident(ident) => write!(f, "{}IDENT {} {:?}", align, ident.text, ident.span)?, }, TokenTree::Subtree(subtree) => { print_debug_subtree(f, subtree, level)?; @@ -159,13 +211,13 @@ fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) Ok(()) } -impl fmt::Debug for Subtree { +impl fmt::Debug for Subtree { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { print_debug_subtree(f, self, 0) } } -impl fmt::Display for TokenTree { +impl fmt::Display for TokenTree { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { TokenTree::Leaf(it) => fmt::Display::fmt(it, f), @@ -174,13 +226,13 @@ impl fmt::Display for TokenTree { } } -impl fmt::Display for Subtree { +impl fmt::Display for Subtree { fn fmt(&self, f: 
&mut fmt::Formatter<'_>) -> fmt::Result { - let (l, r) = match self.delimiter_kind() { - Some(DelimiterKind::Parenthesis) => ("(", ")"), - Some(DelimiterKind::Brace) => ("{", "}"), - Some(DelimiterKind::Bracket) => ("[", "]"), - None => ("", ""), + let (l, r) = match self.delimiter.kind { + DelimiterKind::Parenthesis => ("(", ")"), + DelimiterKind::Brace => ("{", "}"), + DelimiterKind::Bracket => ("[", "]"), + DelimiterKind::Invisible => ("", ""), }; f.write_str(l)?; let mut needs_space = false; @@ -202,7 +254,7 @@ impl fmt::Display for Subtree { } } -impl fmt::Display for Leaf { +impl fmt::Display for Leaf { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Leaf::Ident(it) => fmt::Display::fmt(it, f), @@ -212,25 +264,25 @@ impl fmt::Display for Leaf { } } -impl fmt::Display for Ident { +impl fmt::Display for Ident { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.text, f) } } -impl fmt::Display for Literal { +impl fmt::Display for Literal { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.text, f) } } -impl fmt::Display for Punct { +impl fmt::Display for Punct { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&self.char, f) } } -impl Subtree { +impl Subtree { /// Count the number of tokens recursively pub fn count(&self) -> usize { let children_count = self @@ -244,20 +296,16 @@ impl Subtree { self.token_trees.len() + children_count } - - pub fn delimiter_kind(&self) -> Option { - self.delimiter.map(|it| it.kind) - } } -impl Subtree { +impl Subtree { /// A simple line string used for debugging pub fn as_debug_string(&self) -> String { - let delim = match self.delimiter_kind() { - Some(DelimiterKind::Brace) => ("{", "}"), - Some(DelimiterKind::Bracket) => ("[", "]"), - Some(DelimiterKind::Parenthesis) => ("(", ")"), - None => (" ", " "), + let delim = match self.delimiter.kind { + DelimiterKind::Brace => ("{", "}"), + DelimiterKind::Bracket => ("[", "]"), + DelimiterKind::Parenthesis => ("(", ")"), + DelimiterKind::Invisible => ("$", "$"), }; let mut res = String::new(); @@ -275,7 +323,7 @@ impl Subtree { (Leaf::Ident(_), Some(&TokenTree::Leaf(Leaf::Ident(_)))) => { " ".to_string() + &s } - (Leaf::Punct(_), Some(&TokenTree::Leaf(Leaf::Punct(punct)))) => { + (Leaf::Punct(_), Some(TokenTree::Leaf(Leaf::Punct(punct)))) => { if punct.spacing == Spacing::Alone { " ".to_string() + &s } else { @@ -298,19 +346,19 @@ impl Subtree { pub mod buffer; -pub fn pretty(tkns: &[TokenTree]) -> String { - fn tokentree_to_text(tkn: &TokenTree) -> String { +pub fn pretty(tkns: &[TokenTree]) -> String { + fn tokentree_to_text(tkn: &TokenTree) -> String { match tkn { TokenTree::Leaf(Leaf::Ident(ident)) => ident.text.clone().into(), TokenTree::Leaf(Leaf::Literal(literal)) => literal.text.clone().into(), TokenTree::Leaf(Leaf::Punct(punct)) => format!("{}", punct.char), TokenTree::Subtree(subtree) => { let content = pretty(&subtree.token_trees); - let (open, close) = match subtree.delimiter.map(|it| it.kind) { - None => ("", ""), - Some(DelimiterKind::Brace) => ("{", "}"), - Some(DelimiterKind::Parenthesis) => ("(", ")"), - Some(DelimiterKind::Bracket) => ("[", "]"), + let (open, close) = match subtree.delimiter.kind { + DelimiterKind::Brace => ("{", "}"), + DelimiterKind::Bracket => ("[", "]"), + DelimiterKind::Parenthesis => ("(", ")"), + DelimiterKind::Invisible => ("", ""), }; format!("{open}{content}{close}") } From 6a8b20230b4e1ccdc5936d9cde5a6018d3b8fc74 Mon Sep 17 00:00:00 2001 From: Alex 
Macleod Date: Tue, 31 Jan 2023 14:12:03 +0000 Subject: [PATCH 223/501] Add machine applicable suggestion for `needless_lifetimes` Also adds a test for #5787 --- clippy_lints/src/lifetimes.rs | 253 +++++---- tests/ui/crashes/ice-2774.stderr | 5 + .../needless_lifetimes_impl_trait.stderr | 5 + tests/ui/needless_lifetimes.fixed | 537 ++++++++++++++++++ tests/ui/needless_lifetimes.rs | 21 +- tests/ui/needless_lifetimes.stderr | 380 ++++++++++--- 6 files changed, 1019 insertions(+), 182 deletions(-) create mode 100644 tests/ui/needless_lifetimes.fixed diff --git a/clippy_lints/src/lifetimes.rs b/clippy_lints/src/lifetimes.rs index ef9ac96ace5c7..3cccc2cfe2aa3 100644 --- a/clippy_lints/src/lifetimes.rs +++ b/clippy_lints/src/lifetimes.rs @@ -1,22 +1,21 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_then}; use clippy_utils::trait_ref_of_method; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_errors::Applicability; use rustc_hir::intravisit::nested_filter::{self as hir_nested_filter, NestedFilter}; use rustc_hir::intravisit::{ - walk_fn_decl, walk_generic_arg, walk_generic_param, walk_generics, walk_impl_item_ref, walk_item, walk_param_bound, + walk_fn_decl, walk_generic_param, walk_generics, walk_impl_item_ref, walk_item, walk_param_bound, walk_poly_trait_ref, walk_trait_ref, walk_ty, Visitor, }; -use rustc_hir::lang_items; use rustc_hir::FnRetTy::Return; use rustc_hir::{ - BareFnTy, BodyId, FnDecl, GenericArg, GenericBound, GenericParam, GenericParamKind, Generics, Impl, ImplItem, - ImplItemKind, Item, ItemKind, Lifetime, LifetimeName, LifetimeParamKind, PolyTraitRef, PredicateOrigin, TraitFn, - TraitItem, TraitItemKind, Ty, TyKind, WherePredicate, + lang_items, BareFnTy, BodyId, FnDecl, FnSig, GenericArg, GenericBound, GenericParam, GenericParamKind, Generics, + Impl, ImplItem, ImplItemKind, Item, ItemKind, Lifetime, LifetimeName, LifetimeParamKind, Node, PolyTraitRef, + PredicateOrigin, TraitFn, TraitItem, TraitItemKind, Ty, TyKind, WherePredicate, }; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::hir::nested_filter as middle_nested_filter; use rustc_middle::lint::in_external_macro; -use rustc_middle::ty::TyCtxt; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::def_id::LocalDefId; use rustc_span::source_map::Span; @@ -35,8 +34,6 @@ declare_clippy_lint! { /// ### Known problems /// - We bail out if the function has a `where` clause where lifetimes /// are mentioned due to potential false positives. - /// - Lifetime bounds such as `impl Foo + 'a` and `T: 'a` must be elided with the - /// placeholder notation `'_` because the fully elided notation leaves the type bound to `'static`. 
/// /// ### Example /// ```rust @@ -94,7 +91,7 @@ declare_lint_pass!(Lifetimes => [NEEDLESS_LIFETIMES, EXTRA_UNUSED_LIFETIMES]); impl<'tcx> LateLintPass<'tcx> for Lifetimes { fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) { if let ItemKind::Fn(ref sig, generics, id) = item.kind { - check_fn_inner(cx, sig.decl, Some(id), None, generics, item.span, true); + check_fn_inner(cx, sig, Some(id), None, generics, item.span, true); } else if let ItemKind::Impl(impl_) = item.kind { if !item.span.from_expansion() { report_extra_impl_lifetimes(cx, impl_); @@ -107,7 +104,7 @@ impl<'tcx> LateLintPass<'tcx> for Lifetimes { let report_extra_lifetimes = trait_ref_of_method(cx, item.owner_id.def_id).is_none(); check_fn_inner( cx, - sig.decl, + sig, Some(id), None, item.generics, @@ -123,22 +120,14 @@ impl<'tcx> LateLintPass<'tcx> for Lifetimes { TraitFn::Required(sig) => (None, Some(sig)), TraitFn::Provided(id) => (Some(id), None), }; - check_fn_inner(cx, sig.decl, body, trait_sig, item.generics, item.span, true); + check_fn_inner(cx, sig, body, trait_sig, item.generics, item.span, true); } } } -/// The lifetime of a &-reference. -#[derive(PartialEq, Eq, Hash, Debug, Clone)] -enum RefLt { - Unnamed, - Static, - Named(LocalDefId), -} - fn check_fn_inner<'tcx>( cx: &LateContext<'tcx>, - decl: &'tcx FnDecl<'_>, + sig: &'tcx FnSig<'_>, body: Option, trait_sig: Option<&[Ident]>, generics: &'tcx Generics<'_>, @@ -164,7 +153,7 @@ fn check_fn_inner<'tcx>( for bound in pred.bounds { let mut visitor = RefVisitor::new(cx); walk_param_bound(&mut visitor, bound); - if visitor.lts.iter().any(|lt| matches!(lt, RefLt::Named(_))) { + if visitor.lts.iter().any(|lt| matches!(lt.res, LifetimeName::Param(_))) { return; } if let GenericBound::Trait(ref trait_ref, _) = *bound { @@ -191,12 +180,12 @@ fn check_fn_inner<'tcx>( } } - if let Some(elidable_lts) = could_use_elision(cx, decl, body, trait_sig, generics.params) { + if let Some((elidable_lts, usages)) = could_use_elision(cx, sig.decl, body, trait_sig, generics.params) { let lts = elidable_lts .iter() // In principle, the result of the call to `Node::ident` could be `unwrap`ped, as `DefId` should refer to a // `Node::GenericParam`. 
- .filter_map(|&(def_id, _)| cx.tcx.hir().get_by_def_id(def_id).ident()) + .filter_map(|&def_id| cx.tcx.hir().get_by_def_id(def_id).ident()) .map(|ident| ident.to_string()) .collect::>() .join(", "); @@ -204,21 +193,99 @@ fn check_fn_inner<'tcx>( span_lint_and_then( cx, NEEDLESS_LIFETIMES, - span.with_hi(decl.output.span().hi()), + span.with_hi(sig.decl.output.span().hi()), &format!("the following explicit lifetimes could be elided: {lts}"), |diag| { - if let Some(span) = elidable_lts.iter().find_map(|&(_, span)| span) { - diag.span_help(span, "replace with `'_` in generic arguments such as here"); + if sig.header.is_async() { + // async functions have usages whose spans point at the lifetime declaration which messes up + // suggestions + return; + }; + + if let Some(suggestions) = elision_suggestions(cx, generics, &elidable_lts, &usages) { + diag.multipart_suggestion("elide the lifetimes", suggestions, Applicability::MachineApplicable); } }, ); } if report_extra_lifetimes { - self::report_extra_lifetimes(cx, decl, generics); + self::report_extra_lifetimes(cx, sig.decl, generics); } } +fn elision_suggestions( + cx: &LateContext<'_>, + generics: &Generics<'_>, + elidable_lts: &[LocalDefId], + usages: &[Lifetime], +) -> Option> { + let explicit_params = generics + .params + .iter() + .filter(|param| !param.is_elided_lifetime() && !param.is_impl_trait()) + .collect::>(); + + let mut suggestions = if elidable_lts.len() == explicit_params.len() { + // if all the params are elided remove the whole generic block + // + // fn x<'a>() {} + // ^^^^ + vec![(generics.span, String::new())] + } else { + elidable_lts + .iter() + .map(|&id| { + let pos = explicit_params.iter().position(|param| param.def_id == id)?; + let param = explicit_params.get(pos)?; + + let span = if let Some(next) = explicit_params.get(pos + 1) { + // fn x<'prev, 'a, 'next>() {} + // ^^^^ + param.span.until(next.span) + } else { + // `pos` should be at least 1 here, because the param in position 0 would either have a `next` + // param or would have taken the `elidable_lts.len() == explicit_params.len()` branch. + let prev = explicit_params.get(pos - 1)?; + + // fn x<'prev, 'a>() {} + // ^^^^ + param.span.with_lo(prev.span.hi()) + }; + + Some((span, String::new())) + }) + .collect::>>()? + }; + + suggestions.extend( + usages + .iter() + .filter(|usage| named_lifetime(usage).map_or(false, |id| elidable_lts.contains(&id))) + .map(|usage| { + match cx.tcx.hir().get_parent(usage.hir_id) { + Node::Ty(Ty { + kind: TyKind::Ref(..), .. + }) => { + // expand `&'a T` to `&'a T` + // ^^ ^^^ + let span = cx + .sess() + .source_map() + .span_extend_while(usage.ident.span, |ch| ch.is_ascii_whitespace()) + .unwrap_or(usage.ident.span); + + (span, String::new()) + }, + // `T<'a>` and `impl Foo + 'a` should be replaced by `'_` + _ => (usage.ident.span, String::from("'_")), + } + }), + ); + + Some(suggestions) +} + // elision doesn't work for explicit self types, see rust-lang/rust#69064 fn explicit_self_type<'tcx>(cx: &LateContext<'tcx>, func: &FnDecl<'tcx>, ident: Option) -> bool { if_chain! 
{ @@ -238,13 +305,20 @@ fn explicit_self_type<'tcx>(cx: &LateContext<'tcx>, func: &FnDecl<'tcx>, ident: } } +fn named_lifetime(lt: &Lifetime) -> Option { + match lt.res { + LifetimeName::Param(id) if !lt.is_anonymous() => Some(id), + _ => None, + } +} + fn could_use_elision<'tcx>( cx: &LateContext<'tcx>, func: &'tcx FnDecl<'_>, body: Option, trait_sig: Option<&[Ident]>, named_generics: &'tcx [GenericParam<'_>], -) -> Option)>> { +) -> Option<(Vec, Vec)> { // There are two scenarios where elision works: // * no output references, all input references have different LT // * output references, exactly one input reference with same LT @@ -252,7 +326,7 @@ fn could_use_elision<'tcx>( // level of the current item. // check named LTs - let allowed_lts = allowed_lts_from(cx.tcx, named_generics); + let allowed_lts = allowed_lts_from(named_generics); // these will collect all the lifetimes for references in arg/return types let mut input_visitor = RefVisitor::new(cx); @@ -302,32 +376,24 @@ fn could_use_elision<'tcx>( // check for lifetimes from higher scopes for lt in input_lts.iter().chain(output_lts.iter()) { - if !allowed_lts.contains(lt) { + if let Some(id) = named_lifetime(lt) + && !allowed_lts.contains(&id) + { return None; } } // check for higher-ranked trait bounds if !input_visitor.nested_elision_site_lts.is_empty() || !output_visitor.nested_elision_site_lts.is_empty() { - let allowed_lts: FxHashSet<_> = allowed_lts - .iter() - .filter_map(|lt| match lt { - RefLt::Named(def_id) => Some(cx.tcx.item_name(def_id.to_def_id())), - _ => None, - }) - .collect(); + let allowed_lts: FxHashSet<_> = allowed_lts.iter().map(|id| cx.tcx.item_name(id.to_def_id())).collect(); for lt in input_visitor.nested_elision_site_lts { - if let RefLt::Named(def_id) = lt { - if allowed_lts.contains(&cx.tcx.item_name(def_id.to_def_id())) { - return None; - } + if allowed_lts.contains(<.ident.name) { + return None; } } for lt in output_visitor.nested_elision_site_lts { - if let RefLt::Named(def_id) = lt { - if allowed_lts.contains(&cx.tcx.item_name(def_id.to_def_id())) { - return None; - } + if allowed_lts.contains(<.ident.name) { + return None; } } } @@ -339,15 +405,10 @@ fn could_use_elision<'tcx>( let elidable_lts = named_lifetime_occurrences(&input_lts) .into_iter() .filter_map(|(def_id, occurrences)| { - if occurrences == 1 && (input_lts.len() == 1 || !output_lts.contains(&RefLt::Named(def_id))) { - Some(( - def_id, - input_visitor - .lifetime_generic_arg_spans - .get(&def_id) - .or_else(|| output_visitor.lifetime_generic_arg_spans.get(&def_id)) - .copied(), - )) + if occurrences == 1 + && (input_lts.len() == 1 || !output_lts.iter().any(|lt| named_lifetime(lt) == Some(def_id))) + { + Some(def_id) } else { None } @@ -355,31 +416,34 @@ fn could_use_elision<'tcx>( .collect::>(); if elidable_lts.is_empty() { - None - } else { - Some(elidable_lts) + return None; } + + let usages = itertools::chain(input_lts, output_lts).collect(); + + Some((elidable_lts, usages)) } -fn allowed_lts_from(tcx: TyCtxt<'_>, named_generics: &[GenericParam<'_>]) -> FxHashSet { - let mut allowed_lts = FxHashSet::default(); - for par in named_generics.iter() { - if let GenericParamKind::Lifetime { .. } = par.kind { - allowed_lts.insert(RefLt::Named(tcx.hir().local_def_id(par.hir_id))); - } - } - allowed_lts.insert(RefLt::Unnamed); - allowed_lts.insert(RefLt::Static); - allowed_lts +fn allowed_lts_from(named_generics: &[GenericParam<'_>]) -> FxHashSet { + named_generics + .iter() + .filter_map(|par| { + if let GenericParamKind::Lifetime { .. 
} = par.kind { + Some(par.def_id) + } else { + None + } + }) + .collect() } /// Number of times each named lifetime occurs in the given slice. Returns a vector to preserve /// relative order. #[must_use] -fn named_lifetime_occurrences(lts: &[RefLt]) -> Vec<(LocalDefId, usize)> { +fn named_lifetime_occurrences(lts: &[Lifetime]) -> Vec<(LocalDefId, usize)> { let mut occurrences = Vec::new(); for lt in lts { - if let &RefLt::Named(curr_def_id) = lt { + if let Some(curr_def_id) = named_lifetime(lt) { if let Some(pair) = occurrences .iter_mut() .find(|(prev_def_id, _)| *prev_def_id == curr_def_id) @@ -393,12 +457,10 @@ fn named_lifetime_occurrences(lts: &[RefLt]) -> Vec<(LocalDefId, usize)> { occurrences } -/// A visitor usable for `rustc_front::visit::walk_ty()`. struct RefVisitor<'a, 'tcx> { cx: &'a LateContext<'tcx>, - lts: Vec<RefLt>, - lifetime_generic_arg_spans: FxHashMap<LocalDefId, Span>, - nested_elision_site_lts: Vec<RefLt>, + lts: Vec<Lifetime>, + nested_elision_site_lts: Vec<Lifetime>, unelided_trait_object_lifetime: bool, } @@ -407,32 +469,16 @@ impl<'a, 'tcx> RefVisitor<'a, 'tcx> { Self { cx, lts: Vec::new(), - lifetime_generic_arg_spans: FxHashMap::default(), nested_elision_site_lts: Vec::new(), unelided_trait_object_lifetime: false, } } - fn record(&mut self, lifetime: &Option<Lifetime>) { - if let Some(ref lt) = *lifetime { - if lt.is_static() { - self.lts.push(RefLt::Static); - } else if lt.is_anonymous() { - // Fresh lifetimes generated should be ignored. - self.lts.push(RefLt::Unnamed); - } else if let LifetimeName::Param(def_id) = lt.res { - self.lts.push(RefLt::Named(def_id)); - } - } else { - self.lts.push(RefLt::Unnamed); - } - } - - fn all_lts(&self) -> Vec<RefLt> { + fn all_lts(&self) -> Vec<Lifetime> { self.lts .iter() .chain(self.nested_elision_site_lts.iter()) - .cloned() + .copied() .collect::<Vec<_>>() } @@ -444,7 +490,7 @@ impl<'a, 'tcx> RefVisitor<'a, 'tcx> { impl<'a, 'tcx> Visitor<'tcx> for RefVisitor<'a, 'tcx> { // for lifetimes as parameters of generics fn visit_lifetime(&mut self, lifetime: &'tcx Lifetime) { - self.record(&Some(*lifetime)); + self.lts.push(*lifetime); } fn visit_poly_trait_ref(&mut self, poly_tref: &'tcx PolyTraitRef<'tcx>) { @@ -469,11 +515,7 @@ impl<'a, 'tcx> Visitor<'tcx> for RefVisitor<'a, 'tcx> { walk_item(self, item); self.lts.truncate(len); self.lts.extend(bounds.iter().filter_map(|bound| match bound { - GenericArg::Lifetime(l) => Some(if let LifetimeName::Param(def_id) = l.res { - RefLt::Named(def_id) - } else { - RefLt::Unnamed - }), + GenericArg::Lifetime(&l) => Some(l), _ => None, })); }, @@ -493,13 +535,6 @@ impl<'a, 'tcx> Visitor<'tcx> for RefVisitor<'a, 'tcx> { _ => walk_ty(self, ty), } } - - fn visit_generic_arg(&mut self, generic_arg: &'tcx GenericArg<'tcx>) { - if let GenericArg::Lifetime(l) = generic_arg && let LifetimeName::Param(def_id) = l.res { - self.lifetime_generic_arg_spans.entry(def_id).or_insert(l.ident.span); - } - walk_generic_arg(self, generic_arg); - } } /// Are any lifetimes mentioned in the `where` clause?
If so, we don't try to @@ -517,14 +552,18 @@ fn has_where_lifetimes<'tcx>(cx: &LateContext<'tcx>, generics: &'tcx Generics<'_ return true; } // if the bounds define new lifetimes, they are fine to occur - let allowed_lts = allowed_lts_from(cx.tcx, pred.bound_generic_params); + let allowed_lts = allowed_lts_from(pred.bound_generic_params); // now walk the bounds for bound in pred.bounds.iter() { walk_param_bound(&mut visitor, bound); } // and check that all lifetimes are allowed - if visitor.all_lts().iter().any(|it| !allowed_lts.contains(it)) { - return true; + for lt in visitor.all_lts() { + if let Some(id) = named_lifetime(<) + && !allowed_lts.contains(&id) + { + return true; + } } }, WherePredicate::EqPredicate(ref pred) => { diff --git a/tests/ui/crashes/ice-2774.stderr b/tests/ui/crashes/ice-2774.stderr index 1f26c7f4db657..c5ea0b16d1be4 100644 --- a/tests/ui/crashes/ice-2774.stderr +++ b/tests/ui/crashes/ice-2774.stderr @@ -5,6 +5,11 @@ LL | pub fn add_barfoos_to_foos<'a>(bars: &HashSet<&'a Bar>) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `-D clippy::needless-lifetimes` implied by `-D warnings` +help: elide the lifetimes + | +LL - pub fn add_barfoos_to_foos<'a>(bars: &HashSet<&'a Bar>) { +LL + pub fn add_barfoos_to_foos(bars: &HashSet<&Bar>) { + | error: aborting due to previous error diff --git a/tests/ui/crashes/needless_lifetimes_impl_trait.stderr b/tests/ui/crashes/needless_lifetimes_impl_trait.stderr index 875d5ab4f21ca..0b0e0ad2684a9 100644 --- a/tests/ui/crashes/needless_lifetimes_impl_trait.stderr +++ b/tests/ui/crashes/needless_lifetimes_impl_trait.stderr @@ -9,6 +9,11 @@ note: the lint level is defined here | LL | #![deny(clippy::needless_lifetimes)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +help: elide the lifetimes + | +LL - fn baz<'a>(&'a self) -> impl Foo + 'a { +LL + fn baz(&self) -> impl Foo + '_ { + | error: aborting due to previous error diff --git a/tests/ui/needless_lifetimes.fixed b/tests/ui/needless_lifetimes.fixed new file mode 100644 index 0000000000000..270cd1afc6792 --- /dev/null +++ b/tests/ui/needless_lifetimes.fixed @@ -0,0 +1,537 @@ +// run-rustfix +// aux-build:macro_rules.rs + +#![warn(clippy::needless_lifetimes)] +#![allow( + unused, + clippy::boxed_local, + clippy::needless_pass_by_value, + clippy::unnecessary_wraps, + dyn_drop, + clippy::get_first +)] + +#[macro_use] +extern crate macro_rules; + +fn distinct_lifetimes(_x: &u8, _y: &u8, _z: u8) {} + +fn distinct_and_static(_x: &u8, _y: &u8, _z: &'static u8) {} + +// No error; same lifetime on two params. +fn same_lifetime_on_input<'a>(_x: &'a u8, _y: &'a u8) {} + +// No error; static involved. +fn only_static_on_input(_x: &u8, _y: &u8, _z: &'static u8) {} + +fn mut_and_static_input(_x: &mut u8, _y: &'static str) {} + +fn in_and_out(x: &u8, _y: u8) -> &u8 { + x +} + +// No error; multiple input refs. 
+fn multiple_in_and_out_1<'a>(x: &'a u8, _y: &'a u8) -> &'a u8 { + x +} + +// Error; multiple input refs, but the output lifetime is not elided, i.e., the following is valid: +// fn multiple_in_and_out_2a<'a>(x: &'a u8, _y: &u8) -> &'a u8 +// ^^^ +fn multiple_in_and_out_2a<'a>(x: &'a u8, _y: &u8) -> &'a u8 { + x +} + +// Error; multiple input refs, but the output lifetime is not elided, i.e., the following is valid: +// fn multiple_in_and_out_2b<'b>(_x: &u8, y: &'b u8) -> &'b u8 +// ^^^ +fn multiple_in_and_out_2b<'b>(_x: &u8, y: &'b u8) -> &'b u8 { + y +} + +// No error; multiple input refs +async fn func<'a>(args: &[&'a str]) -> Option<&'a str> { + args.get(0).cloned() +} + +// No error; static involved. +fn in_static_and_out<'a>(x: &'a u8, _y: &'static u8) -> &'a u8 { + x +} + +// Error; multiple input refs, but the output lifetime is not elided, i.e., the following is valid: +// fn deep_reference_1a<'a>(x: &'a u8, _y: &u8) -> Result<&'a u8, ()> +// ^^^ +fn deep_reference_1a<'a>(x: &'a u8, _y: &u8) -> Result<&'a u8, ()> { + Ok(x) +} + +// Error; multiple input refs, but the output lifetime is not elided, i.e., the following is valid: +// fn deep_reference_1b<'b>(_x: &u8, y: &'b u8) -> Result<&'b u8, ()> +// ^^^ +fn deep_reference_1b<'b>(_x: &u8, y: &'b u8) -> Result<&'b u8, ()> { + Ok(y) +} + +// No error; two input refs. +fn deep_reference_2<'a>(x: Result<&'a u8, &'a u8>) -> &'a u8 { + x.unwrap() +} + +fn deep_reference_3(x: &u8, _y: u8) -> Result<&u8, ()> { + Ok(x) +} + +// Where-clause, but without lifetimes. +fn where_clause_without_lt(x: &u8, _y: u8) -> Result<&u8, ()> +where + T: Copy, +{ + Ok(x) +} + +type Ref<'r> = &'r u8; + +// No error; same lifetime on two params. +fn lifetime_param_1<'a>(_x: Ref<'a>, _y: &'a u8) {} + +fn lifetime_param_2(_x: Ref<'_>, _y: &u8) {} + +// No error; bounded lifetime. +fn lifetime_param_3<'a, 'b: 'a>(_x: Ref<'a>, _y: &'b u8) {} + +// No error; bounded lifetime. +fn lifetime_param_4<'a, 'b>(_x: Ref<'a>, _y: &'b u8) +where + 'b: 'a, +{ +} + +struct Lt<'a, I: 'static> { + x: &'a I, +} + +// No error; fn bound references `'a`. +fn fn_bound<'a, F, I>(_m: Lt<'a, I>, _f: F) -> Lt<'a, I> +where + F: Fn(Lt<'a, I>) -> Lt<'a, I>, +{ + unreachable!() +} + +fn fn_bound_2(_m: Lt<'_, I>, _f: F) -> Lt<'_, I> +where + for<'x> F: Fn(Lt<'x, I>) -> Lt<'x, I>, +{ + unreachable!() +} + +// No error; see below. +fn fn_bound_3<'a, F: FnOnce(&'a i32)>(x: &'a i32, f: F) { + f(x); +} + +fn fn_bound_3_cannot_elide() { + let x = 42; + let p = &x; + let mut q = &x; + // This will fail if we elide lifetimes of `fn_bound_3`. + fn_bound_3(p, |y| q = y); +} + +// No error; multiple input refs. +fn fn_bound_4<'a, F: FnOnce() -> &'a ()>(cond: bool, x: &'a (), f: F) -> &'a () { + if cond { x } else { f() } +} + +struct X { + x: u8, +} + +impl X { + fn self_and_out(&self) -> &u8 { + &self.x + } + + // Error; multiple input refs, but the output lifetime is not elided, i.e., the following is valid: + // fn self_and_in_out_1<'s>(&'s self, _x: &u8) -> &'s u8 + // ^^^ + fn self_and_in_out_1<'s>(&'s self, _x: &u8) -> &'s u8 { + &self.x + } + + // Error; multiple input refs, but the output lifetime is not elided, i.e., the following is valid: + // fn self_and_in_out_2<'t>(&self, x: &'t u8) -> &'t u8 + // ^^^^^ + fn self_and_in_out_2<'t>(&self, x: &'t u8) -> &'t u8 { + x + } + + fn distinct_self_and_in(&self, _x: &u8) {} + + // No error; same lifetimes on two params. 
+ fn self_and_same_in<'s>(&'s self, _x: &'s u8) {} +} + +struct Foo<'a>(&'a u8); + +impl<'a> Foo<'a> { + // No error; lifetime `'a` not defined in method. + fn self_shared_lifetime(&self, _: &'a u8) {} + // No error; bounds exist. + fn self_bound_lifetime<'b: 'a>(&self, _: &'b u8) {} +} + +fn already_elided<'a>(_: &u8, _: &'a u8) -> &'a u8 { + unimplemented!() +} + +fn struct_with_lt(_foo: Foo<'_>) -> &str { + unimplemented!() +} + +// No warning; two input lifetimes (named on the reference, anonymous on `Foo`). +fn struct_with_lt2<'a>(_foo: &'a Foo) -> &'a str { + unimplemented!() +} + +// No warning; two input lifetimes (anonymous on the reference, named on `Foo`). +fn struct_with_lt3<'a>(_foo: &Foo<'a>) -> &'a str { + unimplemented!() +} + +// Warning; two input lifetimes, but the output lifetime is not elided, i.e., the following is +// valid: +// fn struct_with_lt4a<'a>(_foo: &'a Foo<'_>) -> &'a str +// ^^ +fn struct_with_lt4a<'a>(_foo: &'a Foo<'_>) -> &'a str { + unimplemented!() +} + +// Warning; two input lifetimes, but the output lifetime is not elided, i.e., the following is +// valid: +// fn struct_with_lt4b<'b>(_foo: &Foo<'b>) -> &'b str +// ^^^^ +fn struct_with_lt4b<'b>(_foo: &Foo<'b>) -> &'b str { + unimplemented!() +} + +trait WithLifetime<'a> {} + +type WithLifetimeAlias<'a> = dyn WithLifetime<'a>; + +// Should not warn because it won't build without the lifetime. +fn trait_obj_elided<'a>(_arg: &'a dyn WithLifetime) -> &'a str { + unimplemented!() +} + +// Should warn because there is no lifetime on `Drop`, so this would be +// unambiguous if we elided the lifetime. +fn trait_obj_elided2(_arg: &dyn Drop) -> &str { + unimplemented!() +} + +type FooAlias<'a> = Foo<'a>; + +fn alias_with_lt(_foo: FooAlias<'_>) -> &str { + unimplemented!() +} + +// No warning; two input lifetimes (named on the reference, anonymous on `FooAlias`). +fn alias_with_lt2<'a>(_foo: &'a FooAlias) -> &'a str { + unimplemented!() +} + +// No warning; two input lifetimes (anonymous on the reference, named on `FooAlias`). +fn alias_with_lt3<'a>(_foo: &FooAlias<'a>) -> &'a str { + unimplemented!() +} + +// Warning; two input lifetimes, but the output lifetime is not elided, i.e., the following is +// valid: +// fn alias_with_lt4a<'a>(_foo: &'a FooAlias<'_>) -> &'a str +// ^^ +fn alias_with_lt4a<'a>(_foo: &'a FooAlias<'_>) -> &'a str { + unimplemented!() +} + +// Warning; two input lifetimes, but the output lifetime is not elided, i.e., the following is +// valid: +// fn alias_with_lt4b<'b>(_foo: &FooAlias<'b>) -> &'b str +// ^^^^^^^^^ +fn alias_with_lt4b<'b>(_foo: &FooAlias<'b>) -> &'b str { + unimplemented!() +} + +fn named_input_elided_output(_arg: &str) -> &str { + unimplemented!() +} + +fn elided_input_named_output<'a>(_arg: &str) -> &'a str { + unimplemented!() +} + +fn trait_bound_ok>(_: &u8, _: T) { + unimplemented!() +} +fn trait_bound<'a, T: WithLifetime<'a>>(_: &'a u8, _: T) { + unimplemented!() +} + +// Don't warn on these; see issue #292. +fn trait_bound_bug<'a, T: WithLifetime<'a>>() { + unimplemented!() +} + +// See issue #740. +struct Test { + vec: Vec, +} + +impl Test { + fn iter<'a>(&'a self) -> Box + 'a> { + unimplemented!() + } +} + +trait LintContext<'a> {} + +fn f<'a, T: LintContext<'a>>(_: &T) {} + +fn test<'a>(x: &'a [u8]) -> u8 { + let y: &'a u8 = &x[5]; + *y +} + +// Issue #3284: give hint regarding lifetime in return type. 
+struct Cow<'a> { + x: &'a str, +} +fn out_return_type_lts(e: &str) -> Cow<'_> { + unimplemented!() +} + +// Make sure we still warn on implementations +mod issue4291 { + trait BadTrait { + fn needless_lt(x: &u8) {} + } + + impl BadTrait for () { + fn needless_lt(_x: &u8) {} + } +} + +mod issue2944 { + trait Foo {} + struct Bar; + struct Baz<'a> { + bar: &'a Bar, + } + + impl<'a> Foo for Baz<'a> {} + impl Bar { + fn baz(&self) -> impl Foo + '_ { + Baz { bar: self } + } + } +} + +mod nested_elision_sites { + // issue #issue2944 + + // closure trait bounds subject to nested elision + // don't lint because they refer to outer lifetimes + fn trait_fn<'a>(i: &'a i32) -> impl Fn() -> &'a i32 { + move || i + } + fn trait_fn_mut<'a>(i: &'a i32) -> impl FnMut() -> &'a i32 { + move || i + } + fn trait_fn_once<'a>(i: &'a i32) -> impl FnOnce() -> &'a i32 { + move || i + } + + // don't lint + fn impl_trait_in_input_position<'a>(f: impl Fn() -> &'a i32) -> &'a i32 { + f() + } + fn impl_trait_in_output_position<'a>(i: &'a i32) -> impl Fn() -> &'a i32 { + move || i + } + // lint + fn impl_trait_elidable_nested_named_lifetimes<'a>(i: &'a i32, f: impl for<'b> Fn(&'b i32) -> &'b i32) -> &'a i32 { + f(i) + } + fn impl_trait_elidable_nested_anonymous_lifetimes(i: &i32, f: impl Fn(&i32) -> &i32) -> &i32 { + f(i) + } + + // don't lint + fn generics_not_elidable<'a, T: Fn() -> &'a i32>(f: T) -> &'a i32 { + f() + } + // lint + fn generics_elidable &i32>(i: &i32, f: T) -> &i32 { + f(i) + } + + // don't lint + fn where_clause_not_elidable<'a, T>(f: T) -> &'a i32 + where + T: Fn() -> &'a i32, + { + f() + } + // lint + fn where_clause_elidadable(i: &i32, f: T) -> &i32 + where + T: Fn(&i32) -> &i32, + { + f(i) + } + + // don't lint + fn pointer_fn_in_input_position<'a>(f: fn(&'a i32) -> &'a i32, i: &'a i32) -> &'a i32 { + f(i) + } + fn pointer_fn_in_output_position<'a>(_: &'a i32) -> fn(&'a i32) -> &'a i32 { + |i| i + } + // lint + fn pointer_fn_elidable(i: &i32, f: fn(&i32) -> &i32) -> &i32 { + f(i) + } + + // don't lint + fn nested_fn_pointer_1<'a>(_: &'a i32) -> fn(fn(&'a i32) -> &'a i32) -> i32 { + |f| 42 + } + fn nested_fn_pointer_2<'a>(_: &'a i32) -> impl Fn(fn(&'a i32)) { + |f| () + } + + // lint + fn nested_fn_pointer_3(_: &i32) -> fn(fn(&i32) -> &i32) -> i32 { + |f| 42 + } + fn nested_fn_pointer_4(_: &i32) -> impl Fn(fn(&i32)) { + |f| () + } +} + +mod issue6159 { + use std::ops::Deref; + pub fn apply_deref<'a, T, F, R>(x: &'a T, f: F) -> R + where + T: Deref, + F: FnOnce(&'a T::Target) -> R, + { + f(x.deref()) + } +} + +mod issue7296 { + use std::rc::Rc; + use std::sync::Arc; + + struct Foo; + impl Foo { + fn implicit(&self) -> &() { + &() + } + fn implicit_mut(&mut self) -> &() { + &() + } + + fn explicit<'a>(self: &'a Arc) -> &'a () { + &() + } + fn explicit_mut<'a>(self: &'a mut Rc) -> &'a () { + &() + } + + fn lifetime_elsewhere(self: Box, here: &()) -> &() { + &() + } + } + + trait Bar { + fn implicit(&self) -> &(); + fn implicit_provided(&self) -> &() { + &() + } + + fn explicit<'a>(self: &'a Arc) -> &'a (); + fn explicit_provided<'a>(self: &'a Arc) -> &'a () { + &() + } + + fn lifetime_elsewhere(self: Box, here: &()) -> &(); + fn lifetime_elsewhere_provided(self: Box, here: &()) -> &() { + &() + } + } +} + +mod pr_9743_false_negative_fix { + #![allow(unused)] + + fn foo(x: &u8, y: &'_ u8) {} + + fn bar(x: &u8, y: &'_ u8, z: &'_ u8) {} +} + +mod pr_9743_output_lifetime_checks { + #![allow(unused)] + + // lint: only one input + fn one_input(x: &u8) -> &u8 { + unimplemented!() + } + + // lint: multiple 
inputs, output would not be elided + fn multiple_inputs_output_not_elided<'b>(x: &u8, y: &'b u8, z: &'b u8) -> &'b u8 { + unimplemented!() + } + + // don't lint: multiple inputs, output would be elided (which would create an ambiguity) + fn multiple_inputs_output_would_be_elided<'a, 'b>(x: &'a u8, y: &'b u8, z: &'b u8) -> &'a u8 { + unimplemented!() + } +} + +mod in_macro { + macro_rules! local_one_input_macro { + () => { + fn one_input(x: &u8) -> &u8 { + unimplemented!() + } + }; + } + + // lint local macro expands to function with needless lifetimes + local_one_input_macro!(); + + // no lint on external macro + macro_rules::needless_lifetime!(); +} + +mod issue5787 { + use std::sync::MutexGuard; + + struct Foo; + + impl Foo { + // doesn't get linted without async + pub async fn wait<'a, T>(&self, guard: MutexGuard<'a, T>) -> MutexGuard<'a, T> { + guard + } + } + + async fn foo<'a>(_x: &i32, y: &'a str) -> &'a str { + y + } +} + +fn main() {} diff --git a/tests/ui/needless_lifetimes.rs b/tests/ui/needless_lifetimes.rs index 78493c6d06726..5d4dc971b8d28 100644 --- a/tests/ui/needless_lifetimes.rs +++ b/tests/ui/needless_lifetimes.rs @@ -1,7 +1,9 @@ +// run-rustfix // aux-build:macro_rules.rs + #![warn(clippy::needless_lifetimes)] #![allow( - dead_code, + unused, clippy::boxed_local, clippy::needless_pass_by_value, clippy::unnecessary_wraps, @@ -515,4 +517,21 @@ mod in_macro { macro_rules::needless_lifetime!(); } +mod issue5787 { + use std::sync::MutexGuard; + + struct Foo; + + impl Foo { + // doesn't get linted without async + pub async fn wait<'a, T>(&self, guard: MutexGuard<'a, T>) -> MutexGuard<'a, T> { + guard + } + } + + async fn foo<'a>(_x: &i32, y: &'a str) -> &'a str { + y + } +} + fn main() {} diff --git a/tests/ui/needless_lifetimes.stderr b/tests/ui/needless_lifetimes.stderr index 9d02626956e0f..afe637ac38887 100644 --- a/tests/ui/needless_lifetimes.stderr +++ b/tests/ui/needless_lifetimes.stderr @@ -1,319 +1,546 @@ error: the following explicit lifetimes could be elided: 'a, 'b - --> $DIR/needless_lifetimes.rs:15:1 + --> $DIR/needless_lifetimes.rs:17:1 | LL | fn distinct_lifetimes<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `-D clippy::needless-lifetimes` implied by `-D warnings` +help: elide the lifetimes + | +LL - fn distinct_lifetimes<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: u8) {} +LL + fn distinct_lifetimes(_x: &u8, _y: &u8, _z: u8) {} + | error: the following explicit lifetimes could be elided: 'a, 'b - --> $DIR/needless_lifetimes.rs:17:1 + --> $DIR/needless_lifetimes.rs:19:1 | LL | fn distinct_and_static<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: &'static u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn distinct_and_static<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: &'static u8) {} +LL + fn distinct_and_static(_x: &u8, _y: &u8, _z: &'static u8) {} + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:27:1 + --> $DIR/needless_lifetimes.rs:29:1 | LL | fn in_and_out<'a>(x: &'a u8, _y: u8) -> &'a u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn in_and_out<'a>(x: &'a u8, _y: u8) -> &'a u8 { +LL + fn in_and_out(x: &u8, _y: u8) -> &u8 { + | error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:39:1 + --> $DIR/needless_lifetimes.rs:41:1 | LL | fn multiple_in_and_out_2a<'a, 'b>(x: &'a u8, _y: &'b u8) -> &'a u8 { | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn multiple_in_and_out_2a<'a, 'b>(x: &'a u8, _y: &'b u8) -> &'a u8 { +LL + fn multiple_in_and_out_2a<'a>(x: &'a u8, _y: &u8) -> &'a u8 { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:46:1 + --> $DIR/needless_lifetimes.rs:48:1 | LL | fn multiple_in_and_out_2b<'a, 'b>(_x: &'a u8, y: &'b u8) -> &'b u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn multiple_in_and_out_2b<'a, 'b>(_x: &'a u8, y: &'b u8) -> &'b u8 { +LL + fn multiple_in_and_out_2b<'b>(_x: &u8, y: &'b u8) -> &'b u8 { + | error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:63:1 + --> $DIR/needless_lifetimes.rs:65:1 | LL | fn deep_reference_1a<'a, 'b>(x: &'a u8, _y: &'b u8) -> Result<&'a u8, ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn deep_reference_1a<'a, 'b>(x: &'a u8, _y: &'b u8) -> Result<&'a u8, ()> { +LL + fn deep_reference_1a<'a>(x: &'a u8, _y: &u8) -> Result<&'a u8, ()> { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:70:1 + --> $DIR/needless_lifetimes.rs:72:1 | LL | fn deep_reference_1b<'a, 'b>(_x: &'a u8, y: &'b u8) -> Result<&'b u8, ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn deep_reference_1b<'a, 'b>(_x: &'a u8, y: &'b u8) -> Result<&'b u8, ()> { +LL + fn deep_reference_1b<'b>(_x: &u8, y: &'b u8) -> Result<&'b u8, ()> { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:79:1 + --> $DIR/needless_lifetimes.rs:81:1 | LL | fn deep_reference_3<'a>(x: &'a u8, _y: u8) -> Result<&'a u8, ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn deep_reference_3<'a>(x: &'a u8, _y: u8) -> Result<&'a u8, ()> { +LL + fn deep_reference_3(x: &u8, _y: u8) -> Result<&u8, ()> { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:84:1 + --> $DIR/needless_lifetimes.rs:86:1 | LL | fn where_clause_without_lt<'a, T>(x: &'a u8, _y: u8) -> Result<&'a u8, ()> | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn where_clause_without_lt<'a, T>(x: &'a u8, _y: u8) -> Result<&'a u8, ()> +LL + fn where_clause_without_lt(x: &u8, _y: u8) -> Result<&u8, ()> + | error: the following explicit lifetimes could be elided: 'a, 'b - --> $DIR/needless_lifetimes.rs:96:1 + --> $DIR/needless_lifetimes.rs:98:1 | LL | fn lifetime_param_2<'a, 'b>(_x: Ref<'a>, _y: &'b u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:96:37 +help: elide the lifetimes + | +LL - fn lifetime_param_2<'a, 'b>(_x: Ref<'a>, _y: &'b u8) {} +LL + fn lifetime_param_2(_x: Ref<'_>, _y: &u8) {} | -LL | fn lifetime_param_2<'a, 'b>(_x: Ref<'a>, _y: &'b u8) {} - | ^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:120:1 + --> $DIR/needless_lifetimes.rs:122:1 | LL | fn fn_bound_2<'a, F, I>(_m: Lt<'a, I>, _f: F) -> Lt<'a, I> | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: replace with `'_` in generic arguments such as here - --> 
$DIR/needless_lifetimes.rs:120:32 +help: elide the lifetimes + | +LL - fn fn_bound_2<'a, F, I>(_m: Lt<'a, I>, _f: F) -> Lt<'a, I> +LL + fn fn_bound_2(_m: Lt<'_, I>, _f: F) -> Lt<'_, I> | -LL | fn fn_bound_2<'a, F, I>(_m: Lt<'a, I>, _f: F) -> Lt<'a, I> - | ^^ error: the following explicit lifetimes could be elided: 's - --> $DIR/needless_lifetimes.rs:150:5 + --> $DIR/needless_lifetimes.rs:152:5 | LL | fn self_and_out<'s>(&'s self) -> &'s u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn self_and_out<'s>(&'s self) -> &'s u8 { +LL + fn self_and_out(&self) -> &u8 { + | error: the following explicit lifetimes could be elided: 't - --> $DIR/needless_lifetimes.rs:157:5 + --> $DIR/needless_lifetimes.rs:159:5 | LL | fn self_and_in_out_1<'s, 't>(&'s self, _x: &'t u8) -> &'s u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn self_and_in_out_1<'s, 't>(&'s self, _x: &'t u8) -> &'s u8 { +LL + fn self_and_in_out_1<'s>(&'s self, _x: &u8) -> &'s u8 { + | error: the following explicit lifetimes could be elided: 's - --> $DIR/needless_lifetimes.rs:164:5 + --> $DIR/needless_lifetimes.rs:166:5 | LL | fn self_and_in_out_2<'s, 't>(&'s self, x: &'t u8) -> &'t u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn self_and_in_out_2<'s, 't>(&'s self, x: &'t u8) -> &'t u8 { +LL + fn self_and_in_out_2<'t>(&self, x: &'t u8) -> &'t u8 { + | error: the following explicit lifetimes could be elided: 's, 't - --> $DIR/needless_lifetimes.rs:168:5 + --> $DIR/needless_lifetimes.rs:170:5 | LL | fn distinct_self_and_in<'s, 't>(&'s self, _x: &'t u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn distinct_self_and_in<'s, 't>(&'s self, _x: &'t u8) {} +LL + fn distinct_self_and_in(&self, _x: &u8) {} + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:187:1 + --> $DIR/needless_lifetimes.rs:189:1 | LL | fn struct_with_lt<'a>(_foo: Foo<'a>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:187:33 +help: elide the lifetimes + | +LL - fn struct_with_lt<'a>(_foo: Foo<'a>) -> &'a str { +LL + fn struct_with_lt(_foo: Foo<'_>) -> &str { | -LL | fn struct_with_lt<'a>(_foo: Foo<'a>) -> &'a str { - | ^^ error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:205:1 + --> $DIR/needless_lifetimes.rs:207:1 | LL | fn struct_with_lt4a<'a, 'b>(_foo: &'a Foo<'b>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:205:43 +help: elide the lifetimes + | +LL - fn struct_with_lt4a<'a, 'b>(_foo: &'a Foo<'b>) -> &'a str { +LL + fn struct_with_lt4a<'a>(_foo: &'a Foo<'_>) -> &'a str { | -LL | fn struct_with_lt4a<'a, 'b>(_foo: &'a Foo<'b>) -> &'a str { - | ^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:213:1 + --> $DIR/needless_lifetimes.rs:215:1 | LL | fn struct_with_lt4b<'a, 'b>(_foo: &'a Foo<'b>) -> &'b str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn struct_with_lt4b<'a, 'b>(_foo: &'a Foo<'b>) -> &'b str { +LL + fn struct_with_lt4b<'b>(_foo: &Foo<'b>) -> &'b str { + | error: the following explicit lifetimes could be elided: 'a - --> 
$DIR/needless_lifetimes.rs:228:1 + --> $DIR/needless_lifetimes.rs:230:1 | LL | fn trait_obj_elided2<'a>(_arg: &'a dyn Drop) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn trait_obj_elided2<'a>(_arg: &'a dyn Drop) -> &'a str { +LL + fn trait_obj_elided2(_arg: &dyn Drop) -> &str { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:234:1 + --> $DIR/needless_lifetimes.rs:236:1 | LL | fn alias_with_lt<'a>(_foo: FooAlias<'a>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:234:37 +help: elide the lifetimes + | +LL - fn alias_with_lt<'a>(_foo: FooAlias<'a>) -> &'a str { +LL + fn alias_with_lt(_foo: FooAlias<'_>) -> &str { | -LL | fn alias_with_lt<'a>(_foo: FooAlias<'a>) -> &'a str { - | ^^ error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:252:1 + --> $DIR/needless_lifetimes.rs:254:1 | LL | fn alias_with_lt4a<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:252:47 +help: elide the lifetimes + | +LL - fn alias_with_lt4a<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'a str { +LL + fn alias_with_lt4a<'a>(_foo: &'a FooAlias<'_>) -> &'a str { | -LL | fn alias_with_lt4a<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'a str { - | ^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:260:1 + --> $DIR/needless_lifetimes.rs:262:1 | LL | fn alias_with_lt4b<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'b str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn alias_with_lt4b<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'b str { +LL + fn alias_with_lt4b<'b>(_foo: &FooAlias<'b>) -> &'b str { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:264:1 + --> $DIR/needless_lifetimes.rs:266:1 | LL | fn named_input_elided_output<'a>(_arg: &'a str) -> &str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn named_input_elided_output<'a>(_arg: &'a str) -> &str { +LL + fn named_input_elided_output(_arg: &str) -> &str { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:272:1 + --> $DIR/needless_lifetimes.rs:274:1 | LL | fn trait_bound_ok<'a, T: WithLifetime<'static>>(_: &'a u8, _: T) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn trait_bound_ok<'a, T: WithLifetime<'static>>(_: &'a u8, _: T) { +LL + fn trait_bound_ok>(_: &u8, _: T) { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:308:1 + --> $DIR/needless_lifetimes.rs:310:1 | LL | fn out_return_type_lts<'a>(e: &'a str) -> Cow<'a> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: replace with `'_` in generic arguments such as here - --> $DIR/needless_lifetimes.rs:308:47 +help: elide the lifetimes + | +LL - fn out_return_type_lts<'a>(e: &'a str) -> Cow<'a> { +LL + fn out_return_type_lts(e: &str) -> Cow<'_> { | -LL | fn out_return_type_lts<'a>(e: &'a str) -> Cow<'a> { - | ^^ error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:315:9 + --> $DIR/needless_lifetimes.rs:317:9 | LL | fn 
needless_lt<'a>(x: &'a u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn needless_lt<'a>(x: &'a u8) {} +LL + fn needless_lt(x: &u8) {} + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:319:9 + --> $DIR/needless_lifetimes.rs:321:9 | LL | fn needless_lt<'a>(_x: &'a u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn needless_lt<'a>(_x: &'a u8) {} +LL + fn needless_lt(_x: &u8) {} + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:332:9 + --> $DIR/needless_lifetimes.rs:334:9 | LL | fn baz<'a>(&'a self) -> impl Foo + 'a { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn baz<'a>(&'a self) -> impl Foo + 'a { +LL + fn baz(&self) -> impl Foo + '_ { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:364:5 + --> $DIR/needless_lifetimes.rs:366:5 | LL | fn impl_trait_elidable_nested_anonymous_lifetimes<'a>(i: &'a i32, f: impl Fn(&i32) -> &i32) -> &'a i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn impl_trait_elidable_nested_anonymous_lifetimes<'a>(i: &'a i32, f: impl Fn(&i32) -> &i32) -> &'a i32 { +LL + fn impl_trait_elidable_nested_anonymous_lifetimes(i: &i32, f: impl Fn(&i32) -> &i32) -> &i32 { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:373:5 + --> $DIR/needless_lifetimes.rs:375:5 | LL | fn generics_elidable<'a, T: Fn(&i32) -> &i32>(i: &'a i32, f: T) -> &'a i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn generics_elidable<'a, T: Fn(&i32) -> &i32>(i: &'a i32, f: T) -> &'a i32 { +LL + fn generics_elidable &i32>(i: &i32, f: T) -> &i32 { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:385:5 + --> $DIR/needless_lifetimes.rs:387:5 | LL | fn where_clause_elidadable<'a, T>(i: &'a i32, f: T) -> &'a i32 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn where_clause_elidadable<'a, T>(i: &'a i32, f: T) -> &'a i32 +LL + fn where_clause_elidadable(i: &i32, f: T) -> &i32 + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:400:5 + --> $DIR/needless_lifetimes.rs:402:5 | LL | fn pointer_fn_elidable<'a>(i: &'a i32, f: fn(&i32) -> &i32) -> &'a i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn pointer_fn_elidable<'a>(i: &'a i32, f: fn(&i32) -> &i32) -> &'a i32 { +LL + fn pointer_fn_elidable(i: &i32, f: fn(&i32) -> &i32) -> &i32 { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:413:5 + --> $DIR/needless_lifetimes.rs:415:5 | LL | fn nested_fn_pointer_3<'a>(_: &'a i32) -> fn(fn(&i32) -> &i32) -> i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn nested_fn_pointer_3<'a>(_: &'a i32) -> fn(fn(&i32) -> &i32) -> i32 { +LL + fn nested_fn_pointer_3(_: &i32) -> fn(fn(&i32) -> &i32) -> i32 { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:416:5 + --> $DIR/needless_lifetimes.rs:418:5 | LL | fn nested_fn_pointer_4<'a>(_: &'a i32) -> impl Fn(fn(&i32)) { | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn nested_fn_pointer_4<'a>(_: &'a i32) -> impl Fn(fn(&i32)) { +LL + fn nested_fn_pointer_4(_: &i32) -> impl Fn(fn(&i32)) { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:438:9 + --> $DIR/needless_lifetimes.rs:440:9 | LL | fn implicit<'a>(&'a self) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn implicit<'a>(&'a self) -> &'a () { +LL + fn implicit(&self) -> &() { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:441:9 + --> $DIR/needless_lifetimes.rs:443:9 | LL | fn implicit_mut<'a>(&'a mut self) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn implicit_mut<'a>(&'a mut self) -> &'a () { +LL + fn implicit_mut(&mut self) -> &() { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:452:9 + --> $DIR/needless_lifetimes.rs:454:9 | LL | fn lifetime_elsewhere<'a>(self: Box, here: &'a ()) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn lifetime_elsewhere<'a>(self: Box, here: &'a ()) -> &'a () { +LL + fn lifetime_elsewhere(self: Box, here: &()) -> &() { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:458:9 + --> $DIR/needless_lifetimes.rs:460:9 | LL | fn implicit<'a>(&'a self) -> &'a (); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn implicit<'a>(&'a self) -> &'a (); +LL + fn implicit(&self) -> &(); + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:459:9 + --> $DIR/needless_lifetimes.rs:461:9 | LL | fn implicit_provided<'a>(&'a self) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn implicit_provided<'a>(&'a self) -> &'a () { +LL + fn implicit_provided(&self) -> &() { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:468:9 + --> $DIR/needless_lifetimes.rs:470:9 | LL | fn lifetime_elsewhere<'a>(self: Box, here: &'a ()) -> &'a (); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn lifetime_elsewhere<'a>(self: Box, here: &'a ()) -> &'a (); +LL + fn lifetime_elsewhere(self: Box, here: &()) -> &(); + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:469:9 + --> $DIR/needless_lifetimes.rs:471:9 | LL | fn lifetime_elsewhere_provided<'a>(self: Box, here: &'a ()) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn lifetime_elsewhere_provided<'a>(self: Box, here: &'a ()) -> &'a () { +LL + fn lifetime_elsewhere_provided(self: Box, here: &()) -> &() { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:478:5 + --> $DIR/needless_lifetimes.rs:480:5 | LL | fn foo<'a>(x: &'a u8, y: &'_ u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn foo<'a>(x: &'a u8, y: &'_ u8) {} +LL + fn foo(x: &u8, y: &'_ u8) {} + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:480:5 + --> $DIR/needless_lifetimes.rs:482:5 | LL | fn bar<'a>(x: &'a u8, y: &'_ u8, z: &'_ u8) {} | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn bar<'a>(x: &'a u8, y: &'_ u8, z: &'_ u8) {} +LL + fn bar(x: &u8, y: &'_ u8, z: &'_ u8) {} + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:487:5 + --> $DIR/needless_lifetimes.rs:489:5 | LL | fn one_input<'a>(x: &'a u8) -> &'a u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn one_input<'a>(x: &'a u8) -> &'a u8 { +LL + fn one_input(x: &u8) -> &u8 { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:492:5 + --> $DIR/needless_lifetimes.rs:494:5 | LL | fn multiple_inputs_output_not_elided<'a, 'b>(x: &'a u8, y: &'b u8, z: &'b u8) -> &'b u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: elide the lifetimes + | +LL - fn multiple_inputs_output_not_elided<'a, 'b>(x: &'a u8, y: &'b u8, z: &'b u8) -> &'b u8 { +LL + fn multiple_inputs_output_not_elided<'b>(x: &u8, y: &'b u8, z: &'b u8) -> &'b u8 { + | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:505:13 + --> $DIR/needless_lifetimes.rs:507:13 | LL | fn one_input<'a>(x: &'a u8) -> &'a u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -322,6 +549,11 @@ LL | local_one_input_macro!(); | ------------------------ in this macro invocation | = note: this error originates in the macro `local_one_input_macro` (in Nightly builds, run with -Z macro-backtrace for more info) +help: elide the lifetimes + | +LL - fn one_input<'a>(x: &'a u8) -> &'a u8 { +LL + fn one_input(x: &u8) -> &u8 { + | error: aborting due to 46 previous errors From 56f81ebc3e52ac7e6dd8a3359e290d8765975edc Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Tue, 31 Jan 2023 15:43:47 +0100 Subject: [PATCH 224/501] Lazily create the trace output channel --- editors/code/src/ctx.ts | 6 ++---- editors/code/src/util.ts | 46 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 4 deletions(-) diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index 8b04182155dfe..e2a30e0cc4544 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts @@ -4,7 +4,7 @@ import * as ra from "./lsp_ext"; import { Config, substituteVSCodeVariables } from "./config"; import { createClient } from "./client"; -import { isRustDocument, isRustEditor, log, RustEditor } from "./util"; +import { isRustDocument, isRustEditor, LazyOutputChannel, log, RustEditor } from "./util"; import { ServerStatusParams } from "./lsp_ext"; import { PersistentState } from "./persistent_state"; import { bootstrap } from "./bootstrap"; @@ -128,9 +128,7 @@ export class Ctx { } if (!this.traceOutputChannel) { - this.traceOutputChannel = vscode.window.createOutputChannel( - "Rust Analyzer Language Server Trace" - ); + this.traceOutputChannel = new LazyOutputChannel("Rust Analyzer Language Server Trace"); this.pushExtCleanup(this.traceOutputChannel); } if (!this.outputChannel) { diff --git a/editors/code/src/util.ts b/editors/code/src/util.ts index a92c90f7ff43f..33d42986ad9e4 100644 --- a/editors/code/src/util.ts +++ b/editors/code/src/util.ts @@ -166,3 +166,49 @@ export function execute(command: string, options: ExecOptions): Promise }); }); } + +export class LazyOutputChannel implements vscode.OutputChannel { + constructor(name: string) { + this.name = name; + } + + name: string; + _channel: vscode.OutputChannel | undefined; + + get channel(): vscode.OutputChannel { + if (!this._channel) { + 
this._channel = vscode.window.createOutputChannel(this.name); + } + return this._channel; + } + + append(value: string): void { + this.channel.append(value); + } + appendLine(value: string): void { + this.channel.appendLine(value); + } + replace(value: string): void { + this.channel.replace(value); + } + clear(): void { + if (this._channel) { + this._channel.clear(); + } + } + show(preserveFocus?: boolean): void; + show(column?: vscode.ViewColumn, preserveFocus?: boolean): void; + show(column?: vscode.ViewColumn, preserveFocus?: boolean): void { + this.channel.show(column, preserveFocus); + } + hide(): void { + if (this._channel) { + this._channel.hide(); + } + } + dispose(): void { + if (this._channel) { + this._channel.dispose(); + } + } +} From 5b1187a04654d3d95926265e3d9762f695b31c1d Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Tue, 31 Jan 2023 15:49:39 +0100 Subject: [PATCH 225/501] =?UTF-8?q?Remove=20some=20types=20=C2=AF\=5F(?= =?UTF-8?q?=E3=83=84)=5F/=C2=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- editors/code/src/util.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/editors/code/src/util.ts b/editors/code/src/util.ts index 33d42986ad9e4..d93b9caeb1648 100644 --- a/editors/code/src/util.ts +++ b/editors/code/src/util.ts @@ -198,7 +198,7 @@ export class LazyOutputChannel implements vscode.OutputChannel { } show(preserveFocus?: boolean): void; show(column?: vscode.ViewColumn, preserveFocus?: boolean): void; - show(column?: vscode.ViewColumn, preserveFocus?: boolean): void { + show(column?: any, preserveFocus?: any): void { this.channel.show(column, preserveFocus); } hide(): void { From 0bf0d937b83ffa8db70c7fb9d7a0935bc56f3b70 Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Tue, 31 Jan 2023 20:23:38 +0330 Subject: [PATCH 226/501] unsize cast array only on pointer type --- crates/hir-ty/src/autoderef.rs | 2 ++ crates/hir-ty/src/method_resolution.rs | 32 ++++++++++++-------- crates/hir-ty/src/tests/method_resolution.rs | 3 +- crates/test-utils/src/minicore.rs | 18 +++++++++++ 4 files changed, 40 insertions(+), 15 deletions(-) diff --git a/crates/hir-ty/src/autoderef.rs b/crates/hir-ty/src/autoderef.rs index caddca6d9152e..58744dd0c0f98 100644 --- a/crates/hir-ty/src/autoderef.rs +++ b/crates/hir-ty/src/autoderef.rs @@ -17,11 +17,13 @@ use crate::{ static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(10); +#[derive(Debug)] pub(crate) enum AutoderefKind { Builtin, Overloaded, } +#[derive(Debug)] pub(crate) struct Autoderef<'a, 'db> { pub(crate) table: &'a mut InferenceTable<'db>, ty: Ty, diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 858de0005962c..8c7714b9a697b 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -5,7 +5,7 @@ use std::{ops::ControlFlow, sync::Arc}; use base_db::{CrateId, Edition}; -use chalk_ir::{cast::Cast, Mutability, UniverseIndex}; +use chalk_ir::{cast::Cast, Mutability, TyKind, UniverseIndex}; use hir_def::{ data::ImplData, item_scope::ItemScope, lang_item::LangItem, nameres::DefMap, AssocItemId, BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId, @@ -25,7 +25,7 @@ use crate::{ static_lifetime, to_chalk_trait_id, utils::all_super_traits, AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner, - Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind, + Scalar, Substitution, 
TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, }; /// This is used as a key for indexing impls. @@ -588,25 +588,31 @@ impl ReceiverAdjustments { } } } + if let Some(m) = self.autoref { + ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner); + adjust + .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() }); + } if self.unsize_array { - ty = match ty.kind(Interner) { - TyKind::Array(inner, _) => TyKind::Slice(inner.clone()).intern(Interner), - _ => { - never!("unsize_array with non-array {:?}", ty); - ty + ty = 'x: { + if let TyKind::Ref(m, l, inner) = ty.kind(Interner) { + if let TyKind::Array(inner, _) = inner.kind(Interner) { + break 'x TyKind::Ref( + m.clone(), + l.clone(), + TyKind::Slice(inner.clone()).intern(Interner), + ) + .intern(Interner); + } } + never!("unsize_array with non-reference-to-array {:?}", ty); + ty }; - // FIXME this is kind of wrong since the unsize needs to happen to a pointer/reference adjust.push(Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target: ty.clone(), }); } - if let Some(m) = self.autoref { - ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner); - adjust - .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() }); - } (ty, adjust) } diff --git a/crates/hir-ty/src/tests/method_resolution.rs b/crates/hir-ty/src/tests/method_resolution.rs index 616ca8058e9df..42fb685abf907 100644 --- a/crates/hir-ty/src/tests/method_resolution.rs +++ b/crates/hir-ty/src/tests/method_resolution.rs @@ -1725,14 +1725,13 @@ fn test() { #[test] fn receiver_adjustment_unsize_array() { - // FIXME not quite correct check( r#" //- minicore: slice fn test() { let a = [1, 2, 3]; a.len(); -} //^ adjustments: Pointer(Unsize), Borrow(Ref(Not)) +} //^ adjustments: Borrow(Ref(Not)), Pointer(Unsize) "#, ); } diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index dff60914409a5..5634bafd062f2 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -273,6 +273,24 @@ pub mod ops { } } + impl<T, I, const N: usize> Index<I> for [T; N] + where + I: SliceIndex<[T]>, + { + type Output = I::Output; + fn index(&self, index: I) -> &I::Output { + loop {} + } + } + impl<T, I, const N: usize> IndexMut<I> for [T; N] + where + I: SliceIndex<[T]>, + { + fn index_mut(&mut self, index: I) -> &mut I::Output { + loop {} + } + } + pub unsafe trait SliceIndex<T: ?Sized> { type Output: ?Sized; } From 3a20cbf9fc1e3049da8dcc64e572dfb4ff32dd8e Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Mon, 30 Jan 2023 15:55:47 -0700 Subject: [PATCH 227/501] rustdoc: remove inconsistently-present sidebar tooltips Discussed in https://rust-lang.zulipchat.com/#narrow/stream/266220-rustdoc/topic/Inconsistent.20sidebar.20tooltips/near/323565625 --- src/librustdoc/html/render/context.rs | 9 +++------ src/librustdoc/html/render/mod.rs | 3 --- src/librustdoc/html/static/js/main.js | 6 +----- tests/rustdoc/markdown-summaries.rs | 27 --------------------------- 4 files changed, 4 insertions(+), 41 deletions(-) delete mode 100644 tests/rustdoc/markdown-summaries.rs diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs index b59645ec2e2d5..1216a8d71c8cf 100644 --- a/src/librustdoc/html/render/context.rs +++ b/src/librustdoc/html/render/context.rs @@ -18,7 +18,7 @@ use super::search_index::build_index; use super::write_shared::write_shared; use super::{ collect_spans_and_sources, print_sidebar, scrape_examples_help, sidebar_module_like, AllTypes, - LinkFromSrc, NameDoc, StylePath, + LinkFromSrc,
StylePath, }; use crate::clean::{self, types::ExternalLocation, ExternalCrate}; @@ -256,7 +256,7 @@ impl<'tcx> Context<'tcx> { } /// Construct a map of items shown in the sidebar to a plain-text summary of their docs. - fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap<String, Vec<NameDoc>> { + fn build_sidebar_items(&self, m: &clean::Module) -> BTreeMap<String, Vec<String>> { // BTreeMap instead of HashMap to get a sorted output let mut map: BTreeMap<_, Vec<_>> = BTreeMap::new(); let mut inserted: FxHashMap<ItemType, FxHashSet<Symbol>> = FxHashMap::default(); @@ -274,10 +274,7 @@ impl<'tcx> Context<'tcx> { if inserted.entry(short).or_default().insert(myname) { let short = short.to_string(); let myname = myname.to_string(); - map.entry(short).or_default().push(( - myname, - Some(item.doc_value().map_or_else(String::new, |s| plain_text_summary(&s))), - )); + map.entry(short).or_default().push(myname); } } diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index 816a8f4e274ce..deebab1f0e80e 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -83,9 +83,6 @@ use crate::scrape_examples::{CallData, CallLocation}; use crate::try_none; use crate::DOC_RUST_LANG_ORG_CHANNEL; -/// A pair of name and its optional document. -pub(crate) type NameDoc = (String, Option<String>); - pub(crate) fn ensure_trailing_slash(v: &str) -> impl fmt::Display + '_ { crate::html::format::display_fn(move |f| { if !v.ends_with('/') && !v.is_empty() { write!(f, "{}/", v) } else { f.write_str(v) } diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index b9ad8ef70e917..b5cbc484601ca 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -456,10 +456,7 @@ function loadCss(cssUrl) { const ul = document.createElement("ul"); ul.className = "block " + shortty; - for (const item of filtered) { - const name = item[0]; - const desc = item[1]; // can be null - + for (const name of filtered) { let path; if (shortty === "mod") { path = name + "/index.html"; @@ -469,7 +466,6 @@ function loadCss(cssUrl) { const current_page = document.location.href.split("/").pop(); const link = document.createElement("a"); link.href = path; - link.title = desc; if (path === current_page) { link.className = "current"; } diff --git a/tests/rustdoc/markdown-summaries.rs b/tests/rustdoc/markdown-summaries.rs deleted file mode 100644 index 31e7072b5ce9b..0000000000000 --- a/tests/rustdoc/markdown-summaries.rs +++ /dev/null @@ -1,27 +0,0 @@ -#![crate_type = "lib"] -#![crate_name = "summaries"] - -//! This *summary* has a [link] and `code`. -//! -//! This is the second paragraph. -//! -//! [link]: https://example.com - -// @hasraw search-index.js 'This summary has a link and code.' -// @!hasraw - 'second paragraph' - -/// This `code` will be rendered in a code tag. -/// -/// This text should not be rendered. -pub struct Sidebar; - -// @hasraw search-index.js 'This code will be rendered in a code tag.' -// @hasraw summaries/sidebar-items.js 'This `code` will be rendered in a code tag.'
-// @!hasraw - 'text should not be rendered' - -/// ```text -/// this block should not be rendered -/// ``` -pub struct Sidebar2; - -// @!hasraw summaries/sidebar-items.js 'block should not be rendered' From 572e32b4cd64ddb946a1203652e1aa6d440aa2f5 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Tue, 31 Jan 2023 17:39:19 +0000 Subject: [PATCH 228/501] Fix rustc test suite --- scripts/test_rustc_tests.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/test_rustc_tests.sh b/scripts/test_rustc_tests.sh index 07c9ae6ee9ff2..e14a129dbc2d0 100755 --- a/scripts/test_rustc_tests.sh +++ b/scripts/test_rustc_tests.sh @@ -91,6 +91,7 @@ rm tests/ui/proc-macro/proc-macro-deprecated-attr.rs # same rm tests/ui/proc-macro/quote-debug.rs # same rm tests/ui/proc-macro/no-missing-docs.rs # same rm tests/ui/rust-2018/proc-macro-crate-in-paths.rs # same +rm tests/ui/proc-macro/allowed-signatures.rs # same # doesn't work due to the way the rustc test suite is invoked. # should work when using ./x.py test the way it is intended From d1024b6560c0b75b1347841d7ccb0dc36e0dffdd Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Tue, 31 Jan 2023 17:44:29 +0000 Subject: [PATCH 229/501] Fix y.rs bench --- build_system/bench.rs | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/build_system/bench.rs b/build_system/bench.rs index f48f7bece0282..196e2fd9a7d6d 100644 --- a/build_system/bench.rs +++ b/build_system/bench.rs @@ -32,12 +32,14 @@ fn benchmark_simple_raytracer(dirs: &Dirs, bootstrap_host_compiler: &Compiler) { std::process::exit(1); } - SIMPLE_RAYTRACER_REPO.fetch(dirs); - spawn_and_wait(SIMPLE_RAYTRACER.fetch( - &bootstrap_host_compiler.cargo, - &bootstrap_host_compiler.rustc, - dirs, - )); + if !SIMPLE_RAYTRACER_REPO.source_dir().to_path(dirs).exists() { + SIMPLE_RAYTRACER_REPO.fetch(dirs); + spawn_and_wait(SIMPLE_RAYTRACER.fetch( + &bootstrap_host_compiler.cargo, + &bootstrap_host_compiler.rustc, + dirs, + )); + } eprintln!("[LLVM BUILD] simple-raytracer"); let build_cmd = SIMPLE_RAYTRACER_LLVM.build(bootstrap_host_compiler, dirs); @@ -64,17 +66,17 @@ fn benchmark_simple_raytracer(dirs: &Dirs, bootstrap_host_compiler: &Compiler) { let target_dir = SIMPLE_RAYTRACER.target_dir(dirs); let clean_cmd = format!( - "cargo clean --manifest-path {manifest_path} --target-dir {target_dir}", + "RUSTC=rustc cargo clean --manifest-path {manifest_path} --target-dir {target_dir}", manifest_path = manifest_path.display(), target_dir = target_dir.display(), ); let llvm_build_cmd = format!( - "cargo build --manifest-path {manifest_path} --target-dir {target_dir}", + "RUSTC=rustc cargo build --manifest-path {manifest_path} --target-dir {target_dir}", manifest_path = manifest_path.display(), target_dir = target_dir.display(), ); let clif_build_cmd = format!( - "{cargo_clif} build --manifest-path {manifest_path} --target-dir {target_dir}", + "RUSTC=rustc {cargo_clif} build --manifest-path {manifest_path} --target-dir {target_dir}", cargo_clif = cargo_clif.display(), manifest_path = manifest_path.display(), target_dir = target_dir.display(), From 79f4cc0822578199fa9ffee6f0abf909b8976ab9 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Tue, 31 Jan 2023 17:49:08 +0000 Subject: [PATCH 230/501] Run y.rs bench in CI --- .github/workflows/main.yml | 42 ++++++++++++++++++++++++++++++++++++++ build_system/bench.rs | 11 ++++------ 2 files changed, 46 insertions(+), 7 deletions(-) diff 
--git a/.github/workflows/main.yml b/.github/workflows/main.yml index c0daf69e98e91..6e35d1e1dc260 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -185,3 +185,45 @@ jobs: env: TARGET_TRIPLE: ${{ matrix.env.TARGET_TRIPLE }} run: ./y.rs abi-cafe + + bench: + runs-on: ubuntu-latest + timeout-minutes: 60 + + defaults: + run: + shell: bash + + steps: + - uses: actions/checkout@v3 + + - name: Cache cargo target dir + uses: actions/cache@v3 + with: + path: build/cg_clif + key: ${{ runner.os }}-x86_64-unknown-linux-gnu-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }} + + - name: Cache cargo bin dir + uses: actions/cache@v3 + with: + path: ~/.cargo/bin + key: ${{ runner.os }}-${{ matrix.env.TARGET_TRIPLE }}-cargo-bin-dir-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }} + + - name: Use sparse cargo registry + run: | + cat >> ~/.cargo/config.toml < Date: Tue, 31 Jan 2023 23:23:37 +0100 Subject: [PATCH 231/501] update some dependencies --- Cargo.toml | 2 +- clippy_dev/Cargo.toml | 2 +- clippy_dev/src/main.rs | 99 +++++++++++++++++++++++------------------ clippy_lints/Cargo.toml | 2 +- lintcheck/Cargo.toml | 4 +- 5 files changed, 60 insertions(+), 49 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index dc94b1045249e..70d1268090f6e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,7 +42,7 @@ filetime = "0.2" rustc-workspace-hack = "1.0" # UI test dependencies -clap = { version = "3.1", features = ["derive"] } +clap = { version = "4.1.4", features = ["derive"] } clippy_utils = { path = "clippy_utils" } derive-new = "0.5" if_chain = "1.0" diff --git a/clippy_dev/Cargo.toml b/clippy_dev/Cargo.toml index 510c7e852af6e..c3f8a782d273a 100644 --- a/clippy_dev/Cargo.toml +++ b/clippy_dev/Cargo.toml @@ -5,7 +5,7 @@ edition = "2021" [dependencies] aho-corasick = "0.7" -clap = "3.2" +clap = "4.1.4" indoc = "1.0" itertools = "0.10.1" opener = "0.5" diff --git a/clippy_dev/src/main.rs b/clippy_dev/src/main.rs index d3e036692040f..b2d67a72fd2b8 100644 --- a/clippy_dev/src/main.rs +++ b/clippy_dev/src/main.rs @@ -2,7 +2,7 @@ // warn on lints, that are included in `rust-lang/rust`s bootstrap #![warn(rust_2018_idioms, unused_lifetimes)] -use clap::{Arg, ArgAction, ArgMatches, Command, PossibleValue}; +use clap::{Arg, ArgAction, ArgMatches, Command}; use clippy_dev::{bless, dogfood, fmt, lint, new_lint, serve, setup, update_lints}; use indoc::indoc; @@ -110,24 +110,37 @@ fn get_clap_config() -> ArgMatches { Command::new("bless").about("bless the test output changes").arg( Arg::new("ignore-timestamp") .long("ignore-timestamp") + .action(ArgAction::SetTrue) .help("Include files updated before clippy was built"), ), Command::new("dogfood").about("Runs the dogfood test").args([ - Arg::new("fix").long("fix").help("Apply the suggestions when possible"), + Arg::new("fix") + .long("fix") + .action(ArgAction::SetTrue) + .help("Apply the suggestions when possible"), Arg::new("allow-dirty") .long("allow-dirty") + .action(ArgAction::SetTrue) .help("Fix code even if the working directory has changes") .requires("fix"), Arg::new("allow-staged") .long("allow-staged") + .action(ArgAction::SetTrue) .help("Fix code even if the working directory has staged changes") .requires("fix"), ]), Command::new("fmt") .about("Run rustfmt on all projects and tests") .args([ - Arg::new("check").long("check").help("Use the rustfmt --check option"), - Arg::new("verbose").short('v').long("verbose").help("Echo commands run"), + Arg::new("check") + .long("check") + .action(ArgAction::SetTrue) + 
.help("Use the rustfmt --check option"), + Arg::new("verbose") + .short('v') + .long("verbose") + .action(ArgAction::SetTrue) + .help("Echo commands run"), ]), Command::new("update_lints") .about("Updates lint registration and information from the source code") @@ -140,13 +153,17 @@ fn get_clap_config() -> ArgMatches { * all lints are registered in the lint store", ) .args([ - Arg::new("print-only").long("print-only").help( - "Print a table of lints to STDOUT. \ - This does not include deprecated and internal lints. \ - (Does not modify any files)", - ), + Arg::new("print-only") + .long("print-only") + .action(ArgAction::SetTrue) + .help( + "Print a table of lints to STDOUT. \ + This does not include deprecated and internal lints. \ + (Does not modify any files)", + ), Arg::new("check") .long("check") + .action(ArgAction::SetTrue) .help("Checks that `cargo dev update_lints` has been run. Used on CI."), ]), Command::new("new_lint") @@ -156,15 +173,13 @@ fn get_clap_config() -> ArgMatches { .short('p') .long("pass") .help("Specify whether the lint runs during the early or late pass") - .takes_value(true) - .value_parser([PossibleValue::new("early"), PossibleValue::new("late")]) + .value_parser(["early", "late"]) .conflicts_with("type") .required_unless_present("type"), Arg::new("name") .short('n') .long("name") .help("Name of the new lint in snake case, ex: fn_too_long") - .takes_value(true) .required(true), Arg::new("category") .short('c') @@ -172,25 +187,23 @@ fn get_clap_config() -> ArgMatches { .help("What category the lint belongs to") .default_value("nursery") .value_parser([ - PossibleValue::new("style"), - PossibleValue::new("correctness"), - PossibleValue::new("suspicious"), - PossibleValue::new("complexity"), - PossibleValue::new("perf"), - PossibleValue::new("pedantic"), - PossibleValue::new("restriction"), - PossibleValue::new("cargo"), - PossibleValue::new("nursery"), - PossibleValue::new("internal"), - PossibleValue::new("internal_warn"), - ]) - .takes_value(true), - Arg::new("type") - .long("type") - .help("What directory the lint belongs in") - .takes_value(true) - .required(false), - Arg::new("msrv").long("msrv").help("Add MSRV config code to the lint"), + "style", + "correctness", + "suspicious", + "complexity", + "perf", + "pedantic", + "restriction", + "cargo", + "nursery", + "internal", + "internal_warn", + ]), + Arg::new("type").long("type").help("What directory the lint belongs in"), + Arg::new("msrv") + .long("msrv") + .action(ArgAction::SetTrue) + .help("Add MSRV config code to the lint"), ]), Command::new("setup") .about("Support for setting up your personal development environment") @@ -201,13 +214,12 @@ fn get_clap_config() -> ArgMatches { .args([ Arg::new("remove") .long("remove") - .help("Remove the dependencies added with 'cargo dev setup intellij'") - .required(false), + .action(ArgAction::SetTrue) + .help("Remove the dependencies added with 'cargo dev setup intellij'"), Arg::new("rustc-repo-path") .long("repo-path") .short('r') .help("The path to a rustc repo that will be used for setting the dependencies") - .takes_value(true) .value_name("path") .conflicts_with("remove") .required(true), @@ -217,26 +229,26 @@ fn get_clap_config() -> ArgMatches { .args([ Arg::new("remove") .long("remove") - .help("Remove the pre-commit hook added with 'cargo dev setup git-hook'") - .required(false), + .action(ArgAction::SetTrue) + .help("Remove the pre-commit hook added with 'cargo dev setup git-hook'"), Arg::new("force-override") .long("force-override") .short('f') - 
.help("Forces the override of an existing git pre-commit hook") - .required(false), + .action(ArgAction::SetTrue) + .help("Forces the override of an existing git pre-commit hook"), ]), Command::new("vscode-tasks") .about("Add several tasks to vscode for formatting, validation and testing") .args([ Arg::new("remove") .long("remove") - .help("Remove the tasks added with 'cargo dev setup vscode-tasks'") - .required(false), + .action(ArgAction::SetTrue) + .help("Remove the tasks added with 'cargo dev setup vscode-tasks'"), Arg::new("force-override") .long("force-override") .short('f') - .help("Forces the override of existing vscode tasks") - .required(false), + .action(ArgAction::SetTrue) + .help("Forces the override of existing vscode tasks"), ]), ]), Command::new("remove") @@ -295,6 +307,7 @@ fn get_clap_config() -> ArgMatches { .help("The new name of the lint"), Arg::new("uplift") .long("uplift") + .action(ArgAction::SetTrue) .help("This lint will be uplifted into rustc"), ]), Command::new("deprecate").about("Deprecates the given lint").args([ @@ -305,8 +318,6 @@ fn get_clap_config() -> ArgMatches { Arg::new("reason") .long("reason") .short('r') - .required(false) - .takes_value(true) .help("The reason for deprecation"), ]), ]) diff --git a/clippy_lints/Cargo.toml b/clippy_lints/Cargo.toml index 7278ad13d568a..989e4d3fa56c5 100644 --- a/clippy_lints/Cargo.toml +++ b/clippy_lints/Cargo.toml @@ -9,7 +9,7 @@ keywords = ["clippy", "lint", "plugin"] edition = "2021" [dependencies] -cargo_metadata = "0.14" +cargo_metadata = "0.15.3" clippy_utils = { path = "../clippy_utils" } declare_clippy_lint = { path = "../declare_clippy_lint" } if_chain = "1.0" diff --git a/lintcheck/Cargo.toml b/lintcheck/Cargo.toml index de31c16b819ef..653121af54dc3 100644 --- a/lintcheck/Cargo.toml +++ b/lintcheck/Cargo.toml @@ -10,8 +10,8 @@ edition = "2021" publish = false [dependencies] -cargo_metadata = "0.14" -clap = "3.2" +cargo_metadata = "0.15.3" +clap = "4.1.4" crossbeam-channel = "0.5.6" flate2 = "1.0" rayon = "1.5.1" From a3d32bbbbe06ffe42edbc4905e964d394de5ee02 Mon Sep 17 00:00:00 2001 From: SpanishPear Date: Wed, 1 Feb 2023 18:11:37 +1100 Subject: [PATCH 232/501] fix formatting + test syntax --- compiler/rustc_parse/src/parser/diagnostics.rs | 2 +- tests/ui/parser/suggest_misplaced_generics/enum.fixed | 1 - tests/ui/parser/suggest_misplaced_generics/enum.rs | 1 - .../parser/suggest_misplaced_generics/fn-complex-generics.fixed | 1 - .../ui/parser/suggest_misplaced_generics/fn-complex-generics.rs | 1 - tests/ui/parser/suggest_misplaced_generics/fn-simple.fixed | 1 - tests/ui/parser/suggest_misplaced_generics/fn-simple.rs | 1 - tests/ui/parser/suggest_misplaced_generics/struct.fixed | 1 - tests/ui/parser/suggest_misplaced_generics/struct.rs | 1 - tests/ui/parser/suggest_misplaced_generics/trait.fixed | 1 - tests/ui/parser/suggest_misplaced_generics/trait.rs | 1 - tests/ui/parser/suggest_misplaced_generics/type.fixed | 1 - tests/ui/parser/suggest_misplaced_generics/type.rs | 1 - 13 files changed, 1 insertion(+), 13 deletions(-) diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 1740f2c2c8455..2c6db485828bd 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -353,7 +353,7 @@ impl<'a> Parser<'a> { if !self.look_ahead(1, |t| *t == token::Lt) && let Ok(snippet) = self.sess.source_map().span_to_snippet(generic.span) { - err.multipart_suggestion_verbose( + err.multipart_suggestion_verbose( 
format!("place the generic parameter name after the {ident_name} name"), vec![ (self.token.span.shrink_to_hi(), snippet), diff --git a/tests/ui/parser/suggest_misplaced_generics/enum.fixed b/tests/ui/parser/suggest_misplaced_generics/enum.fixed index a9d3e9f86d09c..3332118a1e768 100644 --- a/tests/ui/parser/suggest_misplaced_generics/enum.fixed +++ b/tests/ui/parser/suggest_misplaced_generics/enum.fixed @@ -5,6 +5,5 @@ enum Foo { Variant(T) } //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the enum name -//~| SUGGESTION Foo fn main() {} diff --git a/tests/ui/parser/suggest_misplaced_generics/enum.rs b/tests/ui/parser/suggest_misplaced_generics/enum.rs index 2d216ba53cc72..5a2289c5c5ae2 100644 --- a/tests/ui/parser/suggest_misplaced_generics/enum.rs +++ b/tests/ui/parser/suggest_misplaced_generics/enum.rs @@ -5,6 +5,5 @@ enum Foo { Variant(T) } //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the enum name -//~| SUGGESTION Foo fn main() {} diff --git a/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed b/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed index 06947e098ee6a..84bf64bd63cf9 100644 --- a/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed +++ b/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.fixed @@ -5,6 +5,5 @@ fn f<'a, B: 'a + std::ops::Add>(_x: B) { } //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the fn name -//~| SUGGESTION f<'a, B: 'a + std::ops::Add> fn main() {} diff --git a/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs b/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs index cefce8d08806d..d0684397e744c 100644 --- a/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs +++ b/tests/ui/parser/suggest_misplaced_generics/fn-complex-generics.rs @@ -5,6 +5,5 @@ fn<'a, B: 'a + std::ops::Add> f(_x: B) { } //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the fn name -//~| SUGGESTION f<'a, B: 'a + std::ops::Add> fn main() {} diff --git a/tests/ui/parser/suggest_misplaced_generics/fn-simple.fixed b/tests/ui/parser/suggest_misplaced_generics/fn-simple.fixed index 31c5429b16b05..cbfd5f2d39c08 100644 --- a/tests/ui/parser/suggest_misplaced_generics/fn-simple.fixed +++ b/tests/ui/parser/suggest_misplaced_generics/fn-simple.fixed @@ -5,6 +5,5 @@ fn id(x: T) -> T { x } //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the fn name -//~| SUGGESTION id fn main() {} diff --git a/tests/ui/parser/suggest_misplaced_generics/fn-simple.rs b/tests/ui/parser/suggest_misplaced_generics/fn-simple.rs index 0a466184e996f..b207cf70d8584 100644 --- a/tests/ui/parser/suggest_misplaced_generics/fn-simple.rs +++ b/tests/ui/parser/suggest_misplaced_generics/fn-simple.rs @@ -5,6 +5,5 @@ fn id(x: T) -> T { x } //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the fn name -//~| SUGGESTION id fn main() {} diff --git a/tests/ui/parser/suggest_misplaced_generics/struct.fixed b/tests/ui/parser/suggest_misplaced_generics/struct.fixed index 8627699a83084..fec05bdeca15c 100644 --- a/tests/ui/parser/suggest_misplaced_generics/struct.fixed +++ b/tests/ui/parser/suggest_misplaced_generics/struct.fixed @@ -5,6 +5,5 @@ struct Foo { x: T } //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the struct name -//~| 
SUGGESTION Foo fn main() {} diff --git a/tests/ui/parser/suggest_misplaced_generics/struct.rs b/tests/ui/parser/suggest_misplaced_generics/struct.rs index 15646b06cfc62..6b80150d54656 100644 --- a/tests/ui/parser/suggest_misplaced_generics/struct.rs +++ b/tests/ui/parser/suggest_misplaced_generics/struct.rs @@ -5,6 +5,5 @@ struct Foo { x: T } //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the struct name -//~| SUGGESTION Foo fn main() {} diff --git a/tests/ui/parser/suggest_misplaced_generics/trait.fixed b/tests/ui/parser/suggest_misplaced_generics/trait.fixed index 31ebf1f088fc7..a471a078af142 100644 --- a/tests/ui/parser/suggest_misplaced_generics/trait.fixed +++ b/tests/ui/parser/suggest_misplaced_generics/trait.fixed @@ -5,7 +5,6 @@ trait Foo { //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the trait name - //~| SUGGESTION Foo } diff --git a/tests/ui/parser/suggest_misplaced_generics/trait.rs b/tests/ui/parser/suggest_misplaced_generics/trait.rs index 81b6abbd66163..55355f451f9fd 100644 --- a/tests/ui/parser/suggest_misplaced_generics/trait.rs +++ b/tests/ui/parser/suggest_misplaced_generics/trait.rs @@ -5,7 +5,6 @@ trait Foo { //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the trait name - //~| SUGGESTION Foo } diff --git a/tests/ui/parser/suggest_misplaced_generics/type.fixed b/tests/ui/parser/suggest_misplaced_generics/type.fixed index b04003b803d1c..a97b9e66d0b2b 100644 --- a/tests/ui/parser/suggest_misplaced_generics/type.fixed +++ b/tests/ui/parser/suggest_misplaced_generics/type.fixed @@ -5,6 +5,5 @@ type Foo = T; //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the type name -//~| SUGGESTION Foo fn main() {} diff --git a/tests/ui/parser/suggest_misplaced_generics/type.rs b/tests/ui/parser/suggest_misplaced_generics/type.rs index 2d759a8b1ab61..17e200536fa3e 100644 --- a/tests/ui/parser/suggest_misplaced_generics/type.rs +++ b/tests/ui/parser/suggest_misplaced_generics/type.rs @@ -5,6 +5,5 @@ type Foo = T; //~^ ERROR expected identifier, found `<` //~| HELP place the generic parameter name after the type name -//~| SUGGESTION Foo fn main() {} From c40b0895f09777be72bb6e875c1c0aefb9ccf316 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 1 Feb 2023 11:04:20 +0100 Subject: [PATCH 233/501] Implement proc-macro-api versioning --- crates/proc-macro-api/src/lib.rs | 4 +-- crates/proc-macro-api/src/msg.rs | 5 ++++ crates/proc-macro-api/src/process.rs | 38 +++++++++++++++++++++++----- crates/proc-macro-srv/src/cli.rs | 3 +++ 4 files changed, 42 insertions(+), 8 deletions(-) diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index bb381c4d44e9d..3f5e8a5e80b9a 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -115,7 +115,7 @@ impl ProcMacroServer { /// Spawns an external process as the proc macro server and returns a client connected to it. pub fn spawn( process_path: AbsPathBuf, - args: impl IntoIterator>, + args: impl IntoIterator> + Clone, ) -> io::Result { let process = ProcMacroProcessSrv::run(process_path, args)?; Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) }) @@ -174,7 +174,7 @@ impl ProcMacro { let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?; match response { msg::Response::ExpandMacro(it) => Ok(it.map(FlatTree::to_subtree)), - msg::Response::ListMacros { .. 
} => { + msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => { Err(ServerError { message: "unexpected response".to_string(), io: None }) } } diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index 9b7bcaeffe7f7..262483d2cbee6 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -12,16 +12,21 @@ use crate::ProcMacroKind; pub use crate::msg::flat::FlatTree; +pub const NO_VERSION_CHECK_VERSION: u32 = 0; +pub const API_VERSION: u32 = 1; + #[derive(Debug, Serialize, Deserialize)] pub enum Request { ListMacros { dylib_path: PathBuf }, ExpandMacro(ExpandMacro), + ApiVersionCheck {}, } #[derive(Debug, Serialize, Deserialize)] pub enum Response { ListMacros(Result, String>), ExpandMacro(Result), + ApiVersionCheck(u32), } #[derive(Debug, Serialize, Deserialize)] diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 54dcb17f4e8b0..da4527c64f24e 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -19,19 +19,45 @@ pub(crate) struct ProcMacroProcessSrv { _process: Process, stdin: ChildStdin, stdout: BufReader, + version: u32, } impl ProcMacroProcessSrv { pub(crate) fn run( process_path: AbsPathBuf, - args: impl IntoIterator>, + args: impl IntoIterator> + Clone, ) -> io::Result { - let mut process = Process::run(process_path, args)?; - let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); + let create_srv = || { + let mut process = Process::run(process_path.clone(), args.clone())?; + let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); + + io::Result::Ok(ProcMacroProcessSrv { _process: process, stdin, stdout, version: 0 }) + }; + let mut srv = create_srv()?; + tracing::info!("sending version check"); + match srv.version_check() { + Ok(v) => { + tracing::info!("got version {v}"); + srv.version = v; + Ok(srv) + } + Err(e) => { + tracing::info!(%e, "proc-macro version check failed, restarting and assuming version 0"); + create_srv() + } + } + } - let srv = ProcMacroProcessSrv { _process: process, stdin, stdout }; + pub(crate) fn version_check(&mut self) -> Result { + let request = Request::ApiVersionCheck {}; + let response = self.send_task(request)?; - Ok(srv) + match response { + Response::ApiVersionCheck(version) => Ok(version), + Response::ExpandMacro { .. } | Response::ListMacros { .. } => { + Err(ServerError { message: "unexpected response".to_string(), io: None }) + } + } } pub(crate) fn find_proc_macros( @@ -44,7 +70,7 @@ impl ProcMacroProcessSrv { match response { Response::ListMacros(it) => Ok(it), - Response::ExpandMacro { .. } => { + Response::ExpandMacro { .. } | Response::ApiVersionCheck { .. } => { Err(ServerError { message: "unexpected response".to_string(), io: None }) } } diff --git a/crates/proc-macro-srv/src/cli.rs b/crates/proc-macro-srv/src/cli.rs index f1e131c135d83..eaf94714dd791 100644 --- a/crates/proc-macro-srv/src/cli.rs +++ b/crates/proc-macro-srv/src/cli.rs @@ -15,6 +15,9 @@ pub fn run() -> io::Result<()> { msg::Response::ListMacros(srv.list_macros(&dylib_path)) } msg::Request::ExpandMacro(task) => msg::Response::ExpandMacro(srv.expand(task)), + msg::Request::ApiVersionCheck {} => { + msg::Response::ApiVersionCheck(proc_macro_api::msg::API_VERSION) + } }; write_response(res)? 
} From 14f19c73e9b548a654e95443095e1d267bc64efe Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 1 Feb 2023 11:38:39 +0100 Subject: [PATCH 234/501] fix: Don't render fieldless discriminant inlay hints for datacarrying enums --- crates/ide/src/inlay_hints.rs | 4 +- crates/ide/src/inlay_hints/discriminant.rs | 69 ++++++++++++++-------- 2 files changed, 47 insertions(+), 26 deletions(-) diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index 3d4a3466e2d19..a9581db9ad493 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -422,11 +422,9 @@ fn hints( // static type elisions ast::Item::Static(it) => implicit_static::hints(hints, config, Either::Left(it)), ast::Item::Const(it) => implicit_static::hints(hints, config, Either::Right(it)), + ast::Item::Enum(it) => discriminant::enum_hints(hints, famous_defs, config, file_id, it), _ => None, }, - ast::Variant(v) => { - discriminant::hints(hints, famous_defs, config, file_id, &v) - }, // FIXME: fn-ptr type, dyn fn type, and trait object type elisions ast::Type(_) => None, _ => None, diff --git a/crates/ide/src/inlay_hints/discriminant.rs b/crates/ide/src/inlay_hints/discriminant.rs index 310295cc37935..c5c947150b377 100644 --- a/crates/ide/src/inlay_hints/discriminant.rs +++ b/crates/ide/src/inlay_hints/discriminant.rs @@ -4,29 +4,40 @@ //! Bar/* = 0*/, //! } //! ``` -use ide_db::{base_db::FileId, famous_defs::FamousDefs}; +use hir::Semantics; +use ide_db::{base_db::FileId, famous_defs::FamousDefs, RootDatabase}; use syntax::ast::{self, AstNode, HasName}; use crate::{ DiscriminantHints, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, InlayTooltip, }; -pub(super) fn hints( +pub(super) fn enum_hints( acc: &mut Vec, FamousDefs(sema, _): &FamousDefs<'_, '_>, config: &InlayHintsConfig, _: FileId, - variant: &ast::Variant, + enum_: ast::Enum, ) -> Option<()> { - let field_list = match config.discriminant_hints { - DiscriminantHints::Always => variant.field_list(), - DiscriminantHints::Fieldless => match variant.field_list() { - Some(_) => return None, - None => None, - }, - DiscriminantHints::Never => return None, + let disabled = match config.discriminant_hints { + DiscriminantHints::Always => false, + DiscriminantHints::Fieldless => sema.to_def(&enum_)?.is_data_carrying(sema.db), + DiscriminantHints::Never => true, }; + if disabled { + return None; + } + for variant in enum_.variant_list()?.variants() { + variant_hints(acc, sema, &variant); + } + None +} +fn variant_hints( + acc: &mut Vec, + sema: &Semantics<'_, RootDatabase>, + variant: &ast::Variant, +) -> Option<()> { if variant.eq_token().is_some() { return None; } @@ -39,7 +50,7 @@ pub(super) fn hints( let d = v.eval(sema.db); acc.push(InlayHint { - range: match field_list { + range: match variant.field_list() { Some(field_list) => name.syntax().text_range().cover(field_list.syntax().text_range()), None => name.syntax().text_range(), }, @@ -91,15 +102,30 @@ mod tests { check_discriminants( r#" enum Enum { - Variant, - //^^^^^^^0 - Variant1, - //^^^^^^^^1 - Variant2, - //^^^^^^^^2 - Variant5 = 5, - Variant6, - //^^^^^^^^6 + Variant, +//^^^^^^^0 + Variant1, +//^^^^^^^^1 + Variant2, +//^^^^^^^^2 + Variant5 = 5, + Variant6, +//^^^^^^^^6 +} +"#, + ); + check_discriminants_fieldless( + r#" +enum Enum { + Variant, +//^^^^^^^0 + Variant1, +//^^^^^^^^1 + Variant2, +//^^^^^^^^2 + Variant5 = 5, + Variant6, +//^^^^^^^^6 } "#, ); @@ -133,13 +159,10 @@ enum Enum { enum Enum { Variant(), Variant1, - //^^^^^^^^1 Variant2 {}, Variant3, - //^^^^^^^^3 
Variant5 = 5, Variant6, - //^^^^^^^^6 } "#, ); From df04fd6fba2e1b2b85a108e3ec7e56c731456ba9 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Wed, 1 Feb 2023 13:55:28 +0000 Subject: [PATCH 235/501] Don't force many scalar pair values to the stack in write_cvalue_maybe_transmute Sometimes it is necessary for handling vector to scalar pair transmutes, but if the types are the same there is no need for this. This improves runtime performance on simple-raytracer by 12%. --- src/abi/comments.rs | 4 ++-- src/value_and_place.rs | 11 +++++++---- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/src/abi/comments.rs b/src/abi/comments.rs index 7f4619b5c940b..abf63e33c3537 100644 --- a/src/abi/comments.rs +++ b/src/abi/comments.rs @@ -98,12 +98,12 @@ pub(super) fn add_local_place_comments<'tcx>( } CPlaceInner::VarPair(place_local, var1, var2) => { assert_eq!(local, place_local); - ("ssa", Cow::Owned(format!(",var=({}, {})", var1.index(), var2.index()))) + ("ssa", Cow::Owned(format!("var=({}, {})", var1.index(), var2.index()))) } CPlaceInner::VarLane(_local, _var, _lane) => unreachable!(), CPlaceInner::Addr(ptr, meta) => { let meta = if let Some(meta) = meta { - Cow::Owned(format!(",meta={}", meta)) + Cow::Owned(format!("meta={}", meta)) } else { Cow::Borrowed("") }; diff --git a/src/value_and_place.rs b/src/value_and_place.rs index fa06d6c3ba7f3..cc4493d442f7d 100644 --- a/src/value_and_place.rs +++ b/src/value_and_place.rs @@ -588,10 +588,13 @@ impl<'tcx> CPlace<'tcx> { return; } CPlaceInner::VarPair(_local, var1, var2) => { - let (ptr, meta) = from.force_stack(fx); - assert!(meta.is_none()); - let (data1, data2) = - CValue(CValueInner::ByRef(ptr, None), dst_layout).load_scalar_pair(fx); + let (data1, data2) = if self.layout().ty == dst_layout.ty { + CValue(from.0, dst_layout).load_scalar_pair(fx) + } else { + let (ptr, meta) = from.force_stack(fx); + assert!(meta.is_none()); + CValue(CValueInner::ByRef(ptr, None), dst_layout).load_scalar_pair(fx) + }; let (dst_ty1, dst_ty2) = fx.clif_pair_type(self.layout().ty).unwrap(); transmute_value(fx, var1, data1, dst_ty1); transmute_value(fx, var2, data2, dst_ty2); From 78bf4af6592c41e9595091a3b24e689190d8b972 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Wed, 1 Feb 2023 13:58:05 +0000 Subject: [PATCH 236/501] Fix hyperfine install on CI --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6e35d1e1dc260..92a1d8c189779 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -217,7 +217,7 @@ jobs: EOF - name: Install hyperfine - run: cargo install hyperfine + run: cargo install hyperfine || true - name: Prepare dependencies run: ./y.rs prepare From 21bdff8bc015b557bfae85b968dc4c100be85caf Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Wed, 1 Feb 2023 15:28:06 +0100 Subject: [PATCH 237/501] Fix transmute from vector to scalar pair again --- src/value_and_place.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/value_and_place.rs b/src/value_and_place.rs index cc4493d442f7d..320eecaee008e 100644 --- a/src/value_and_place.rs +++ b/src/value_and_place.rs @@ -588,7 +588,7 @@ impl<'tcx> CPlace<'tcx> { return; } CPlaceInner::VarPair(_local, var1, var2) => { - let (data1, data2) = if self.layout().ty == dst_layout.ty { + let (data1, data2) = if from.layout().ty == dst_layout.ty { CValue(from.0, 
dst_layout).load_scalar_pair(fx) } else { let (ptr, meta) = from.force_stack(fx); From 24d3e937ca7e861201e13f7458267260a6f643bf Mon Sep 17 00:00:00 2001 From: Eric Sink Date: Wed, 1 Feb 2023 09:48:22 -0600 Subject: [PATCH 238/501] Expand section on Visual Studio to mention all three available rust-analyzer extensions for that IDE. --- docs/user/manual.adoc | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 2cd1188c9df33..73f2d37c7987a 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -520,13 +520,42 @@ There is a package named `ra_ap_rust_analyzer` available on https://crates.io/cr For more details, see https://github.com/rust-lang/rust-analyzer/blob/master/.github/workflows/publish.yml[the publish workflow]. -=== Visual Studio IDE +=== Visual Studio 2022 + +There are multiple rust-analyzer extensions for Visual Studio 2022 on Windows: + +==== rust-analyzer.vs + +(License: Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International) + +https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer[Visual Studio Marketplace] + +https://github.com/kitamstudios/rust-analyzer/[GitHub] Support for Rust development in the Visual Studio IDE is enabled by the link:https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer[rust-analyzer] package. Either click on the download link or install from IDE's extension manager. For now link:https://visualstudio.microsoft.com/downloads/[Visual Studio 2022] is required. All editions are supported viz. Community, Professional & Enterprise. The package aims to provide 0-friction installation and therefore comes loaded with most things required including rust-analyzer binary. If anything it needs is missing, appropriate errors / warnings will guide the user. E.g. cargo.exe needs to be in path and the package will tell you as much. This package is under rapid active development. So if you encounter any issues please file it at link:https://github.com/kitamstudios/rust-analyzer/[rust-analyzer.vs]. +==== VS_RustAnalyzer + +(License: GPL) + +https://marketplace.visualstudio.com/items?itemName=cchharris.vsrustanalyzer[Visual Studio Marketplace] + +https://github.com/cchharris/VS-RustAnalyzer[GitHub] + +==== SourceGear Rust + +(License: closed source) + +https://marketplace.visualstudio.com/items?itemName=SourceGear.SourceGearRust[Visual Studio Marketplace] + +https://github.com/sourcegear/rust-vs-extension[GitHub (docs, issues, discussions)] + +* Free (no-cost) +* Supports all editions of Visual Studio 2022 on Windows: Community, Professional, or Enterprise + == Troubleshooting Start with looking at the rust-analyzer version. 
From ff340f9c211c4788bd993f6db7e9f10c23560714 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Wed, 1 Feb 2023 19:06:09 +0200 Subject: [PATCH 239/501] Bump zip --- Cargo.lock | 16 +++------------- crates/proc-macro-api/src/lib.rs | 2 +- crates/profile/src/lib.rs | 2 +- xtask/Cargo.toml | 1 + xtask/src/dist.rs | 9 +++++++-- 5 files changed, 13 insertions(+), 17 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b83c3778c2ca6..c1f146411b232 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1832,10 +1832,8 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" dependencies = [ - "itoa", "serde", "time-core", - "time-macros", ] [[package]] @@ -1844,15 +1842,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" -[[package]] -name = "time-macros" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" -dependencies = [ - "time-core", -] - [[package]] name = "tinyvec" version = "1.6.0" @@ -2205,6 +2194,7 @@ version = "0.1.0" dependencies = [ "anyhow", "flate2", + "time", "write-json", "xflags", "xshell", @@ -2213,9 +2203,9 @@ dependencies = [ [[package]] name = "zip" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "537ce7411d25e54e8ae21a7ce0b15840e7bfcff15b51d697ec3266cc76bdf080" +checksum = "0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef" dependencies = [ "byteorder", "crc32fast", diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index bb381c4d44e9d..a64ba7510e7f1 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -71,7 +71,7 @@ impl MacroDylib { /// A handle to a specific macro (a `#[proc_macro]` annotated function). /// -/// It exists withing a context of a specific [`ProcMacroProcess`] -- currently +/// It exists within a context of a specific [`ProcMacroProcess`] -- currently /// we share a single expander process for all macros. #[derive(Debug, Clone)] pub struct ProcMacro { diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs index 7ca3c7d629570..e7fc3d970bfff 100644 --- a/crates/profile/src/lib.rs +++ b/crates/profile/src/lib.rs @@ -26,7 +26,7 @@ pub use countme::Count; thread_local!(static IN_SCOPE: RefCell = RefCell::new(false)); -/// Allows to check if the current code is withing some dynamic scope, can be +/// Allows to check if the current code is within some dynamic scope, can be /// useful during debugging to figure out why a function is called. 
pub struct Scope { prev: bool, diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 2dd01796c6e41..b4b294c3099fa 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -12,5 +12,6 @@ flate2 = "1.0.24" write-json = "0.1.2" xshell = "0.2.2" xflags = "0.3.0" +time = { version = "0.3", default-features = false } zip = { version = "0.6", default-features = false, features = ["deflate", "time"] } # Avoid adding more dependencies to this crate diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs index 74715c53eaac5..5a03c71b28a98 100644 --- a/xtask/src/dist.rs +++ b/xtask/src/dist.rs @@ -6,6 +6,7 @@ use std::{ }; use flate2::{write::GzEncoder, Compression}; +use time::OffsetDateTime; use xshell::{cmd, Shell}; use zip::{write::FileOptions, DateTime, ZipWriter}; @@ -112,7 +113,8 @@ fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> any src_path.file_name().unwrap().to_str().unwrap(), FileOptions::default() .last_modified_time( - DateTime::from_time(std::fs::metadata(src_path)?.modified()?.into()).unwrap(), + DateTime::try_from(OffsetDateTime::from(std::fs::metadata(src_path)?.modified()?)) + .unwrap(), ) .unix_permissions(0o755) .compression_method(zip::CompressionMethod::Deflated) @@ -125,7 +127,10 @@ fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> any symbols_path.file_name().unwrap().to_str().unwrap(), FileOptions::default() .last_modified_time( - DateTime::from_time(std::fs::metadata(src_path)?.modified()?.into()).unwrap(), + DateTime::try_from(OffsetDateTime::from( + std::fs::metadata(src_path)?.modified()?, + )) + .unwrap(), ) .compression_method(zip::CompressionMethod::Deflated) .compression_level(Some(9)), From f7d59b2e574888912cf7fb670c742b4636e451d0 Mon Sep 17 00:00:00 2001 From: Mara Bos Date: Wed, 1 Feb 2023 22:50:43 +0100 Subject: [PATCH 240/501] Don't depend on FormatArgsExpn in ManualAssert. --- clippy_lints/src/manual_assert.rs | 97 +++++++++++++++---------------- 1 file changed, 48 insertions(+), 49 deletions(-) diff --git a/clippy_lints/src/manual_assert.rs b/clippy_lints/src/manual_assert.rs index 4277455a3a21c..ce5d657bcf0e3 100644 --- a/clippy_lints/src/manual_assert.rs +++ b/clippy_lints/src/manual_assert.rs @@ -1,7 +1,6 @@ use crate::rustc_lint::LintContext; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::macros::{root_macro_call, FormatArgsExpn}; -use clippy_utils::source::snippet_with_applicability; +use clippy_utils::macros::root_macro_call; use clippy_utils::{is_else_clause, peel_blocks_with_stmt, span_extract_comment, sugg}; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, UnOp}; @@ -38,57 +37,57 @@ declare_lint_pass!(ManualAssert => [MANUAL_ASSERT]); impl<'tcx> LateLintPass<'tcx> for ManualAssert { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &Expr<'tcx>) { - if_chain! 
{ - if let ExprKind::If(cond, then, None) = expr.kind; - if !matches!(cond.kind, ExprKind::Let(_)); - if !expr.span.from_expansion(); - let then = peel_blocks_with_stmt(then); - if let Some(macro_call) = root_macro_call(then.span); - if cx.tcx.item_name(macro_call.def_id) == sym::panic; - if !cx.tcx.sess.source_map().is_multiline(cond.span); - if let Some(format_args) = FormatArgsExpn::find_nested(cx, then, macro_call.expn); + if let ExprKind::If(cond, then, None) = expr.kind + && !matches!(cond.kind, ExprKind::Let(_)) + && !expr.span.from_expansion() + && let then = peel_blocks_with_stmt(then) + && let Some(macro_call) = root_macro_call(then.span) + && cx.tcx.item_name(macro_call.def_id) == sym::panic + && !cx.tcx.sess.source_map().is_multiline(cond.span) + && let Ok(panic_snippet) = cx.sess().source_map().span_to_snippet(macro_call.span) + && let Some(panic_snippet) = panic_snippet.strip_suffix(')') + && let Some((_, format_args_snip)) = panic_snippet.split_once('(') // Don't change `else if foo { panic!(..) }` to `else { assert!(foo, ..) }` as it just // shuffles the condition around. // Should this have a config value? - if !is_else_clause(cx.tcx, expr); - then { - let mut applicability = Applicability::MachineApplicable; - let format_args_snip = snippet_with_applicability(cx, format_args.inputs_span(), "..", &mut applicability); - let cond = cond.peel_drop_temps(); - let mut comments = span_extract_comment(cx.sess().source_map(), expr.span); - if !comments.is_empty() { - comments += "\n"; - } - let (cond, not) = match cond.kind { - ExprKind::Unary(UnOp::Not, e) => (e, ""), - _ => (cond, "!"), - }; - let cond_sugg = sugg::Sugg::hir_with_applicability(cx, cond, "..", &mut applicability).maybe_par(); - let sugg = format!("assert!({not}{cond_sugg}, {format_args_snip});"); - // we show to the user the suggestion without the comments, but when applicating the fix, include the comments in the block - span_lint_and_then( - cx, - MANUAL_ASSERT, - expr.span, - "only a `panic!` in `if`-then statement", - |diag| { - // comments can be noisy, do not show them to the user - if !comments.is_empty() { - diag.tool_only_span_suggestion( - expr.span.shrink_to_lo(), - "add comments back", - comments, - applicability); - } - diag.span_suggestion( - expr.span, - "try instead", - sugg, - applicability); - } - - ); + && !is_else_clause(cx.tcx, expr) + { + let mut applicability = Applicability::MachineApplicable; + let cond = cond.peel_drop_temps(); + let mut comments = span_extract_comment(cx.sess().source_map(), expr.span); + if !comments.is_empty() { + comments += "\n"; } + let (cond, not) = match cond.kind { + ExprKind::Unary(UnOp::Not, e) => (e, ""), + _ => (cond, "!"), + }; + let cond_sugg = sugg::Sugg::hir_with_applicability(cx, cond, "..", &mut applicability).maybe_par(); + let sugg = format!("assert!({not}{cond_sugg}, {format_args_snip});"); + // we show to the user the suggestion without the comments, but when applicating the fix, include the comments in the block + span_lint_and_then( + cx, + MANUAL_ASSERT, + expr.span, + "only a `panic!` in `if`-then statement", + |diag| { + // comments can be noisy, do not show them to the user + if !comments.is_empty() { + diag.tool_only_span_suggestion( + expr.span.shrink_to_lo(), + "add comments back", + comments, + applicability + ); + } + diag.span_suggestion( + expr.span, + "try instead", + sugg, + applicability + ); + } + ); } } } From ecd98bad45841d30ff83269b94da7a8c06da0516 Mon Sep 17 00:00:00 2001 From: Mara Bos Date: Wed, 1 Feb 2023 22:51:02 +0100 
Subject: [PATCH 241/501] Bless tests. --- tests/ui/manual_assert.edition2018.fixed | 35 ++++-------- tests/ui/manual_assert.edition2018.stderr | 67 ++++++++++++++++++++++- 2 files changed, 77 insertions(+), 25 deletions(-) diff --git a/tests/ui/manual_assert.edition2018.fixed b/tests/ui/manual_assert.edition2018.fixed index 638320dd6eec4..8c7e919bf62a1 100644 --- a/tests/ui/manual_assert.edition2018.fixed +++ b/tests/ui/manual_assert.edition2018.fixed @@ -29,9 +29,7 @@ fn main() { panic!("qaqaq{:?}", a); } assert!(a.is_empty(), "qaqaq{:?}", a); - if !a.is_empty() { - panic!("qwqwq"); - } + assert!(a.is_empty(), "qwqwq"); if a.len() == 3 { println!("qwq"); println!("qwq"); @@ -46,21 +44,11 @@ fn main() { println!("qwq"); } let b = vec![1, 2, 3]; - if b.is_empty() { - panic!("panic1"); - } - if b.is_empty() && a.is_empty() { - panic!("panic2"); - } - if a.is_empty() && !b.is_empty() { - panic!("panic3"); - } - if b.is_empty() || a.is_empty() { - panic!("panic4"); - } - if a.is_empty() || !b.is_empty() { - panic!("panic5"); - } + assert!(!b.is_empty(), "panic1"); + assert!(!(b.is_empty() && a.is_empty()), "panic2"); + assert!(!(a.is_empty() && !b.is_empty()), "panic3"); + assert!(!(b.is_empty() || a.is_empty()), "panic4"); + assert!(!(a.is_empty() || !b.is_empty()), "panic5"); assert!(!a.is_empty(), "with expansion {}", one!()); if a.is_empty() { let _ = 0; @@ -71,12 +59,11 @@ fn main() { fn issue7730(a: u8) { // Suggestion should preserve comment - if a > 2 { - // comment - /* this is a + // comment +/* this is a multiline comment */ - /// Doc comment - panic!("panic with comment") // comment after `panic!` - } +/// Doc comment +// comment after `panic!` +assert!(!(a > 2), "panic with comment"); } diff --git a/tests/ui/manual_assert.edition2018.stderr b/tests/ui/manual_assert.edition2018.stderr index 1f2e1e3087bd0..3555ac29243a1 100644 --- a/tests/ui/manual_assert.edition2018.stderr +++ b/tests/ui/manual_assert.edition2018.stderr @@ -8,6 +8,54 @@ LL | | } | = note: `-D clippy::manual-assert` implied by `-D warnings` +error: only a `panic!` in `if`-then statement + --> $DIR/manual_assert.rs:34:5 + | +LL | / if !a.is_empty() { +LL | | panic!("qwqwq"); +LL | | } + | |_____^ help: try instead: `assert!(a.is_empty(), "qwqwq");` + +error: only a `panic!` in `if`-then statement + --> $DIR/manual_assert.rs:51:5 + | +LL | / if b.is_empty() { +LL | | panic!("panic1"); +LL | | } + | |_____^ help: try instead: `assert!(!b.is_empty(), "panic1");` + +error: only a `panic!` in `if`-then statement + --> $DIR/manual_assert.rs:54:5 + | +LL | / if b.is_empty() && a.is_empty() { +LL | | panic!("panic2"); +LL | | } + | |_____^ help: try instead: `assert!(!(b.is_empty() && a.is_empty()), "panic2");` + +error: only a `panic!` in `if`-then statement + --> $DIR/manual_assert.rs:57:5 + | +LL | / if a.is_empty() && !b.is_empty() { +LL | | panic!("panic3"); +LL | | } + | |_____^ help: try instead: `assert!(!(a.is_empty() && !b.is_empty()), "panic3");` + +error: only a `panic!` in `if`-then statement + --> $DIR/manual_assert.rs:60:5 + | +LL | / if b.is_empty() || a.is_empty() { +LL | | panic!("panic4"); +LL | | } + | |_____^ help: try instead: `assert!(!(b.is_empty() || a.is_empty()), "panic4");` + +error: only a `panic!` in `if`-then statement + --> $DIR/manual_assert.rs:63:5 + | +LL | / if a.is_empty() || !b.is_empty() { +LL | | panic!("panic5"); +LL | | } + | |_____^ help: try instead: `assert!(!(a.is_empty() || !b.is_empty()), "panic5");` + error: only a `panic!` in `if`-then statement --> $DIR/manual_assert.rs:66:5 
| @@ -16,5 +64,22 @@ LL | | panic!("with expansion {}", one!()) LL | | } | |_____^ help: try instead: `assert!(!a.is_empty(), "with expansion {}", one!());` -error: aborting due to 2 previous errors +error: only a `panic!` in `if`-then statement + --> $DIR/manual_assert.rs:78:5 + | +LL | / if a > 2 { +LL | | // comment +LL | | /* this is a +LL | | multiline +... | +LL | | panic!("panic with comment") // comment after `panic!` +LL | | } + | |_____^ + | +help: try instead + | +LL | assert!(!(a > 2), "panic with comment"); + | + +error: aborting due to 9 previous errors From 493cabbde7f5838cbc0e1ccebc15dd6a5c82c1c5 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Thu, 2 Feb 2023 17:47:11 +0900 Subject: [PATCH 242/501] Treat scope info retrieval failure as assist failure --- .../src/handlers/generate_function.rs | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index fa93a4c887621..45b27a63ce26d 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -290,7 +290,7 @@ impl FunctionBuilder { ); let (generic_param_list, where_clause) = - fn_generic_params(ctx, necessary_generic_params, &target); + fn_generic_params(ctx, necessary_generic_params, &target)?; Some(Self { target, @@ -336,7 +336,7 @@ impl FunctionBuilder { ); let (generic_param_list, where_clause) = - fn_generic_params(ctx, necessary_generic_params, &target); + fn_generic_params(ctx, necessary_generic_params, &target)?; Some(Self { target, @@ -551,7 +551,8 @@ fn fn_args( )) } -/// Gets parameter bounds and where predicates in scope and filters out irrelevant ones. +/// Gets parameter bounds and where predicates in scope and filters out irrelevant ones. Returns +/// `None` when it fails to get scope information. /// /// See comment on `filter_unnecessary_bounds()` for what bounds we consider relevant. /// @@ -562,10 +563,10 @@ fn fn_generic_params( ctx: &AssistContext<'_>, necessary_params: FxHashSet, target: &GeneratedFunctionTarget, -) -> (Option, Option) { +) -> Option<(Option, Option)> { if necessary_params.is_empty() { // Not really needed but fast path. - return (None, None); + return Some((None, None)); } // 1. Get generic parameters (with bounds) and where predicates in scope. @@ -592,8 +593,8 @@ fn fn_generic_params( // 4. 
Rewrite paths if let Some(param) = generic_params.first() { - let source_scope = ctx.sema.scope(param.syntax()).unwrap(); - let target_scope = ctx.sema.scope(&target.parent()).unwrap(); + let source_scope = ctx.sema.scope(param.syntax())?; + let target_scope = ctx.sema.scope(&target.parent())?; if source_scope.module() != target_scope.module() { let transform = PathTransform::generic_transformation(&target_scope, &source_scope); let generic_params = generic_params.iter().map(|it| it.syntax()); @@ -606,7 +607,7 @@ fn fn_generic_params( let where_clause = if where_preds.is_empty() { None } else { Some(make::where_clause(where_preds)) }; - (Some(generic_param_list), where_clause) + Some((Some(generic_param_list), where_clause)) } fn params_and_where_preds_in_scope( From f112d3f75686295ca835c53a0281152c6a43fbe4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 29 Jan 2023 18:41:37 +0200 Subject: [PATCH 243/501] Add a manual section for Flatpak --- docs/user/manual.adoc | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 2cd1188c9df33..e422dc3d53a42 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -232,6 +232,27 @@ The `rust-analyzer` binary can be installed via https://brew.sh/[Homebrew]. $ brew install rust-analyzer ---- +=== VS Code or VSCodium in Flatpak + +Setting up `rust-analyzer` with a Flatpak version of Code is not trivial because of the Flatpak sandbox. +While the sandbox can be disabled for some directories, `/usr/bin` will always be mounted under `/run/host/usr/bin`. +This prevents access to the system's C compiler, a system-wide installation of Rust, or any other libraries you might want to link to. +Some compilers and libraries can be acquired as Flatpak SDKs, such as `org.freedesktop.Sdk.Extension.rust-stable` or `org.freedesktop.Sdk.Extension.llvm15`. + +If you use a Flatpak SDK for Rust, there should be no extra steps necessary. + +If you want to use Flatpak in combination with `rustup`, the following steps might help: + + - both Rust and `rustup` have to be installed using https://rustup.rs. Distro packages _will not_ work. + - you need to launch Code, open a terminal and run `echo $PATH` + - using https://flathub.org/apps/details/com.github.tchx84.Flatseal[Flatseal], you must add an environment variable called `PATH`. + Set its value to the output from above, appending `:~/.cargo/bin`, where `~` is the path to your home directory. + You must replace `~`, as it won't be expanded otherwise. + - while Flatseal is open, you must enable access to "All user files" + +A C compiler should already be available via `org.freedesktop.Sdk`. +Any other tools or libraries you will need to acquire from Flatpak. + === Emacs Prerequisites: You have installed the <>. 
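The Flatseal steps in the Flatpak section above can also be reproduced from a terminal. The sketch below is illustrative rather than authoritative: the app ID `com.visualstudio.code` (for VSCodium, `com.vscodium.codium`) and the example `PATH` value are assumptions, and the real value should be whatever `echo $PATH` prints inside the sandbox, with the expanded path to `~/.cargo/bin` appended.

[source,bash]
----
# Rough CLI equivalent of the Flatseal configuration described above.
# Assumed app ID; use the ID of the Flatpak you actually installed.
# The PATH value is an example only: paste the output of `echo $PATH` from a
# terminal inside the sandbox and append /home/<you>/.cargo/bin yourself.
flatpak override --user \
  --env=PATH=/app/bin:/usr/bin:/home/yourname/.cargo/bin \
  --filesystem=home \
  com.visualstudio.code
----

Here `--filesystem=home` corresponds to the "All user files" toggle in Flatseal, so either route should result in the same sandbox permissions.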
From fba16e2e3a6f743cb23c7793851a4f0201b21722 Mon Sep 17 00:00:00 2001 From: Michael Krasnitski Date: Thu, 2 Feb 2023 19:36:42 -0500 Subject: [PATCH 244/501] Add `extra_unused_type_parameters` lint --- CHANGELOG.md | 1 + README.md | 2 +- book/src/README.md | 2 +- clippy_lints/src/declared_lints.rs | 1 + .../src/extra_unused_type_parameters.rs | 178 ++++++++++++++++++ clippy_lints/src/lib.rs | 2 + tests/ui/extra_unused_type_parameters.rs | 69 +++++++ tests/ui/extra_unused_type_parameters.stderr | 59 ++++++ tests/ui/needless_lifetimes.fixed | 1 + tests/ui/needless_lifetimes.rs | 1 + tests/ui/needless_lifetimes.stderr | 92 ++++----- tests/ui/new_without_default.rs | 7 +- tests/ui/new_without_default.stderr | 14 +- tests/ui/redundant_field_names.fixed | 2 +- tests/ui/redundant_field_names.rs | 2 +- .../ui/seek_to_start_instead_of_rewind.fixed | 4 +- tests/ui/seek_to_start_instead_of_rewind.rs | 4 +- tests/ui/type_repetition_in_bounds.rs | 1 + tests/ui/type_repetition_in_bounds.stderr | 8 +- 19 files changed, 384 insertions(+), 66 deletions(-) create mode 100644 clippy_lints/src/extra_unused_type_parameters.rs create mode 100644 tests/ui/extra_unused_type_parameters.rs create mode 100644 tests/ui/extra_unused_type_parameters.stderr diff --git a/CHANGELOG.md b/CHANGELOG.md index e2cde09776f4c..659e8aebcd579 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4383,6 +4383,7 @@ Released 2018-09-13 [`extend_from_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#extend_from_slice [`extend_with_drain`]: https://rust-lang.github.io/rust-clippy/master/index.html#extend_with_drain [`extra_unused_lifetimes`]: https://rust-lang.github.io/rust-clippy/master/index.html#extra_unused_lifetimes +[`extra_unused_type_parameters`]: https://rust-lang.github.io/rust-clippy/master/index.html#extra_unused_type_parameters [`fallible_impl_from`]: https://rust-lang.github.io/rust-clippy/master/index.html#fallible_impl_from [`field_reassign_with_default`]: https://rust-lang.github.io/rust-clippy/master/index.html#field_reassign_with_default [`filetype_is_file`]: https://rust-lang.github.io/rust-clippy/master/index.html#filetype_is_file diff --git a/README.md b/README.md index ab44db694835f..95f6d2cc45c83 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ A collection of lints to catch common mistakes and improve your [Rust](https://github.com/rust-lang/rust) code. -[There are over 550 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html) +[There are over 600 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html) Lints are divided into categories, each with a default [lint level](https://doc.rust-lang.org/rustc/lints/levels.html). You can choose how much Clippy is supposed to ~~annoy~~ help you by changing the lint level by category. diff --git a/book/src/README.md b/book/src/README.md index 23867df8efe1d..df4a1f2702e47 100644 --- a/book/src/README.md +++ b/book/src/README.md @@ -6,7 +6,7 @@ A collection of lints to catch common mistakes and improve your [Rust](https://github.com/rust-lang/rust) code. -[There are over 550 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html) +[There are over 600 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html) Lints are divided into categories, each with a default [lint level](https://doc.rust-lang.org/rustc/lints/levels.html). 
You can choose how diff --git a/clippy_lints/src/declared_lints.rs b/clippy_lints/src/declared_lints.rs index 36a366fc97474..457a25826e799 100644 --- a/clippy_lints/src/declared_lints.rs +++ b/clippy_lints/src/declared_lints.rs @@ -156,6 +156,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[ crate::exhaustive_items::EXHAUSTIVE_STRUCTS_INFO, crate::exit::EXIT_INFO, crate::explicit_write::EXPLICIT_WRITE_INFO, + crate::extra_unused_type_parameters::EXTRA_UNUSED_TYPE_PARAMETERS_INFO, crate::fallible_impl_from::FALLIBLE_IMPL_FROM_INFO, crate::float_literal::EXCESSIVE_PRECISION_INFO, crate::float_literal::LOSSY_FLOAT_LITERAL_INFO, diff --git a/clippy_lints/src/extra_unused_type_parameters.rs b/clippy_lints/src/extra_unused_type_parameters.rs new file mode 100644 index 0000000000000..2fdd8a71466c0 --- /dev/null +++ b/clippy_lints/src/extra_unused_type_parameters.rs @@ -0,0 +1,178 @@ +use clippy_utils::diagnostics::span_lint_and_help; +use clippy_utils::trait_ref_of_method; +use rustc_data_structures::fx::FxHashMap; +use rustc_errors::MultiSpan; +use rustc_hir::intravisit::{walk_impl_item, walk_item, walk_param_bound, walk_ty, Visitor}; +use rustc_hir::{ + GenericParamKind, Generics, ImplItem, ImplItemKind, Item, ItemKind, PredicateOrigin, Ty, TyKind, WherePredicate, +}; +use rustc_lint::{LateContext, LateLintPass}; +use rustc_middle::hir::nested_filter; +use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_span::{def_id::DefId, Span}; + +declare_clippy_lint! { + /// ### What it does + /// Checks for type parameters in generics that are never used anywhere else. + /// + /// ### Why is this bad? + /// Functions cannot infer the value of unused type parameters; therefore, calling them + /// requires using a turbofish, which serves no purpose but to satisfy the compiler. + /// + /// ### Example + /// ```rust + /// // unused type parameters + /// fn unused_ty(x: u8) { + /// // .. + /// } + /// ``` + /// Use instead: + /// ```rust + /// fn no_unused_ty(x: u8) { + /// // .. + /// } + /// ``` + #[clippy::version = "1.69.0"] + pub EXTRA_UNUSED_TYPE_PARAMETERS, + complexity, + "unused type parameters in function definitions" +} +declare_lint_pass!(ExtraUnusedTypeParameters => [EXTRA_UNUSED_TYPE_PARAMETERS]); + +/// A visitor struct that walks a given function and gathers generic type parameters, plus any +/// trait bounds those parameters have. +struct TypeWalker<'cx, 'tcx> { + cx: &'cx LateContext<'tcx>, + /// Collection of all the type parameters and their spans. + ty_params: FxHashMap, + /// Collection of any (inline) trait bounds corresponding to each type parameter. + bounds: FxHashMap, + /// The entire `Generics` object of the function, useful for querying purposes. + generics: &'tcx Generics<'tcx>, + /// The value of this will remain `true` if *every* parameter: + /// 1. Is a type parameter, and + /// 2. Goes unused in the function. + /// Otherwise, if any type parameters end up being used, or if any lifetime or const-generic + /// parameters are present, this will be set to `false`. + all_params_unused: bool, +} + +impl<'cx, 'tcx> TypeWalker<'cx, 'tcx> { + fn new(cx: &'cx LateContext<'tcx>, generics: &'tcx Generics<'tcx>) -> Self { + let mut all_params_unused = true; + let ty_params = generics + .params + .iter() + .filter_map(|param| { + if let GenericParamKind::Type { .. 
} = param.kind { + Some((param.def_id.into(), param.span)) + } else { + if !param.is_elided_lifetime() { + all_params_unused = false; + } + None + } + }) + .collect(); + Self { + cx, + ty_params, + bounds: FxHashMap::default(), + generics, + all_params_unused, + } + } + + fn emit_lint(&self) { + let (msg, help) = match self.ty_params.len() { + 0 => return, + 1 => ( + "type parameter goes unused in function definition", + "consider removing the parameter", + ), + _ => ( + "type parameters go unused in function definition", + "consider removing the parameters", + ), + }; + + let source_map = self.cx.tcx.sess.source_map(); + let span = if self.all_params_unused { + self.generics.span.into() // Remove the entire list of generics + } else { + MultiSpan::from_spans( + self.ty_params + .iter() + .map(|(def_id, &span)| { + // Extend the span past any trait bounds, and include the comma at the end. + let span_to_extend = self.bounds.get(def_id).copied().map_or(span, Span::shrink_to_hi); + let comma_range = source_map.span_extend_to_next_char(span_to_extend, '>', false); + let comma_span = source_map.span_through_char(comma_range, ','); + span.with_hi(comma_span.hi()) + }) + .collect(), + ) + }; + + span_lint_and_help(self.cx, EXTRA_UNUSED_TYPE_PARAMETERS, span, msg, None, help); + } +} + +impl<'cx, 'tcx> Visitor<'tcx> for TypeWalker<'cx, 'tcx> { + type NestedFilter = nested_filter::OnlyBodies; + + fn visit_ty(&mut self, t: &'tcx Ty<'tcx>) { + if let Some((def_id, _)) = t.peel_refs().as_generic_param() { + if self.ty_params.remove(&def_id).is_some() { + self.all_params_unused = false; + } + } else if let TyKind::OpaqueDef(id, _, _) = t.kind { + // Explicitly walk OpaqueDef. Normally `walk_ty` would do the job, but it calls + // `visit_nested_item`, which checks that `Self::NestedFilter::INTER` is set. We're + // using `OnlyBodies`, so the check ends up failing and the type isn't fully walked. + let item = self.nested_visit_map().item(id); + walk_item(self, item); + } else { + walk_ty(self, t); + } + } + + fn visit_where_predicate(&mut self, predicate: &'tcx WherePredicate<'tcx>) { + if let WherePredicate::BoundPredicate(predicate) = predicate { + // Collect spans for bounds that appear in the list of generics (not in a where-clause) + // for use in forming the help message + if let Some((def_id, _)) = predicate.bounded_ty.peel_refs().as_generic_param() + && let PredicateOrigin::GenericParam = predicate.origin + { + self.bounds.insert(def_id, predicate.span); + } + // Only walk the right-hand side of where-bounds + for bound in predicate.bounds { + walk_param_bound(self, bound); + } + } + } + + fn nested_visit_map(&mut self) -> Self::Map { + self.cx.tcx.hir() + } +} + +impl<'tcx> LateLintPass<'tcx> for ExtraUnusedTypeParameters { + fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) { + if let ItemKind::Fn(_, generics, _) = item.kind { + let mut walker = TypeWalker::new(cx, generics); + walk_item(&mut walker, item); + walker.emit_lint(); + } + } + + fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx ImplItem<'tcx>) { + // Only lint on inherent methods, not trait methods. + if let ImplItemKind::Fn(..) 
= item.kind && trait_ref_of_method(cx, item.owner_id.def_id).is_none() { + let mut walker = TypeWalker::new(cx, item.generics); + walk_impl_item(&mut walker, item); + walker.emit_lint(); + } + } +} diff --git a/clippy_lints/src/lib.rs b/clippy_lints/src/lib.rs index 5c4b604104417..2f5c4adca9f10 100644 --- a/clippy_lints/src/lib.rs +++ b/clippy_lints/src/lib.rs @@ -122,6 +122,7 @@ mod excessive_bools; mod exhaustive_items; mod exit; mod explicit_write; +mod extra_unused_type_parameters; mod fallible_impl_from; mod float_literal; mod floating_point_arithmetic; @@ -910,6 +911,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: store.register_late_pass(|_| Box::new(permissions_set_readonly_false::PermissionsSetReadonlyFalse)); store.register_late_pass(|_| Box::new(size_of_ref::SizeOfRef)); store.register_late_pass(|_| Box::new(multiple_unsafe_ops_per_block::MultipleUnsafeOpsPerBlock)); + store.register_late_pass(|_| Box::new(extra_unused_type_parameters::ExtraUnusedTypeParameters)); // add lints here, do not remove this comment, it's used in `new_lint` } diff --git a/tests/ui/extra_unused_type_parameters.rs b/tests/ui/extra_unused_type_parameters.rs new file mode 100644 index 0000000000000..5cb80cb6233f7 --- /dev/null +++ b/tests/ui/extra_unused_type_parameters.rs @@ -0,0 +1,69 @@ +#![allow(unused, clippy::needless_lifetimes)] +#![warn(clippy::extra_unused_type_parameters)] + +fn unused_ty(x: u8) {} + +fn unused_multi(x: u8) {} + +fn unused_with_lt<'a, T>(x: &'a u8) {} + +fn used_ty(x: T, y: u8) {} + +fn used_ref<'a, T>(x: &'a T) {} + +fn used_ret(x: u8) -> T { + T::default() +} + +fn unused_bounded(x: U) {} + +fn unused_where_clause(x: U) +where + T: Default, +{ +} + +fn some_unused, E>(b: B, c: C) {} + +fn used_opaque(iter: impl Iterator) -> usize { + iter.count() +} + +fn used_ret_opaque() -> impl Iterator { + std::iter::empty() +} + +fn used_vec_box(x: Vec>) {} + +fn used_body() -> String { + T::default().to_string() +} + +fn used_closure() -> impl Fn() { + || println!("{}", T::default().to_string()) +} + +struct S; + +impl S { + fn unused_ty_impl(&self) {} +} + +// Don't lint on trait methods +trait Foo { + fn bar(&self); +} + +impl Foo for S { + fn bar(&self) {} +} + +fn skip_index(iter: Iter, index: usize) -> impl Iterator +where + Iter: Iterator, +{ + iter.enumerate() + .filter_map(move |(i, a)| if i == index { None } else { Some(a) }) +} + +fn main() {} diff --git a/tests/ui/extra_unused_type_parameters.stderr b/tests/ui/extra_unused_type_parameters.stderr new file mode 100644 index 0000000000000..1c8dd53e63859 --- /dev/null +++ b/tests/ui/extra_unused_type_parameters.stderr @@ -0,0 +1,59 @@ +error: type parameter goes unused in function definition + --> $DIR/extra_unused_type_parameters.rs:4:13 + | +LL | fn unused_ty(x: u8) {} + | ^^^ + | + = help: consider removing the parameter + = note: `-D clippy::extra-unused-type-parameters` implied by `-D warnings` + +error: type parameters go unused in function definition + --> $DIR/extra_unused_type_parameters.rs:6:16 + | +LL | fn unused_multi(x: u8) {} + | ^^^^^^ + | + = help: consider removing the parameters + +error: type parameter goes unused in function definition + --> $DIR/extra_unused_type_parameters.rs:8:23 + | +LL | fn unused_with_lt<'a, T>(x: &'a u8) {} + | ^ + | + = help: consider removing the parameter + +error: type parameter goes unused in function definition + --> $DIR/extra_unused_type_parameters.rs:18:19 + | +LL | fn unused_bounded(x: U) {} + | ^^^^^^^^^^^ + | + = help: consider removing 
the parameter + +error: type parameter goes unused in function definition + --> $DIR/extra_unused_type_parameters.rs:20:24 + | +LL | fn unused_where_clause(x: U) + | ^^ + | + = help: consider removing the parameter + +error: type parameters go unused in function definition + --> $DIR/extra_unused_type_parameters.rs:26:16 + | +LL | fn some_unused, E>(b: B, c: C) {} + | ^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^ + | + = help: consider removing the parameters + +error: type parameter goes unused in function definition + --> $DIR/extra_unused_type_parameters.rs:49:22 + | +LL | fn unused_ty_impl(&self) {} + | ^^^ + | + = help: consider removing the parameter + +error: aborting due to 7 previous errors + diff --git a/tests/ui/needless_lifetimes.fixed b/tests/ui/needless_lifetimes.fixed index 270cd1afc6792..d286ef4ba3788 100644 --- a/tests/ui/needless_lifetimes.fixed +++ b/tests/ui/needless_lifetimes.fixed @@ -5,6 +5,7 @@ #![allow( unused, clippy::boxed_local, + clippy::extra_unused_type_parameters, clippy::needless_pass_by_value, clippy::unnecessary_wraps, dyn_drop, diff --git a/tests/ui/needless_lifetimes.rs b/tests/ui/needless_lifetimes.rs index 5d4dc971b8d28..409528b291db1 100644 --- a/tests/ui/needless_lifetimes.rs +++ b/tests/ui/needless_lifetimes.rs @@ -5,6 +5,7 @@ #![allow( unused, clippy::boxed_local, + clippy::extra_unused_type_parameters, clippy::needless_pass_by_value, clippy::unnecessary_wraps, dyn_drop, diff --git a/tests/ui/needless_lifetimes.stderr b/tests/ui/needless_lifetimes.stderr index afe637ac38887..4e3c8f20d8c52 100644 --- a/tests/ui/needless_lifetimes.stderr +++ b/tests/ui/needless_lifetimes.stderr @@ -1,5 +1,5 @@ error: the following explicit lifetimes could be elided: 'a, 'b - --> $DIR/needless_lifetimes.rs:17:1 + --> $DIR/needless_lifetimes.rs:18:1 | LL | fn distinct_lifetimes<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -12,7 +12,7 @@ LL + fn distinct_lifetimes(_x: &u8, _y: &u8, _z: u8) {} | error: the following explicit lifetimes could be elided: 'a, 'b - --> $DIR/needless_lifetimes.rs:19:1 + --> $DIR/needless_lifetimes.rs:20:1 | LL | fn distinct_and_static<'a, 'b>(_x: &'a u8, _y: &'b u8, _z: &'static u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -24,7 +24,7 @@ LL + fn distinct_and_static(_x: &u8, _y: &u8, _z: &'static u8) {} | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:29:1 + --> $DIR/needless_lifetimes.rs:30:1 | LL | fn in_and_out<'a>(x: &'a u8, _y: u8) -> &'a u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -36,7 +36,7 @@ LL + fn in_and_out(x: &u8, _y: u8) -> &u8 { | error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:41:1 + --> $DIR/needless_lifetimes.rs:42:1 | LL | fn multiple_in_and_out_2a<'a, 'b>(x: &'a u8, _y: &'b u8) -> &'a u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -48,7 +48,7 @@ LL + fn multiple_in_and_out_2a<'a>(x: &'a u8, _y: &u8) -> &'a u8 { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:48:1 + --> $DIR/needless_lifetimes.rs:49:1 | LL | fn multiple_in_and_out_2b<'a, 'b>(_x: &'a u8, y: &'b u8) -> &'b u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -60,7 +60,7 @@ LL + fn multiple_in_and_out_2b<'b>(_x: &u8, y: &'b u8) -> &'b u8 { | error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:65:1 + --> 
$DIR/needless_lifetimes.rs:66:1 | LL | fn deep_reference_1a<'a, 'b>(x: &'a u8, _y: &'b u8) -> Result<&'a u8, ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -72,7 +72,7 @@ LL + fn deep_reference_1a<'a>(x: &'a u8, _y: &u8) -> Result<&'a u8, ()> { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:72:1 + --> $DIR/needless_lifetimes.rs:73:1 | LL | fn deep_reference_1b<'a, 'b>(_x: &'a u8, y: &'b u8) -> Result<&'b u8, ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -84,7 +84,7 @@ LL + fn deep_reference_1b<'b>(_x: &u8, y: &'b u8) -> Result<&'b u8, ()> { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:81:1 + --> $DIR/needless_lifetimes.rs:82:1 | LL | fn deep_reference_3<'a>(x: &'a u8, _y: u8) -> Result<&'a u8, ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -96,7 +96,7 @@ LL + fn deep_reference_3(x: &u8, _y: u8) -> Result<&u8, ()> { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:86:1 + --> $DIR/needless_lifetimes.rs:87:1 | LL | fn where_clause_without_lt<'a, T>(x: &'a u8, _y: u8) -> Result<&'a u8, ()> | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -108,7 +108,7 @@ LL + fn where_clause_without_lt(x: &u8, _y: u8) -> Result<&u8, ()> | error: the following explicit lifetimes could be elided: 'a, 'b - --> $DIR/needless_lifetimes.rs:98:1 + --> $DIR/needless_lifetimes.rs:99:1 | LL | fn lifetime_param_2<'a, 'b>(_x: Ref<'a>, _y: &'b u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -120,7 +120,7 @@ LL + fn lifetime_param_2(_x: Ref<'_>, _y: &u8) {} | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:122:1 + --> $DIR/needless_lifetimes.rs:123:1 | LL | fn fn_bound_2<'a, F, I>(_m: Lt<'a, I>, _f: F) -> Lt<'a, I> | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -132,7 +132,7 @@ LL + fn fn_bound_2(_m: Lt<'_, I>, _f: F) -> Lt<'_, I> | error: the following explicit lifetimes could be elided: 's - --> $DIR/needless_lifetimes.rs:152:5 + --> $DIR/needless_lifetimes.rs:153:5 | LL | fn self_and_out<'s>(&'s self) -> &'s u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -144,7 +144,7 @@ LL + fn self_and_out(&self) -> &u8 { | error: the following explicit lifetimes could be elided: 't - --> $DIR/needless_lifetimes.rs:159:5 + --> $DIR/needless_lifetimes.rs:160:5 | LL | fn self_and_in_out_1<'s, 't>(&'s self, _x: &'t u8) -> &'s u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -156,7 +156,7 @@ LL + fn self_and_in_out_1<'s>(&'s self, _x: &u8) -> &'s u8 { | error: the following explicit lifetimes could be elided: 's - --> $DIR/needless_lifetimes.rs:166:5 + --> $DIR/needless_lifetimes.rs:167:5 | LL | fn self_and_in_out_2<'s, 't>(&'s self, x: &'t u8) -> &'t u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -168,7 +168,7 @@ LL + fn self_and_in_out_2<'t>(&self, x: &'t u8) -> &'t u8 { | error: the following explicit lifetimes could be elided: 's, 't - --> $DIR/needless_lifetimes.rs:170:5 + --> $DIR/needless_lifetimes.rs:171:5 | LL | fn distinct_self_and_in<'s, 't>(&'s self, _x: &'t u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -180,7 +180,7 @@ LL + fn distinct_self_and_in(&self, _x: &u8) {} | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:189:1 + --> $DIR/needless_lifetimes.rs:190:1 | 
LL | fn struct_with_lt<'a>(_foo: Foo<'a>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -192,7 +192,7 @@ LL + fn struct_with_lt(_foo: Foo<'_>) -> &str { | error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:207:1 + --> $DIR/needless_lifetimes.rs:208:1 | LL | fn struct_with_lt4a<'a, 'b>(_foo: &'a Foo<'b>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -204,7 +204,7 @@ LL + fn struct_with_lt4a<'a>(_foo: &'a Foo<'_>) -> &'a str { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:215:1 + --> $DIR/needless_lifetimes.rs:216:1 | LL | fn struct_with_lt4b<'a, 'b>(_foo: &'a Foo<'b>) -> &'b str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -216,7 +216,7 @@ LL + fn struct_with_lt4b<'b>(_foo: &Foo<'b>) -> &'b str { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:230:1 + --> $DIR/needless_lifetimes.rs:231:1 | LL | fn trait_obj_elided2<'a>(_arg: &'a dyn Drop) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -228,7 +228,7 @@ LL + fn trait_obj_elided2(_arg: &dyn Drop) -> &str { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:236:1 + --> $DIR/needless_lifetimes.rs:237:1 | LL | fn alias_with_lt<'a>(_foo: FooAlias<'a>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -240,7 +240,7 @@ LL + fn alias_with_lt(_foo: FooAlias<'_>) -> &str { | error: the following explicit lifetimes could be elided: 'b - --> $DIR/needless_lifetimes.rs:254:1 + --> $DIR/needless_lifetimes.rs:255:1 | LL | fn alias_with_lt4a<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'a str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -252,7 +252,7 @@ LL + fn alias_with_lt4a<'a>(_foo: &'a FooAlias<'_>) -> &'a str { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:262:1 + --> $DIR/needless_lifetimes.rs:263:1 | LL | fn alias_with_lt4b<'a, 'b>(_foo: &'a FooAlias<'b>) -> &'b str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -264,7 +264,7 @@ LL + fn alias_with_lt4b<'b>(_foo: &FooAlias<'b>) -> &'b str { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:266:1 + --> $DIR/needless_lifetimes.rs:267:1 | LL | fn named_input_elided_output<'a>(_arg: &'a str) -> &str { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -276,7 +276,7 @@ LL + fn named_input_elided_output(_arg: &str) -> &str { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:274:1 + --> $DIR/needless_lifetimes.rs:275:1 | LL | fn trait_bound_ok<'a, T: WithLifetime<'static>>(_: &'a u8, _: T) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -288,7 +288,7 @@ LL + fn trait_bound_ok>(_: &u8, _: T) { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:310:1 + --> $DIR/needless_lifetimes.rs:311:1 | LL | fn out_return_type_lts<'a>(e: &'a str) -> Cow<'a> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -300,7 +300,7 @@ LL + fn out_return_type_lts(e: &str) -> Cow<'_> { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:317:9 + --> $DIR/needless_lifetimes.rs:318:9 | LL | fn needless_lt<'a>(x: &'a u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -312,7 +312,7 @@ LL + fn needless_lt(x: &u8) {} | error: the following explicit lifetimes 
could be elided: 'a - --> $DIR/needless_lifetimes.rs:321:9 + --> $DIR/needless_lifetimes.rs:322:9 | LL | fn needless_lt<'a>(_x: &'a u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -324,7 +324,7 @@ LL + fn needless_lt(_x: &u8) {} | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:334:9 + --> $DIR/needless_lifetimes.rs:335:9 | LL | fn baz<'a>(&'a self) -> impl Foo + 'a { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -336,7 +336,7 @@ LL + fn baz(&self) -> impl Foo + '_ { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:366:5 + --> $DIR/needless_lifetimes.rs:367:5 | LL | fn impl_trait_elidable_nested_anonymous_lifetimes<'a>(i: &'a i32, f: impl Fn(&i32) -> &i32) -> &'a i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -348,7 +348,7 @@ LL + fn impl_trait_elidable_nested_anonymous_lifetimes(i: &i32, f: impl Fn(& | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:375:5 + --> $DIR/needless_lifetimes.rs:376:5 | LL | fn generics_elidable<'a, T: Fn(&i32) -> &i32>(i: &'a i32, f: T) -> &'a i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -360,7 +360,7 @@ LL + fn generics_elidable &i32>(i: &i32, f: T) -> &i32 { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:387:5 + --> $DIR/needless_lifetimes.rs:388:5 | LL | fn where_clause_elidadable<'a, T>(i: &'a i32, f: T) -> &'a i32 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -372,7 +372,7 @@ LL + fn where_clause_elidadable(i: &i32, f: T) -> &i32 | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:402:5 + --> $DIR/needless_lifetimes.rs:403:5 | LL | fn pointer_fn_elidable<'a>(i: &'a i32, f: fn(&i32) -> &i32) -> &'a i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -384,7 +384,7 @@ LL + fn pointer_fn_elidable(i: &i32, f: fn(&i32) -> &i32) -> &i32 { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:415:5 + --> $DIR/needless_lifetimes.rs:416:5 | LL | fn nested_fn_pointer_3<'a>(_: &'a i32) -> fn(fn(&i32) -> &i32) -> i32 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -396,7 +396,7 @@ LL + fn nested_fn_pointer_3(_: &i32) -> fn(fn(&i32) -> &i32) -> i32 { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:418:5 + --> $DIR/needless_lifetimes.rs:419:5 | LL | fn nested_fn_pointer_4<'a>(_: &'a i32) -> impl Fn(fn(&i32)) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -408,7 +408,7 @@ LL + fn nested_fn_pointer_4(_: &i32) -> impl Fn(fn(&i32)) { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:440:9 + --> $DIR/needless_lifetimes.rs:441:9 | LL | fn implicit<'a>(&'a self) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -420,7 +420,7 @@ LL + fn implicit(&self) -> &() { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:443:9 + --> $DIR/needless_lifetimes.rs:444:9 | LL | fn implicit_mut<'a>(&'a mut self) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -432,7 +432,7 @@ LL + fn implicit_mut(&mut self) -> &() { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:454:9 + --> $DIR/needless_lifetimes.rs:455:9 | LL | fn 
lifetime_elsewhere<'a>(self: Box, here: &'a ()) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -444,7 +444,7 @@ LL + fn lifetime_elsewhere(self: Box, here: &()) -> &() { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:460:9 + --> $DIR/needless_lifetimes.rs:461:9 | LL | fn implicit<'a>(&'a self) -> &'a (); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -456,7 +456,7 @@ LL + fn implicit(&self) -> &(); | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:461:9 + --> $DIR/needless_lifetimes.rs:462:9 | LL | fn implicit_provided<'a>(&'a self) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -468,7 +468,7 @@ LL + fn implicit_provided(&self) -> &() { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:470:9 + --> $DIR/needless_lifetimes.rs:471:9 | LL | fn lifetime_elsewhere<'a>(self: Box, here: &'a ()) -> &'a (); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -480,7 +480,7 @@ LL + fn lifetime_elsewhere(self: Box, here: &()) -> &(); | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:471:9 + --> $DIR/needless_lifetimes.rs:472:9 | LL | fn lifetime_elsewhere_provided<'a>(self: Box, here: &'a ()) -> &'a () { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -492,7 +492,7 @@ LL + fn lifetime_elsewhere_provided(self: Box, here: &()) -> &() { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:480:5 + --> $DIR/needless_lifetimes.rs:481:5 | LL | fn foo<'a>(x: &'a u8, y: &'_ u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -504,7 +504,7 @@ LL + fn foo(x: &u8, y: &'_ u8) {} | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:482:5 + --> $DIR/needless_lifetimes.rs:483:5 | LL | fn bar<'a>(x: &'a u8, y: &'_ u8, z: &'_ u8) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -516,7 +516,7 @@ LL + fn bar(x: &u8, y: &'_ u8, z: &'_ u8) {} | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:489:5 + --> $DIR/needless_lifetimes.rs:490:5 | LL | fn one_input<'a>(x: &'a u8) -> &'a u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -528,7 +528,7 @@ LL + fn one_input(x: &u8) -> &u8 { | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:494:5 + --> $DIR/needless_lifetimes.rs:495:5 | LL | fn multiple_inputs_output_not_elided<'a, 'b>(x: &'a u8, y: &'b u8, z: &'b u8) -> &'b u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -540,7 +540,7 @@ LL + fn multiple_inputs_output_not_elided<'b>(x: &u8, y: &'b u8, z: &'b u8) | error: the following explicit lifetimes could be elided: 'a - --> $DIR/needless_lifetimes.rs:507:13 + --> $DIR/needless_lifetimes.rs:508:13 | LL | fn one_input<'a>(x: &'a u8) -> &'a u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/tests/ui/new_without_default.rs b/tests/ui/new_without_default.rs index 65809023f8dff..7803418cb047d 100644 --- a/tests/ui/new_without_default.rs +++ b/tests/ui/new_without_default.rs @@ -1,4 +1,9 @@ -#![allow(dead_code, clippy::missing_safety_doc, clippy::extra_unused_lifetimes)] +#![allow( + dead_code, + clippy::missing_safety_doc, + clippy::extra_unused_lifetimes, + clippy::extra_unused_type_parameters +)] #![warn(clippy::new_without_default)] pub struct Foo; diff --git 
a/tests/ui/new_without_default.stderr b/tests/ui/new_without_default.stderr index 212a69ab94e65..583dd327d6a5d 100644 --- a/tests/ui/new_without_default.stderr +++ b/tests/ui/new_without_default.stderr @@ -1,5 +1,5 @@ error: you should consider adding a `Default` implementation for `Foo` - --> $DIR/new_without_default.rs:7:5 + --> $DIR/new_without_default.rs:12:5 | LL | / pub fn new() -> Foo { LL | | Foo @@ -17,7 +17,7 @@ LL + } | error: you should consider adding a `Default` implementation for `Bar` - --> $DIR/new_without_default.rs:15:5 + --> $DIR/new_without_default.rs:20:5 | LL | / pub fn new() -> Self { LL | | Bar @@ -34,7 +34,7 @@ LL + } | error: you should consider adding a `Default` implementation for `LtKo<'c>` - --> $DIR/new_without_default.rs:79:5 + --> $DIR/new_without_default.rs:84:5 | LL | / pub fn new() -> LtKo<'c> { LL | | unimplemented!() @@ -51,7 +51,7 @@ LL + } | error: you should consider adding a `Default` implementation for `NewNotEqualToDerive` - --> $DIR/new_without_default.rs:172:5 + --> $DIR/new_without_default.rs:177:5 | LL | / pub fn new() -> Self { LL | | NewNotEqualToDerive { foo: 1 } @@ -68,7 +68,7 @@ LL + } | error: you should consider adding a `Default` implementation for `FooGenerics` - --> $DIR/new_without_default.rs:180:5 + --> $DIR/new_without_default.rs:185:5 | LL | / pub fn new() -> Self { LL | | Self(Default::default()) @@ -85,7 +85,7 @@ LL + } | error: you should consider adding a `Default` implementation for `BarGenerics` - --> $DIR/new_without_default.rs:187:5 + --> $DIR/new_without_default.rs:192:5 | LL | / pub fn new() -> Self { LL | | Self(Default::default()) @@ -102,7 +102,7 @@ LL + } | error: you should consider adding a `Default` implementation for `Foo` - --> $DIR/new_without_default.rs:198:9 + --> $DIR/new_without_default.rs:203:9 | LL | / pub fn new() -> Self { LL | | todo!() diff --git a/tests/ui/redundant_field_names.fixed b/tests/ui/redundant_field_names.fixed index ec7f8ae923a79..276266a2dd803 100644 --- a/tests/ui/redundant_field_names.fixed +++ b/tests/ui/redundant_field_names.fixed @@ -1,7 +1,7 @@ // run-rustfix #![warn(clippy::redundant_field_names)] -#![allow(clippy::no_effect, dead_code, unused_variables)] +#![allow(clippy::extra_unused_type_parameters, clippy::no_effect, dead_code, unused_variables)] #[macro_use] extern crate derive_new; diff --git a/tests/ui/redundant_field_names.rs b/tests/ui/redundant_field_names.rs index 73122016cf69e..f674141c138e1 100644 --- a/tests/ui/redundant_field_names.rs +++ b/tests/ui/redundant_field_names.rs @@ -1,7 +1,7 @@ // run-rustfix #![warn(clippy::redundant_field_names)] -#![allow(clippy::no_effect, dead_code, unused_variables)] +#![allow(clippy::extra_unused_type_parameters, clippy::no_effect, dead_code, unused_variables)] #[macro_use] extern crate derive_new; diff --git a/tests/ui/seek_to_start_instead_of_rewind.fixed b/tests/ui/seek_to_start_instead_of_rewind.fixed index 713cff604a1d7..dc24d447c6075 100644 --- a/tests/ui/seek_to_start_instead_of_rewind.fixed +++ b/tests/ui/seek_to_start_instead_of_rewind.fixed @@ -26,7 +26,7 @@ fn seek_to_start_false_method(t: &mut StructWithSeekMethod) { // This should NOT trigger clippy warning because // StructWithSeekMethod does not implement std::io::Seek; -fn seek_to_start_method_owned_false(mut t: StructWithSeekMethod) { +fn seek_to_start_method_owned_false(mut t: StructWithSeekMethod) { t.seek(SeekFrom::Start(0)); } @@ -38,7 +38,7 @@ fn seek_to_start_false_trait(t: &mut StructWithSeekTrait) { // This should NOT trigger clippy warning because // 
StructWithSeekMethod does not implement std::io::Seek; -fn seek_to_start_false_trait_owned(mut t: StructWithSeekTrait) { +fn seek_to_start_false_trait_owned(mut t: StructWithSeekTrait) { t.seek(SeekFrom::Start(0)); } diff --git a/tests/ui/seek_to_start_instead_of_rewind.rs b/tests/ui/seek_to_start_instead_of_rewind.rs index 467003a1a66f6..4adde2c40182d 100644 --- a/tests/ui/seek_to_start_instead_of_rewind.rs +++ b/tests/ui/seek_to_start_instead_of_rewind.rs @@ -26,7 +26,7 @@ fn seek_to_start_false_method(t: &mut StructWithSeekMethod) { // This should NOT trigger clippy warning because // StructWithSeekMethod does not implement std::io::Seek; -fn seek_to_start_method_owned_false(mut t: StructWithSeekMethod) { +fn seek_to_start_method_owned_false(mut t: StructWithSeekMethod) { t.seek(SeekFrom::Start(0)); } @@ -38,7 +38,7 @@ fn seek_to_start_false_trait(t: &mut StructWithSeekTrait) { // This should NOT trigger clippy warning because // StructWithSeekMethod does not implement std::io::Seek; -fn seek_to_start_false_trait_owned(mut t: StructWithSeekTrait) { +fn seek_to_start_false_trait_owned(mut t: StructWithSeekTrait) { t.seek(SeekFrom::Start(0)); } diff --git a/tests/ui/type_repetition_in_bounds.rs b/tests/ui/type_repetition_in_bounds.rs index 2eca1f4701c9f..8b4613b3f6ec7 100644 --- a/tests/ui/type_repetition_in_bounds.rs +++ b/tests/ui/type_repetition_in_bounds.rs @@ -1,4 +1,5 @@ #![deny(clippy::type_repetition_in_bounds)] +#![allow(clippy::extra_unused_type_parameters)] use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Sub, SubAssign}; diff --git a/tests/ui/type_repetition_in_bounds.stderr b/tests/ui/type_repetition_in_bounds.stderr index 70d700c1cc460..a90df03c04ffc 100644 --- a/tests/ui/type_repetition_in_bounds.stderr +++ b/tests/ui/type_repetition_in_bounds.stderr @@ -1,5 +1,5 @@ error: this type has already been used as a bound predicate - --> $DIR/type_repetition_in_bounds.rs:8:5 + --> $DIR/type_repetition_in_bounds.rs:9:5 | LL | T: Clone, | ^^^^^^^^ @@ -12,7 +12,7 @@ LL | #![deny(clippy::type_repetition_in_bounds)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: this type has already been used as a bound predicate - --> $DIR/type_repetition_in_bounds.rs:25:5 + --> $DIR/type_repetition_in_bounds.rs:26:5 | LL | Self: Copy + Default + Ord, | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -20,7 +20,7 @@ LL | Self: Copy + Default + Ord, = help: consider combining the bounds: `Self: Clone + Copy + Default + Ord` error: this type has already been used as a bound predicate - --> $DIR/type_repetition_in_bounds.rs:85:5 + --> $DIR/type_repetition_in_bounds.rs:86:5 | LL | T: Clone, | ^^^^^^^^ @@ -28,7 +28,7 @@ LL | T: Clone, = help: consider combining the bounds: `T: ?Sized + Clone` error: this type has already been used as a bound predicate - --> $DIR/type_repetition_in_bounds.rs:90:5 + --> $DIR/type_repetition_in_bounds.rs:91:5 | LL | T: ?Sized, | ^^^^^^^^^ From 9be0b3e2bc4f9fce1a47305d0efbc674e312e09e Mon Sep 17 00:00:00 2001 From: Joshua Nelson Date: Fri, 3 Feb 2023 05:29:43 +0000 Subject: [PATCH 245/501] Fix `x fix` on the standard library itself --- src/bootstrap/check.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index 4b8a58e87b64e..1675ed158c9c6 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -99,6 +99,10 @@ impl Step for Std { cargo_subcommand(builder.kind), ); std_cargo(builder, target, compiler.stage, &mut cargo); + if matches!(builder.config.cmd, Subcommand::Fix { .. 
}) { + // By default, cargo tries to fix all targets. Tell it not to fix tests until we've added `test` to the sysroot. + cargo.arg("--lib"); + } builder.info(&format!( "Checking stage{} library artifacts ({} -> {})", From 98c8077495583ddb4102e8db6c62331b1ae13ec2 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Fri, 3 Feb 2023 18:11:11 +0900 Subject: [PATCH 246/501] fix: support non-ascii characters in case conversion --- .../hir-ty/src/diagnostics/decl_check/case_conv.rs | 2 ++ crates/stdx/src/lib.rs | 14 +++++++++----- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs index 88d607194f756..2c13689620924 100644 --- a/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs +++ b/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs @@ -162,6 +162,7 @@ mod tests { check(to_lower_snake_case, "a", expect![[""]]); check(to_lower_snake_case, "abc", expect![[""]]); check(to_lower_snake_case, "foo__bar", expect![["foo_bar"]]); + check(to_lower_snake_case, "Δ", expect!["δ"]); } #[test] @@ -195,5 +196,6 @@ mod tests { check(to_upper_snake_case, "X86_64", expect![[""]]); check(to_upper_snake_case, "FOO_BAr", expect![["FOO_BAR"]]); check(to_upper_snake_case, "FOO__BAR", expect![["FOO_BAR"]]); + check(to_upper_snake_case, "ß", expect!["SS"]); } } diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index 51e109798d1df..bd24d7d28bac9 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ -2,9 +2,9 @@ #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] +use std::io as sio; use std::process::Command; use std::{cmp::Ordering, ops, time::Instant}; -use std::{io as sio, iter}; mod macros; pub mod hash; @@ -39,15 +39,19 @@ Uncomment `default = [ "backtrace" ]` in `crates/stdx/Cargo.toml`. 
} pub fn to_lower_snake_case(s: &str) -> String { - to_snake_case(s, char::to_ascii_lowercase) + to_snake_case(s, char::to_lowercase) } pub fn to_upper_snake_case(s: &str) -> String { - to_snake_case(s, char::to_ascii_uppercase) + to_snake_case(s, char::to_uppercase) } // Code partially taken from rust/compiler/rustc_lint/src/nonstandard_style.rs // commit: 9626f2b -fn to_snake_case<F: Fn(&char) -> char>(mut s: &str, change_case: F) -> String { +fn to_snake_case<F, I>(mut s: &str, change_case: F) -> String +where + F: Fn(char) -> I, + I: Iterator<Item = char>, +{ let mut words = vec![]; // Preserve leading underscores @@ -75,7 +79,7 @@ fn to_snake_case<F: Fn(&char) -> char>(mut s: &str, change_case: F) -> String { } last_upper = ch.is_uppercase(); - buf.extend(iter::once(change_case(&ch))); + buf.extend(change_case(ch)); } words.push(buf); From 8e998c4aa79aab1fa0041bbc929f36b82a6c3aeb Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 3 Feb 2023 10:38:38 +0100 Subject: [PATCH 247/501] Fail spawning proc-macro servers when their api version is newer than r-a's --- crates/proc-macro-api/src/msg.rs | 2 +- crates/proc-macro-api/src/process.rs | 9 ++++++++- crates/proc-macro-srv/src/cli.rs | 2 +- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index 262483d2cbee6..4040efe93f093 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -13,7 +13,7 @@ use crate::ProcMacroKind; pub use crate::msg::flat::FlatTree; pub const NO_VERSION_CHECK_VERSION: u32 = 0; -pub const API_VERSION: u32 = 1; +pub const CURRENT_API_VERSION: u32 = 1; #[derive(Debug, Serialize, Deserialize)] pub enum Request { diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index da4527c64f24e..e70b3850d667d 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -10,7 +10,7 @@ use paths::{AbsPath, AbsPathBuf}; use stdx::JodChild; use crate::{ - msg::{Message, Request, Response}, + msg::{Message, Request, Response, CURRENT_API_VERSION}, ProcMacroKind, ServerError, }; @@ -36,6 +36,13 @@ impl ProcMacroProcessSrv { let mut srv = create_srv()?; tracing::info!("sending version check"); match srv.version_check() { + Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new( + io::ErrorKind::Other, + format!( + "proc-macro server's api version ({}) is newer than rust-analyzer's ({})", + v, CURRENT_API_VERSION + ), + )), Ok(v) => { tracing::info!("got version {v}"); srv.version = v; diff --git a/crates/proc-macro-srv/src/cli.rs b/crates/proc-macro-srv/src/cli.rs index eaf94714dd791..05168feb629f2 100644 --- a/crates/proc-macro-srv/src/cli.rs +++ b/crates/proc-macro-srv/src/cli.rs @@ -16,7 +16,7 @@ pub fn run() -> io::Result<()> { } msg::Request::ExpandMacro(task) => msg::Response::ExpandMacro(srv.expand(task)), msg::Request::ApiVersionCheck {} => { - msg::Response::ApiVersionCheck(proc_macro_api::msg::API_VERSION) + msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION) } }; write_response(res)?
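The process.rs hunk above encodes a simple compatibility rule: a proc-macro server that reports an API version newer than the client's own is rejected up front, while equal or older versions are accepted and remembered. The following is a minimal stand-alone sketch of that rule, not part of the patch; `negotiate` is an illustrative helper name and the constant merely mirrors `CURRENT_API_VERSION` above.

    const CURRENT_API_VERSION: u32 = 1;

    /// Accept servers speaking an equal or older protocol version; refuse newer ones,
    /// since a newer server may send messages this client cannot decode.
    fn negotiate(server_version: u32) -> Result<u32, String> {
        if server_version > CURRENT_API_VERSION {
            Err(format!(
                "server api version ({server_version}) is newer than the client's ({CURRENT_API_VERSION})"
            ))
        } else {
            // Callers store the negotiated version and only rely on features the server supports.
            Ok(server_version)
        }
    }

    // Usage: negotiate(0) and negotiate(1) succeed, negotiate(2) is rejected.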
From b97c191ff3424bccb8238dbb72f466c3aaad9364 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 3 Feb 2023 10:48:37 +0100 Subject: [PATCH 248/501] Add more basic issue templates with auto category labeling --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- .github/ISSUE_TEMPLATE/feature_request.md | 8 ++++++++ .github/ISSUE_TEMPLATE/question.md | 8 ++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/ISSUE_TEMPLATE/question.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index b2a2153f0941f..5faee21bdb6da 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -2,7 +2,7 @@ name: Bug report about: Create a bug report for rust-analyzer. title: '' -labels: '' +labels: 'C-bug' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000000000..5207957c45926 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,8 @@ +--- +name: Feature Request +about: Create a feature request for rust-analyzer. +title: '' +labels: 'C-feature' +assignees: '' + +--- diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 0000000000000..a90ade882bd9e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,8 @@ +--- +name: Support Question +about: A question regarding functionality of rust-analyzer. +title: '' +labels: 'C-support' +assignees: '' + +--- From dab685dd87ba99f6c0f005f2ce7b0a3c10dada22 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 3 Feb 2023 11:47:33 +0100 Subject: [PATCH 249/501] De-magic number parser::Output encoding --- crates/parser/src/output.rs | 49 +++++++++++++++++++++++++++---------- 1 file changed, 36 insertions(+), 13 deletions(-) diff --git a/crates/parser/src/output.rs b/crates/parser/src/output.rs index 6ca841cfe0732..3de6c0aba86ef 100644 --- a/crates/parser/src/output.rs +++ b/crates/parser/src/output.rs @@ -31,47 +31,70 @@ pub enum Step<'a> { } impl Output { + const EVENT_MASK: u32 = 0b1; + const TAG_MASK: u32 = 0x0000_00F0; + const N_INPUT_TOKEN_MASK: u32 = 0x0000_FF00; + const KIND_MASK: u32 = 0xFFFF_0000; + + const ERROR_SHIFT: u32 = Self::EVENT_MASK.trailing_ones(); + const TAG_SHIFT: u32 = Self::TAG_MASK.trailing_zeros(); + const N_INPUT_TOKEN_SHIFT: u32 = Self::N_INPUT_TOKEN_MASK.trailing_zeros(); + const KIND_SHIFT: u32 = Self::KIND_MASK.trailing_zeros(); + + const TOKEN_EVENT: u8 = 0; + const ENTER_EVENT: u8 = 1; + const EXIT_EVENT: u8 = 2; + pub fn iter(&self) -> impl Iterator> { self.event.iter().map(|&event| { - if event & 0b1 == 0 { - return Step::Error { msg: self.error[(event as usize) >> 1].as_str() }; + if event & Self::EVENT_MASK == 0 { + return Step::Error { + msg: self.error[(event as usize) >> Self::ERROR_SHIFT].as_str(), + }; } - let tag = ((event & 0x0000_00F0) >> 4) as u8; + let tag = ((event & Self::TAG_MASK) >> Self::TAG_SHIFT) as u8; match tag { - 0 => { - let kind: SyntaxKind = (((event & 0xFFFF_0000) >> 16) as u16).into(); - let n_input_tokens = ((event & 0x0000_FF00) >> 8) as u8; + Self::TOKEN_EVENT => { + let kind: SyntaxKind = + (((event & Self::KIND_MASK) >> Self::KIND_SHIFT) as u16).into(); + let n_input_tokens = + ((event & Self::N_INPUT_TOKEN_MASK) >> Self::N_INPUT_TOKEN_SHIFT) as u8; Step::Token { kind, n_input_tokens } } - 1 => { - let kind: SyntaxKind = (((event & 0xFFFF_0000) >> 16) as 
u16).into(); + Self::ENTER_EVENT => { + let kind: SyntaxKind = + (((event & Self::KIND_MASK) >> Self::KIND_SHIFT) as u16).into(); Step::Enter { kind } } - 2 => Step::Exit, + Self::EXIT_EVENT => Step::Exit, _ => unreachable!(), } }) } pub(crate) fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { - let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | 1; + let e = ((kind as u16 as u32) << Self::KIND_SHIFT) + | ((n_tokens as u32) << Self::N_INPUT_TOKEN_SHIFT) + | Self::EVENT_MASK; self.event.push(e) } pub(crate) fn enter_node(&mut self, kind: SyntaxKind) { - let e = ((kind as u16 as u32) << 16) | (1 << 4) | 1; + let e = ((kind as u16 as u32) << Self::KIND_SHIFT) + | ((Self::ENTER_EVENT as u32) << Self::TAG_SHIFT) + | Self::EVENT_MASK; self.event.push(e) } pub(crate) fn leave_node(&mut self) { - let e = 2 << 4 | 1; + let e = (Self::EXIT_EVENT as u32) << Self::TAG_SHIFT | Self::EVENT_MASK; self.event.push(e) } pub(crate) fn error(&mut self, error: String) { let idx = self.error.len(); self.error.push(error); - let e = (idx as u32) << 1; + let e = (idx as u32) << Self::ERROR_SHIFT; self.event.push(e); } } From 05d7d33a0f1f4a96bbf0619f648e5343451fe0e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Fri, 3 Feb 2023 13:15:27 +0200 Subject: [PATCH 250/501] Update .github/workflows/fuzz.yml --- .github/workflows/fuzz.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index 95bc386abc2b6..5af8aa1f77aac 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -19,7 +19,7 @@ env: jobs: rust: - if: ${{ (github.repository == 'rust-lang/rust-analyzer') || (github.event.action == 'workflow_dispatch') }} + if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }} name: Rust runs-on: ubuntu-latest env: From 6fa6efe90fc8a79395cacb5c71315f0e2b32e623 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 3 Feb 2023 17:18:48 +0100 Subject: [PATCH 251/501] fix: Fix parsing of nested tuple field accesses in a cursed way --- crates/parser/src/event.rs | 10 +- crates/parser/src/grammar/expressions.rs | 98 +++++++++++++------ crates/parser/src/lib.rs | 2 +- crates/parser/src/output.rs | 12 +++ crates/parser/src/parser.rs | 32 ++++++ crates/parser/src/shortcuts.rs | 54 +++++++++- crates/parser/src/tests/prefix_entries.rs | 4 + .../parser/inline/ok/0011_field_expr.rast | 33 +++++++ .../parser/inline/ok/0011_field_expr.rs | 2 + .../inline/ok/0107_method_call_expr.rast | 43 ++++++++ .../parser/inline/ok/0107_method_call_expr.rs | 2 + .../parser/inline/ok/0137_await_expr.rast | 35 +++++++ .../parser/inline/ok/0137_await_expr.rs | 2 + 13 files changed, 294 insertions(+), 35 deletions(-) diff --git a/crates/parser/src/event.rs b/crates/parser/src/event.rs index b0e70e794303c..fb2616cf01375 100644 --- a/crates/parser/src/event.rs +++ b/crates/parser/src/event.rs @@ -72,9 +72,12 @@ pub(crate) enum Event { /// `n_raw_tokens = 2` is used to produced a single `>>`. Token { kind: SyntaxKind, + // Consider custom enum here? 
n_raw_tokens: u8, }, - + FloatSplitHack { + has_pseudo_dot: bool, + }, Error { msg: String, }, @@ -125,6 +128,11 @@ pub(super) fn process(mut events: Vec) -> Output { Event::Token { kind, n_raw_tokens } => { res.token(kind, n_raw_tokens); } + Event::FloatSplitHack { has_pseudo_dot } => { + res.float_split_hack(has_pseudo_dot); + let ev = mem::replace(&mut events[i + 1], Event::tombstone()); + assert!(matches!(ev, Event::Finish), "{ev:?}"); + } Event::Error { msg } => res.error(msg), } } diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index 8932330b825db..7516ac3c4bd37 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -379,7 +379,7 @@ fn postfix_expr( // } T!['('] if allow_calls => call_expr(p, lhs), T!['['] if allow_calls => index_expr(p, lhs), - T![.] => match postfix_dot_expr(p, lhs) { + T![.] => match postfix_dot_expr::(p, lhs) { Ok(it) => it, Err(it) => { lhs = it; @@ -393,35 +393,44 @@ fn postfix_expr( block_like = BlockLike::NotBlock; } return (lhs, block_like); +} - fn postfix_dot_expr( - p: &mut Parser<'_>, - lhs: CompletedMarker, - ) -> Result { +fn postfix_dot_expr( + p: &mut Parser<'_>, + lhs: CompletedMarker, +) -> Result { + if !FLOAT_RECOVERY { assert!(p.at(T![.])); - if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) { - return Ok(method_call_expr(p, lhs)); - } + } + let nth1 = if FLOAT_RECOVERY { 0 } else { 1 }; + let nth2 = if FLOAT_RECOVERY { 1 } else { 2 }; - // test await_expr - // fn foo() { - // x.await; - // x.0.await; - // x.0().await?.hello(); - // } - if p.nth(1) == T![await] { - let m = lhs.precede(p); - p.bump(T![.]); - p.bump(T![await]); - return Ok(m.complete(p, AWAIT_EXPR)); - } + if p.nth(nth1) == IDENT && (p.nth(nth2) == T!['('] || p.nth_at(nth2, T![::])) { + return Ok(method_call_expr::(p, lhs)); + } - if p.at(T![..=]) || p.at(T![..]) { - return Err(lhs); + // test await_expr + // fn foo() { + // x.await; + // x.0.await; + // x.0().await?.hello(); + // x.0.0.await; + // x.0. await; + // } + if p.nth(nth1) == T![await] { + let m = lhs.precede(p); + if !FLOAT_RECOVERY { + p.bump(T![.]); } + p.bump(T![await]); + return Ok(m.complete(p, AWAIT_EXPR)); + } - Ok(field_expr(p, lhs)) + if p.at(T![..=]) || p.at(T![..]) { + return Err(lhs); } + + field_expr::(p, lhs) } // test call_expr @@ -455,11 +464,22 @@ fn index_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker { // fn foo() { // x.foo(); // y.bar::(1, 2,); +// x.0.0.call(); +// x.0. call(); // } -fn method_call_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::]))); +fn method_call_expr( + p: &mut Parser<'_>, + lhs: CompletedMarker, +) -> CompletedMarker { + if FLOAT_RECOVERY { + assert!(p.nth(0) == IDENT && (p.nth(1) == T!['('] || p.nth_at(1, T![::]))); + } else { + assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::]))); + } let m = lhs.precede(p); - p.bump_any(); + if !FLOAT_RECOVERY { + p.bump(T![.]); + } name_ref(p); generic_args::opt_generic_arg_list(p, true); if p.at(T!['(']) { @@ -472,21 +492,35 @@ fn method_call_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker // fn foo() { // x.foo; // x.0.bar; +// x.0.1; +// x.0. 
bar; // x.0(); // } -fn field_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker { - assert!(p.at(T![.])); +fn field_expr( + p: &mut Parser<'_>, + lhs: CompletedMarker, +) -> Result { + if !FLOAT_RECOVERY { + assert!(p.at(T![.])); + } let m = lhs.precede(p); - p.bump(T![.]); + if !FLOAT_RECOVERY { + p.bump(T![.]); + } if p.at(IDENT) || p.at(INT_NUMBER) { name_ref_or_index(p); } else if p.at(FLOAT_NUMBER) { - // FIXME: How to recover and instead parse INT + T![.]? - p.bump_any(); + return match p.split_float(m) { + (true, m) => { + let lhs = m.complete(p, FIELD_EXPR); + postfix_dot_expr::(p, lhs) + } + (false, m) => Ok(m.complete(p, FIELD_EXPR)), + }; } else { p.error("expected field name or number"); } - m.complete(p, FIELD_EXPR) + Ok(m.complete(p, FIELD_EXPR)) } // test try_expr diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index 87be47927735a..f20d32d6cf877 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -102,7 +102,7 @@ impl TopEntryPoint { match step { Step::Enter { .. } => depth += 1, Step::Exit => depth -= 1, - Step::Token { .. } | Step::Error { .. } => (), + Step::FloatSplit { .. } | Step::Token { .. } | Step::Error { .. } => (), } } assert!(!first, "no tree at all"); diff --git a/crates/parser/src/output.rs b/crates/parser/src/output.rs index 3de6c0aba86ef..9587c8cb1ba62 100644 --- a/crates/parser/src/output.rs +++ b/crates/parser/src/output.rs @@ -25,6 +25,7 @@ pub struct Output { #[derive(Debug)] pub enum Step<'a> { Token { kind: SyntaxKind, n_input_tokens: u8 }, + FloatSplit { has_pseudo_dot: bool }, Enter { kind: SyntaxKind }, Exit, Error { msg: &'a str }, @@ -44,6 +45,7 @@ impl Output { const TOKEN_EVENT: u8 = 0; const ENTER_EVENT: u8 = 1; const EXIT_EVENT: u8 = 2; + const SPLIT_EVENT: u8 = 3; pub fn iter(&self) -> impl Iterator> { self.event.iter().map(|&event| { @@ -67,6 +69,9 @@ impl Output { Step::Enter { kind } } Self::EXIT_EVENT => Step::Exit, + Self::SPLIT_EVENT => { + Step::FloatSplit { has_pseudo_dot: event & Self::N_INPUT_TOKEN_MASK != 0 } + } _ => unreachable!(), } }) @@ -79,6 +84,13 @@ impl Output { self.event.push(e) } + pub(crate) fn float_split_hack(&mut self, has_pseudo_dot: bool) { + let e = (Self::SPLIT_EVENT as u32) << Self::TAG_SHIFT + | ((has_pseudo_dot as u32) << Self::N_INPUT_TOKEN_SHIFT) + | Self::EVENT_MASK; + self.event.push(e); + } + pub(crate) fn enter_node(&mut self, kind: SyntaxKind) { let e = ((kind as u16 as u32) << Self::KIND_SHIFT) | ((Self::ENTER_EVENT as u32) << Self::TAG_SHIFT) diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs index 48aecb35be128..0f4fa6022919c 100644 --- a/crates/parser/src/parser.rs +++ b/crates/parser/src/parser.rs @@ -181,6 +181,38 @@ impl<'t> Parser<'t> { self.do_bump(kind, 1); } + /// Advances the parser by one token + pub(crate) fn split_float(&mut self, marker: Marker) -> (bool, Marker) { + assert!(self.at(SyntaxKind::FLOAT_NUMBER)); + // we have parse `.` + // ``.0.1 + // here we need to insert an extra event + // + // ``. 0. 
1; + // here we need to change the follow up parse, the return value will cause us to emulate a dot + // the actual splitting happens later + let has_pseudo_dot = !self.inp.is_joint(self.pos); + let marker = if !has_pseudo_dot { + let new_pos = self.start(); + let idx = marker.pos as usize; + match &mut self.events[idx] { + Event::Start { forward_parent, kind } => { + *kind = SyntaxKind::FIELD_EXPR; + *forward_parent = Some(new_pos.pos - marker.pos); + } + _ => unreachable!(), + } + // NOTE: This brings the start / finish pairs out of balance! + std::mem::forget(marker); + new_pos + } else { + marker + }; + self.pos += 1 as usize; + self.push_event(Event::FloatSplitHack { has_pseudo_dot }); + (has_pseudo_dot, marker) + } + /// Advances the parser by one token, remapping its kind. /// This is useful to create contextual keywords from /// identifiers. For example, the lexer creates a `union` diff --git a/crates/parser/src/shortcuts.rs b/crates/parser/src/shortcuts.rs index 2be4050d13579..18a6f838face5 100644 --- a/crates/parser/src/shortcuts.rs +++ b/crates/parser/src/shortcuts.rs @@ -44,7 +44,17 @@ impl<'a> LexedStr<'a> { } res.push(kind); } - was_joint = true; + if kind == SyntaxKind::FLOAT_NUMBER { + // we set jointness for floating point numbers as a hack to inform the + // parser about whether we have a `0.` or `0.1` style float + if self.text(i).split_once('.').map_or(false, |(_, it)| it.is_empty()) { + was_joint = false; + } else { + was_joint = true; + } + } else { + was_joint = true; + } } } res @@ -63,6 +73,7 @@ impl<'a> LexedStr<'a> { Step::Token { kind, n_input_tokens: n_raw_tokens } => { builder.token(kind, n_raw_tokens) } + Step::FloatSplit { has_pseudo_dot } => builder.float_split(has_pseudo_dot), Step::Enter { kind } => builder.enter(kind), Step::Exit => builder.exit(), Step::Error { msg } => { @@ -109,6 +120,16 @@ impl Builder<'_, '_> { self.do_token(kind, n_tokens as usize); } + fn float_split(&mut self, has_pseudo_dot: bool) { + match mem::replace(&mut self.state, State::Normal) { + State::PendingEnter => unreachable!(), + State::PendingExit => (self.sink)(StrStep::Exit), + State::Normal => (), + } + self.eat_trivias(); + self.do_float_split(has_pseudo_dot); + } + fn enter(&mut self, kind: SyntaxKind) { match mem::replace(&mut self.state, State::Normal) { State::PendingEnter => { @@ -164,6 +185,37 @@ impl Builder<'_, '_> { self.pos += n_tokens; (self.sink)(StrStep::Token { kind, text }); } + + fn do_float_split(&mut self, has_pseudo_dot: bool) { + let text = &self.lexed.range_text(self.pos..self.pos + 1); + self.pos += 1; + match text.split_once('.') { + Some((left, right)) => { + assert!(!left.is_empty()); + (self.sink)(StrStep::Enter { kind: SyntaxKind::NAME_REF }); + (self.sink)(StrStep::Token { kind: SyntaxKind::INT_NUMBER, text: left }); + (self.sink)(StrStep::Exit); + + // here we move the exit up, the original exit has been deleted in process + (self.sink)(StrStep::Exit); + + (self.sink)(StrStep::Token { kind: SyntaxKind::DOT, text: "." 
}); + + if has_pseudo_dot { + assert!(right.is_empty()); + self.state = State::Normal; + } else { + (self.sink)(StrStep::Enter { kind: SyntaxKind::NAME_REF }); + (self.sink)(StrStep::Token { kind: SyntaxKind::INT_NUMBER, text: right }); + (self.sink)(StrStep::Exit); + + // the parser creates an unbalanced start node, we are required to close it here + self.state = State::PendingExit; + } + } + None => unreachable!(), + } + } } fn n_attached_trivias<'a>( diff --git a/crates/parser/src/tests/prefix_entries.rs b/crates/parser/src/tests/prefix_entries.rs index e626b4f27e0c3..40f92e58804f9 100644 --- a/crates/parser/src/tests/prefix_entries.rs +++ b/crates/parser/src/tests/prefix_entries.rs @@ -51,6 +51,9 @@ fn expr() { check(PrefixEntryPoint::Expr, "-1", "-1"); check(PrefixEntryPoint::Expr, "fn foo() {}", "fn"); check(PrefixEntryPoint::Expr, "#[attr] ()", "#[attr] ()"); + check(PrefixEntryPoint::Expr, "foo.0", "foo.0"); + check(PrefixEntryPoint::Expr, "foo.0.1", "foo.0.1"); + check(PrefixEntryPoint::Expr, "foo.0. foo", "foo.0. foo"); } #[test] @@ -88,6 +91,7 @@ fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) { for step in entry.parse(&input).iter() { match step { Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize, + Step::FloatSplit { .. } => n_tokens += 1, Step::Enter { .. } | Step::Exit | Step::Error { .. } => (), } } diff --git a/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast b/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast index 8498724b9ef07..dd27dc4896424 100644 --- a/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast +++ b/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast @@ -40,6 +40,39 @@ SOURCE_FILE IDENT "bar" SEMICOLON ";" WHITESPACE "\n " + EXPR_STMT + FIELD_EXPR + FIELD_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + DOT "." + NAME_REF + INT_NUMBER "0" + DOT "." + NAME_REF + INT_NUMBER "1" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + FIELD_EXPR + FIELD_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + DOT "." + NAME_REF + INT_NUMBER "0" + DOT "." + WHITESPACE " " + NAME_REF + IDENT "bar" + SEMICOLON ";" + WHITESPACE "\n " EXPR_STMT CALL_EXPR FIELD_EXPR diff --git a/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs b/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs index b8da2ddc30947..98dbe45a7ec92 100644 --- a/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs +++ b/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs @@ -1,5 +1,7 @@ fn foo() { x.foo; x.0.bar; + x.0.1; + x.0. bar; x.0(); } diff --git a/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast b/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast index dcbcfe1231e62..b28b8eb673a70 100644 --- a/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast +++ b/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast @@ -58,6 +58,49 @@ SOURCE_FILE COMMA "," R_PAREN ")" SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + METHOD_CALL_EXPR + FIELD_EXPR + FIELD_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + DOT "." + NAME_REF + INT_NUMBER "0" + DOT "." + NAME_REF + INT_NUMBER "0" + DOT "." + NAME_REF + IDENT "call" + ARG_LIST + L_PAREN "(" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + METHOD_CALL_EXPR + FIELD_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + DOT "." + NAME_REF + INT_NUMBER "0" + DOT "." 
+ WHITESPACE " " + NAME_REF + IDENT "call" + ARG_LIST + L_PAREN "(" + R_PAREN ")" + SEMICOLON ";" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs b/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs index 1a3aa35ae8e73..48bb6381e80bf 100644 --- a/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs +++ b/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs @@ -1,4 +1,6 @@ fn foo() { x.foo(); y.bar::(1, 2,); + x.0.0.call(); + x.0. call(); } diff --git a/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast b/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast index 9d37ada0da8de..af713a22072fe 100644 --- a/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast +++ b/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast @@ -65,6 +65,41 @@ SOURCE_FILE L_PAREN "(" R_PAREN ")" SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + AWAIT_EXPR + FIELD_EXPR + FIELD_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + DOT "." + NAME_REF + INT_NUMBER "0" + DOT "." + NAME_REF + INT_NUMBER "0" + DOT "." + AWAIT_KW "await" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + AWAIT_EXPR + FIELD_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "x" + DOT "." + NAME_REF + INT_NUMBER "0" + DOT "." + WHITESPACE " " + AWAIT_KW "await" + SEMICOLON ";" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs b/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs index d2ba89ca60758..fe9a3211bb18d 100644 --- a/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs +++ b/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs @@ -2,4 +2,6 @@ fn foo() { x.await; x.0.await; x.0().await?.hello(); + x.0.0.await; + x.0. await; } From a465d6a8600faed173f896f4b8dc2db0ee2d5498 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Fri, 3 Feb 2023 16:48:35 +0000 Subject: [PATCH 252/501] Move codegen_and_compile_fn to driver/jit.rs --- src/base.rs | 17 ----------------- src/driver/jit.rs | 44 ++++++++++++++++++++++++++------------------ 2 files changed, 26 insertions(+), 35 deletions(-) diff --git a/src/base.rs b/src/base.rs index d3a8c10657e8d..6e9a1bfa7a82b 100644 --- a/src/base.rs +++ b/src/base.rs @@ -21,23 +21,6 @@ pub(crate) struct CodegenedFunction { func_debug_cx: Option, } -#[cfg_attr(not(feature = "jit"), allow(dead_code))] -pub(crate) fn codegen_and_compile_fn<'tcx>( - tcx: TyCtxt<'tcx>, - cx: &mut crate::CodegenCx, - cached_context: &mut Context, - module: &mut dyn Module, - instance: Instance<'tcx>, -) { - let _inst_guard = - crate::PrintOnPanic(|| format!("{:?} {}", instance, tcx.symbol_name(instance).name)); - - let cached_func = std::mem::replace(&mut cached_context.func, Function::new()); - let codegened_func = codegen_fn(tcx, cx, cached_func, module, instance); - - compile_fn(cx, cached_context, module, codegened_func); -} - pub(crate) fn codegen_fn<'tcx>( tcx: TyCtxt<'tcx>, cx: &mut crate::CodegenCx, diff --git a/src/driver/jit.rs b/src/driver/jit.rs index be1b8c9ead3bf..25cb6ecbf6b88 100644 --- a/src/driver/jit.rs +++ b/src/driver/jit.rs @@ -128,15 +128,13 @@ pub(crate) fn run_jit(tcx: TyCtxt<'_>, backend_config: BackendConfig) -> ! 
{ MonoItem::Fn(inst) => match backend_config.codegen_mode { CodegenMode::Aot => unreachable!(), CodegenMode::Jit => { - tcx.sess.time("codegen fn", || { - crate::base::codegen_and_compile_fn( - tcx, - &mut cx, - &mut cached_context, - &mut jit_module, - inst, - ) - }); + codegen_and_compile_fn( + tcx, + &mut cx, + &mut cached_context, + &mut jit_module, + inst, + ); } CodegenMode::JitLazy => { codegen_shim(tcx, &mut cx, &mut cached_context, &mut jit_module, inst) @@ -219,6 +217,24 @@ pub(crate) fn run_jit(tcx: TyCtxt<'_>, backend_config: BackendConfig) -> ! { } } +pub(crate) fn codegen_and_compile_fn<'tcx>( + tcx: TyCtxt<'tcx>, + cx: &mut crate::CodegenCx, + cached_context: &mut Context, + module: &mut dyn Module, + instance: Instance<'tcx>, +) { + tcx.sess.time("codegen and compile fn", || { + let _inst_guard = + crate::PrintOnPanic(|| format!("{:?} {}", instance, tcx.symbol_name(instance).name)); + + let cached_func = std::mem::replace(&mut cached_context.func, Function::new()); + let codegened_func = crate::base::codegen_fn(tcx, cx, cached_func, module, instance); + + crate::base::compile_fn(cx, cached_context, module, codegened_func); + }); +} + extern "C" fn clif_jit_fn( instance_ptr: *const Instance<'static>, trampoline_ptr: *const u8, @@ -271,15 +287,7 @@ fn jit_fn(instance_ptr: *const Instance<'static>, trampoline_ptr: *const u8) -> false, Symbol::intern("dummy_cgu_name"), ); - tcx.sess.time("codegen fn", || { - crate::base::codegen_and_compile_fn( - tcx, - &mut cx, - &mut Context::new(), - jit_module, - instance, - ) - }); + codegen_and_compile_fn(tcx, &mut cx, &mut Context::new(), jit_module, instance); assert!(cx.global_asm.is_empty()); jit_module.finalize_definitions().unwrap(); From d2ffe40864e6dc93881225621a05b833957963a5 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Fri, 3 Feb 2023 17:16:09 +0000 Subject: [PATCH 253/501] Cleanup -Ztime-passes output --- src/base.rs | 15 +++-- src/config.rs | 8 --- src/driver/aot.rs | 144 +++++++++++++++++++++++++--------------------- src/driver/jit.rs | 4 +- src/driver/mod.rs | 15 +---- 5 files changed, 92 insertions(+), 94 deletions(-) diff --git a/src/base.rs b/src/base.rs index 6e9a1bfa7a82b..184ddbdd28063 100644 --- a/src/base.rs +++ b/src/base.rs @@ -95,7 +95,7 @@ pub(crate) fn codegen_fn<'tcx>( next_ssa_var: 0, }; - tcx.sess.time("codegen clif ir", || codegen_fn_body(&mut fx, start_block)); + tcx.prof.generic_activity("codegen clif ir").run(|| codegen_fn_body(&mut fx, start_block)); fx.bcx.seal_all_blocks(); fx.bcx.finalize(); @@ -174,7 +174,7 @@ pub(crate) fn compile_fn( }; // Define function - cx.profiler.verbose_generic_activity("define function").run(|| { + cx.profiler.generic_activity("define function").run(|| { context.want_disasm = cx.should_write_ir; module.define_function(codegened_func.func_id, context).unwrap(); }); @@ -203,7 +203,7 @@ pub(crate) fn compile_fn( let isa = module.isa(); let debug_context = &mut cx.debug_context; let unwind_context = &mut cx.unwind_context; - cx.profiler.verbose_generic_activity("generate debug info").run(|| { + cx.profiler.generic_activity("generate debug info").run(|| { if let Some(debug_context) = debug_context { codegened_func.func_debug_cx.unwrap().finalize( debug_context, @@ -220,7 +220,7 @@ pub(crate) fn verify_func( writer: &crate::pretty_clif::CommentWriter, func: &Function, ) { - tcx.sess.time("verify clif ir", || { + tcx.prof.generic_activity("verify clif ir").run(|| { let flags = 
cranelift_codegen::settings::Flags::new(cranelift_codegen::settings::builder()); match cranelift_codegen::verify_function(&func, &flags) { Ok(_) => {} @@ -256,7 +256,10 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) { fx.bcx.ins().trap(TrapCode::UnreachableCodeReached); return; } - fx.tcx.sess.time("codegen prelude", || crate::abi::codegen_fn_prelude(fx, start_block)); + fx.tcx + .prof + .generic_activity("codegen prelude") + .run(|| crate::abi::codegen_fn_prelude(fx, start_block)); for (bb, bb_data) in fx.mir.basic_blocks.iter_enumerated() { let block = fx.get_block(bb); @@ -417,7 +420,7 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) { cleanup: _, from_hir_call: _, } => { - fx.tcx.sess.time("codegen call", || { + fx.tcx.prof.generic_activity("codegen call").run(|| { crate::abi::codegen_terminator_call( fx, mir::SourceInfo { span: *fn_span, ..source_info }, diff --git a/src/config.rs b/src/config.rs index 45522fb1a4cab..263401e1c4b83 100644 --- a/src/config.rs +++ b/src/config.rs @@ -42,12 +42,6 @@ pub struct BackendConfig { /// Defaults to the value of `CG_CLIF_JIT_ARGS`. pub jit_args: Vec, - /// Display the time it took to perform codegen for a crate. - /// - /// Defaults to true when the `CG_CLIF_DISPLAY_CG_TIME` env var is set to 1 or false otherwise. - /// Can be set using `-Cllvm-args=display_cg_time=...`. - pub display_cg_time: bool, - /// Enable the Cranelift ir verifier for all compilation passes. If not set it will only run /// once before passing the clif ir to Cranelift for compilation. /// @@ -73,7 +67,6 @@ impl Default for BackendConfig { let args = std::env::var("CG_CLIF_JIT_ARGS").unwrap_or_else(|_| String::new()); args.split(' ').map(|arg| arg.to_string()).collect() }, - display_cg_time: bool_env_var("CG_CLIF_DISPLAY_CG_TIME"), enable_verifier: cfg!(debug_assertions) || bool_env_var("CG_CLIF_ENABLE_VERIFIER"), disable_incr_cache: bool_env_var("CG_CLIF_DISABLE_INCR_CACHE"), } @@ -92,7 +85,6 @@ impl BackendConfig { if let Some((name, value)) = opt.split_once('=') { match name { "mode" => config.codegen_mode = value.parse()?, - "display_cg_time" => config.display_cg_time = parse_bool(name, value)?, "enable_verifier" => config.enable_verifier = parse_bool(name, value)?, "disable_incr_cache" => config.disable_incr_cache = parse_bool(name, value)?, _ => return Err(format!("Unknown option `{}`", name)), diff --git a/src/driver/aot.rs b/src/driver/aot.rs index d4494a9e45de4..2aa3209e2b158 100644 --- a/src/driver/aot.rs +++ b/src/driver/aot.rs @@ -272,80 +272,97 @@ fn module_codegen( ConcurrencyLimiterToken, ), ) -> OngoingModuleCodegen { - let (cgu_name, mut cx, mut module, codegened_functions) = tcx.sess.time("codegen cgu", || { - let cgu = tcx.codegen_unit(cgu_name); - let mono_items = cgu.items_in_deterministic_order(tcx); - - let mut module = make_module(tcx.sess, &backend_config, cgu_name.as_str().to_string()); - - let mut cx = crate::CodegenCx::new( - tcx, - backend_config.clone(), - module.isa(), - tcx.sess.opts.debuginfo != DebugInfo::None, - cgu_name, - ); - super::predefine_mono_items(tcx, &mut module, &mono_items); - let mut codegened_functions = vec![]; - for (mono_item, _) in mono_items { - match mono_item { - MonoItem::Fn(inst) => { - tcx.sess.time("codegen fn", || { - let codegened_function = crate::base::codegen_fn( + let (cgu_name, mut cx, mut module, codegened_functions) = + tcx.prof.verbose_generic_activity_with_arg("codegen cgu", cgu_name.as_str()).run(|| { + let cgu = tcx.codegen_unit(cgu_name); + let 
mono_items = cgu.items_in_deterministic_order(tcx); + + let mut module = make_module(tcx.sess, &backend_config, cgu_name.as_str().to_string()); + + let mut cx = crate::CodegenCx::new( + tcx, + backend_config.clone(), + module.isa(), + tcx.sess.opts.debuginfo != DebugInfo::None, + cgu_name, + ); + super::predefine_mono_items(tcx, &mut module, &mono_items); + let mut codegened_functions = vec![]; + for (mono_item, _) in mono_items { + match mono_item { + MonoItem::Fn(inst) => { + tcx.prof.generic_activity("codegen fn").run(|| { + let codegened_function = crate::base::codegen_fn( + tcx, + &mut cx, + Function::new(), + &mut module, + inst, + ); + codegened_functions.push(codegened_function); + }); + } + MonoItem::Static(def_id) => { + crate::constant::codegen_static(tcx, &mut module, def_id) + } + MonoItem::GlobalAsm(item_id) => { + crate::global_asm::codegen_global_asm_item( tcx, - &mut cx, - Function::new(), - &mut module, - inst, + &mut cx.global_asm, + item_id, ); - codegened_functions.push(codegened_function); - }); - } - MonoItem::Static(def_id) => { - crate::constant::codegen_static(tcx, &mut module, def_id) - } - MonoItem::GlobalAsm(item_id) => { - crate::global_asm::codegen_global_asm_item(tcx, &mut cx.global_asm, item_id); + } } } - } - crate::main_shim::maybe_create_entry_wrapper( - tcx, - &mut module, - &mut cx.unwind_context, - false, - cgu.is_primary(), - ); + crate::main_shim::maybe_create_entry_wrapper( + tcx, + &mut module, + &mut cx.unwind_context, + false, + cgu.is_primary(), + ); - let cgu_name = cgu.name().as_str().to_owned(); + let cgu_name = cgu.name().as_str().to_owned(); - (cgu_name, cx, module, codegened_functions) - }); + (cgu_name, cx, module, codegened_functions) + }); OngoingModuleCodegen::Async(std::thread::spawn(move || { - cx.profiler.clone().verbose_generic_activity("compile functions").run(|| { - let mut cached_context = Context::new(); - for codegened_func in codegened_functions { - crate::base::compile_fn(&mut cx, &mut cached_context, &mut module, codegened_func); - } - }); + cx.profiler.clone().verbose_generic_activity_with_arg("compile functions", &*cgu_name).run( + || { + let mut cached_context = Context::new(); + for codegened_func in codegened_functions { + crate::base::compile_fn( + &mut cx, + &mut cached_context, + &mut module, + codegened_func, + ); + } + }, + ); - let global_asm_object_file = - cx.profiler.verbose_generic_activity("compile assembly").run(|| { + let global_asm_object_file = cx + .profiler + .verbose_generic_activity_with_arg("compile assembly", &*cgu_name) + .run(|| { crate::global_asm::compile_global_asm(&global_asm_config, &cgu_name, &cx.global_asm) })?; - let codegen_result = cx.profiler.verbose_generic_activity("write object file").run(|| { - emit_cgu( - &global_asm_config.output_filenames, - &cx.profiler, - cgu_name, - module, - cx.debug_context, - cx.unwind_context, - global_asm_object_file, - ) - }); + let codegen_result = cx + .profiler + .verbose_generic_activity_with_arg("write object file", &*cgu_name) + .run(|| { + emit_cgu( + &global_asm_config.output_filenames, + &cx.profiler, + cgu_name, + module, + cx.debug_context, + cx.unwind_context, + global_asm_object_file, + ) + }); std::mem::drop(token); codegen_result })) @@ -375,7 +392,7 @@ pub(crate) fn run_aot( let mut concurrency_limiter = ConcurrencyLimiter::new(tcx.sess, cgus.len()); - let modules = super::time(tcx, backend_config.display_cg_time, "codegen mono items", || { + let modules = tcx.sess.time("codegen mono items", || { cgus.iter() .map(|cgu| { let cgu_reuse = 
if backend_config.disable_incr_cache { @@ -437,7 +454,6 @@ pub(crate) fn run_aot( }; let metadata_module = if need_metadata_module { - let _timer = tcx.prof.generic_activity("codegen crate metadata"); let (metadata_cgu_name, tmp_file) = tcx.sess.time("write compressed metadata", || { use rustc_middle::mir::mono::CodegenUnitNameBuilder; diff --git a/src/driver/jit.rs b/src/driver/jit.rs index 25cb6ecbf6b88..8b5a2da2c5944 100644 --- a/src/driver/jit.rs +++ b/src/driver/jit.rs @@ -121,7 +121,7 @@ pub(crate) fn run_jit(tcx: TyCtxt<'_>, backend_config: BackendConfig) -> ! { .into_iter() .collect::>(); - super::time(tcx, backend_config.display_cg_time, "codegen mono items", || { + tcx.sess.time("codegen mono items", || { super::predefine_mono_items(tcx, &mut jit_module, &mono_items); for (mono_item, _) in mono_items { match mono_item { @@ -224,7 +224,7 @@ pub(crate) fn codegen_and_compile_fn<'tcx>( module: &mut dyn Module, instance: Instance<'tcx>, ) { - tcx.sess.time("codegen and compile fn", || { + tcx.prof.generic_activity("codegen and compile fn").run(|| { let _inst_guard = crate::PrintOnPanic(|| format!("{:?} {}", instance, tcx.symbol_name(instance).name)); diff --git a/src/driver/mod.rs b/src/driver/mod.rs index 6e925cea27707..d09d3a529759c 100644 --- a/src/driver/mod.rs +++ b/src/driver/mod.rs @@ -17,7 +17,7 @@ fn predefine_mono_items<'tcx>( module: &mut dyn Module, mono_items: &[(MonoItem<'tcx>, (RLinkage, Visibility))], ) { - tcx.sess.time("predefine functions", || { + tcx.prof.generic_activity("predefine functions").run(|| { let is_compiler_builtins = tcx.is_compiler_builtins(LOCAL_CRATE); for &(mono_item, (linkage, visibility)) in mono_items { match mono_item { @@ -39,16 +39,3 @@ fn predefine_mono_items<'tcx>( } }); } - -fn time(tcx: TyCtxt<'_>, display: bool, name: &'static str, f: impl FnOnce() -> R) -> R { - if display { - println!("[{:<30}: {}] start", tcx.crate_name(LOCAL_CRATE), name); - let before = std::time::Instant::now(); - let res = tcx.sess.time(name, f); - let after = std::time::Instant::now(); - println!("[{:<30}: {}] end time: {:?}", tcx.crate_name(LOCAL_CRATE), name, after - before); - res - } else { - tcx.sess.time(name, f) - } -} From 469783c263c3ac0aadde100cbf7e575c21caad1f Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Fri, 3 Feb 2023 17:33:39 +0000 Subject: [PATCH 254/501] Use generic_activity_with_arg in codegen_fn and compile_fn --- src/base.rs | 7 ++++++- src/driver/aot.rs | 18 ++++++++---------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/src/base.rs b/src/base.rs index 184ddbdd28063..b43bb5e8668d6 100644 --- a/src/base.rs +++ b/src/base.rs @@ -30,6 +30,9 @@ pub(crate) fn codegen_fn<'tcx>( ) -> CodegenedFunction { debug_assert!(!instance.substs.needs_infer()); + let symbol_name = tcx.symbol_name(instance).name.to_string(); + let _timer = tcx.prof.generic_activity_with_arg("codegen fn", &*symbol_name); + let mir = tcx.instance_mir(instance.def); let _mir_guard = crate::PrintOnPanic(|| { let mut buf = Vec::new(); @@ -41,7 +44,6 @@ pub(crate) fn codegen_fn<'tcx>( }); // Declare function - let symbol_name = tcx.symbol_name(instance).name.to_string(); let sig = get_function_sig(tcx, module.target_config().default_call_conv, instance); let func_id = module.declare_function(&symbol_name, Linkage::Local, &sig).unwrap(); @@ -129,6 +131,9 @@ pub(crate) fn compile_fn( module: &mut dyn Module, codegened_func: CodegenedFunction, ) { + let _timer = + cx.profiler.generic_activity_with_arg("compile function", 
&*codegened_func.symbol_name); + let clif_comments = codegened_func.clif_comments; // Store function in context diff --git a/src/driver/aot.rs b/src/driver/aot.rs index 2aa3209e2b158..58b01dfb5b0ec 100644 --- a/src/driver/aot.rs +++ b/src/driver/aot.rs @@ -291,16 +291,14 @@ fn module_codegen( for (mono_item, _) in mono_items { match mono_item { MonoItem::Fn(inst) => { - tcx.prof.generic_activity("codegen fn").run(|| { - let codegened_function = crate::base::codegen_fn( - tcx, - &mut cx, - Function::new(), - &mut module, - inst, - ); - codegened_functions.push(codegened_function); - }); + let codegened_function = crate::base::codegen_fn( + tcx, + &mut cx, + Function::new(), + &mut module, + inst, + ); + codegened_functions.push(codegened_function); } MonoItem::Static(def_id) => { crate::constant::codegen_static(tcx, &mut module, def_id) From a0d3b0963d68ebf4d63bd8ac4a3ca5ab3cc44873 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Fri, 3 Feb 2023 18:35:09 +0000 Subject: [PATCH 255/501] Record cranelift pass timings in self profile results --- src/base.rs | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/base.rs b/src/base.rs index b43bb5e8668d6..3f5cc38b04c62 100644 --- a/src/base.rs +++ b/src/base.rs @@ -182,6 +182,27 @@ pub(crate) fn compile_fn( cx.profiler.generic_activity("define function").run(|| { context.want_disasm = cx.should_write_ir; module.define_function(codegened_func.func_id, context).unwrap(); + + if cx.profiler.enabled() { + let mut recording_args = false; + cx.profiler + .generic_activity_with_arg_recorder( + "define function (clif pass timings)", + |recorder| { + let pass_times = cranelift_codegen::timing::take_current(); + // Replace newlines with | as measureme doesn't allow control characters like + // newlines inside strings. 
+ recorder.record_arg(format!("{}", pass_times).replace("\n", " | ")); + recording_args = true; + }, + ) + .run(|| { + if recording_args { + // Wait a tiny bit to ensure chrome's profiler doesn't hide the event + std::thread::sleep(std::time::Duration::from_nanos(2)) + } + }); + } }); if cx.should_write_ir { From f5669a3548ac853418e4890e142964e27fb2dd2b Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Fri, 3 Feb 2023 19:59:05 +0000 Subject: [PATCH 256/501] Fix assert_mem_uninitialized_valid in release mode --- src/intrinsics/mod.rs | 31 ++++++++++++------------------- 1 file changed, 12 insertions(+), 19 deletions(-) diff --git a/src/intrinsics/mod.rs b/src/intrinsics/mod.rs index d561cf139b6c9..892e7c30e2f7a 100644 --- a/src/intrinsics/mod.rs +++ b/src/intrinsics/mod.rs @@ -218,22 +218,6 @@ pub(crate) fn codegen_intrinsic_call<'tcx>( let intrinsic = fx.tcx.item_name(instance.def_id()); let substs = instance.substs; - let target = if let Some(target) = target { - target - } else { - // Insert non returning intrinsics here - match intrinsic { - sym::abort => { - fx.bcx.ins().trap(TrapCode::User(0)); - } - sym::transmute => { - crate::base::codegen_panic(fx, "Transmuting to uninhabited type.", source_info); - } - _ => unimplemented!("unsupported intrinsic {}", intrinsic), - } - return; - }; - if intrinsic.as_str().starts_with("simd_") { self::simd::codegen_simd_intrinsic_call( fx, @@ -241,11 +225,11 @@ pub(crate) fn codegen_intrinsic_call<'tcx>( substs, args, destination, - target, + target.expect("target for simd intrinsic"), source_info.span, ); } else if codegen_float_intrinsic_call(fx, intrinsic, args, destination) { - let ret_block = fx.get_block(target); + let ret_block = fx.get_block(target.expect("target for float intrinsic")); fx.bcx.ins().jump(ret_block, &[]); } else { codegen_regular_intrinsic_call( @@ -255,7 +239,7 @@ pub(crate) fn codegen_intrinsic_call<'tcx>( substs, args, destination, - Some(target), + target, source_info, ); } @@ -382,6 +366,10 @@ fn codegen_regular_intrinsic_call<'tcx>( let usize_layout = fx.layout_of(fx.tcx.types.usize); match intrinsic { + sym::abort => { + fx.bcx.ins().trap(TrapCode::User(0)); + return; + } sym::likely | sym::unlikely => { intrinsic_args!(fx, args => (a); intrinsic); @@ -579,6 +567,11 @@ fn codegen_regular_intrinsic_call<'tcx>( sym::transmute => { intrinsic_args!(fx, args => (from); intrinsic); + if ret.layout().abi.is_uninhabited() { + crate::base::codegen_panic(fx, "Transmuting to uninhabited type.", source_info); + return; + } + ret.write_cvalue_transmute(fx, from); } sym::write_bytes | sym::volatile_set_memory => { From efb92a1ae85317ec5cbde43655e2975c56443c4c Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Fri, 3 Feb 2023 20:01:39 +0000 Subject: [PATCH 257/501] Use packed debuginfo for the sysroot on macOS This reduces the sysroot size --- build_system/build_sysroot.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/build_system/build_sysroot.rs b/build_system/build_sysroot.rs index bd04fdbe304a3..2e04f2c68116c 100644 --- a/build_system/build_sysroot.rs +++ b/build_system/build_sysroot.rs @@ -248,6 +248,9 @@ fn build_clif_sysroot_for_triple( build_cmd.arg("--release"); } build_cmd.env("__CARGO_DEFAULT_LIB_METADATA", "cg_clif"); + if compiler.triple.contains("apple") { + build_cmd.env("CARGO_PROFILE_RELEASE_SPLIT_DEBUGINFO", "packed"); + } spawn_and_wait(build_cmd); for entry in fs::read_dir(build_dir.join("deps")).unwrap() { From 
a3637032db5426a47d2ed6c91dcb4f65929bcf58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20Kr=C3=BCger?= Date: Fri, 3 Feb 2023 20:48:25 +0100 Subject: [PATCH 258/501] unused-lifetimes: don't warn about lifetimes originating from expanded code previously, we would warn like this: ```` warning: lifetime parameter `'s` never used --> /tmp/unusedlif/code.rs:6:62 | 5 | #[derive(Clone)] | - help: elide the unused lifetime 6 | struct ShimMethod4(pub &'static dyn for<'s> Fn(&'s mut T::As)); | ^^ | = note: requested on the command line with `-W unused-lifetimes` ```` Fixes #104432 --- .../rustc_resolve/src/late/diagnostics.rs | 28 +++++++++++-------- ...ue-104432-unused-lifetimes-in-expansion.rs | 12 ++++++++ 2 files changed, 28 insertions(+), 12 deletions(-) create mode 100644 tests/ui/lifetimes/issue-104432-unused-lifetimes-in-expansion.rs diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs index cee0a7f3c203d..a9dbb3ca1319c 100644 --- a/compiler/rustc_resolve/src/late/diagnostics.rs +++ b/compiler/rustc_resolve/src/late/diagnostics.rs @@ -2244,19 +2244,23 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> { } None => { debug!(?param.ident, ?param.ident.span); - let deletion_span = deletion_span(); - self.r.lint_buffer.buffer_lint_with_diagnostic( - lint::builtin::UNUSED_LIFETIMES, - param.id, - param.ident.span, - &format!("lifetime parameter `{}` never used", param.ident), - lint::BuiltinLintDiagnostics::SingleUseLifetime { - param_span: param.ident.span, - use_span: None, - deletion_span, - }, - ); + // the give lifetime originates from expanded code so we won't be able to remove it #104432 + let lifetime_only_in_expanded_code = + deletion_span.map(|sp| sp.in_derive_expansion()).unwrap_or(true); + if !lifetime_only_in_expanded_code { + self.r.lint_buffer.buffer_lint_with_diagnostic( + lint::builtin::UNUSED_LIFETIMES, + param.id, + param.ident.span, + &format!("lifetime parameter `{}` never used", param.ident), + lint::BuiltinLintDiagnostics::SingleUseLifetime { + param_span: param.ident.span, + use_span: None, + deletion_span, + }, + ); + } } } } diff --git a/tests/ui/lifetimes/issue-104432-unused-lifetimes-in-expansion.rs b/tests/ui/lifetimes/issue-104432-unused-lifetimes-in-expansion.rs new file mode 100644 index 0000000000000..5d5429ec895be --- /dev/null +++ b/tests/ui/lifetimes/issue-104432-unused-lifetimes-in-expansion.rs @@ -0,0 +1,12 @@ +// check-pass + +#![deny(unused_lifetimes)] +trait Trait2 { + type As; +} + +// we should not warn about an unused lifetime about code generated from this proc macro here +#[derive(Clone)] +struct ShimMethod4(pub &'static dyn for<'s> Fn(&'s mut T::As)); + +pub fn main() {} From 9053bcc65c41707e2272757fdccf3a97e167217d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 3 Feb 2023 21:39:24 +0100 Subject: [PATCH 259/501] Make mbe compile with parser changes --- crates/mbe/src/syntax_bridge.rs | 1 + crates/mbe/src/tt_iter.rs | 12 ++++++++---- crates/tt/src/buffer.rs | 8 +++++--- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index fbf6b53006ad1..7fe4fcfc68e42 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -95,6 +95,7 @@ pub fn token_tree_to_syntax_node( parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => { tree_sink.token(kind, n_raw_tokens) } + parser::Step::FloatSplit { .. 
} => tree_sink.token(SyntaxKind::FLOAT_NUMBER, 1), parser::Step::Enter { kind } => tree_sink.start_node(kind), parser::Step::Exit => tree_sink.finish_node(), parser::Step::Error { msg } => tree_sink.error(msg.to_string()), diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index e5f6b1372209f..b38243caeefd0 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -140,6 +140,7 @@ impl<'a> TtIter<'a> { let mut cursor = buffer.begin(); let mut error = false; + let mut float_splits = vec![]; for step in tree_traversal.iter() { match step { parser::Step::Token { kind, mut n_input_tokens } => { @@ -150,6 +151,10 @@ impl<'a> TtIter<'a> { cursor = cursor.bump_subtree(); } } + parser::Step::FloatSplit { .. } => { + float_splits.push(cursor); + cursor = cursor.bump_subtree(); + } parser::Step::Enter { .. } | parser::Step::Exit => (), parser::Step::Error { .. } => error = true, } @@ -167,18 +172,17 @@ impl<'a> TtIter<'a> { if cursor.is_root() { while curr != cursor { if let Some(token) = curr.token_tree() { - res.push(token); + res.push(token.cloned()); } curr = curr.bump(); } } self.inner = self.inner.as_slice()[res.len()..].iter(); let res = match res.len() { - 1 => Some(res[0].cloned()), - 0 => None, + 0 | 1 => res.pop(), _ => Some(tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter::unspecified(), - token_trees: res.into_iter().map(|it| it.cloned()).collect(), + token_trees: res, })), }; ExpandResult { value: res, err } diff --git a/crates/tt/src/buffer.rs b/crates/tt/src/buffer.rs index 4484431124e13..c4b455e3f138a 100644 --- a/crates/tt/src/buffer.rs +++ b/crates/tt/src/buffer.rs @@ -16,8 +16,8 @@ enum Entry<'t, Span> { // Mimicking types from proc-macro. Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId), Leaf(&'t TokenTree), - // End entries contain a pointer to the entry from the containing - // token tree, or None if this is the outermost level. + /// End entries contain a pointer to the entry from the containing + /// token tree, or [`None`] if this is the outermost level. End(Option), } @@ -226,7 +226,9 @@ impl<'a, Span> Cursor<'a, Span> { /// a cursor into that subtree pub fn bump_subtree(self) -> Cursor<'a, Span> { match self.entry() { - Some(Entry::Subtree(_, _, _)) => self.subtree().unwrap(), + Some(&Entry::Subtree(_, _, entry_id)) => { + Cursor::create(self.buffer, EntryPtr(entry_id, 0)) + } _ => self.bump(), } } From d1052349affb49f87d6684f4b7781dbcba8db5a2 Mon Sep 17 00:00:00 2001 From: Peter Jaszkowiak Date: Fri, 3 Feb 2023 16:50:49 -0700 Subject: [PATCH 260/501] reword descriptions of the deprecated int modules --- library/core/src/num/shells/i128.rs | 4 +--- library/core/src/num/shells/i16.rs | 4 +--- library/core/src/num/shells/i32.rs | 4 +--- library/core/src/num/shells/i64.rs | 4 +--- library/core/src/num/shells/i8.rs | 4 +--- library/core/src/num/shells/isize.rs | 4 +--- library/core/src/num/shells/u128.rs | 4 +--- library/core/src/num/shells/u16.rs | 4 +--- library/core/src/num/shells/u32.rs | 4 +--- library/core/src/num/shells/u64.rs | 4 +--- library/core/src/num/shells/u8.rs | 4 +--- library/core/src/num/shells/usize.rs | 4 +--- 12 files changed, 12 insertions(+), 36 deletions(-) diff --git a/library/core/src/num/shells/i128.rs b/library/core/src/num/shells/i128.rs index 7b048dc5206bc..b3b3d3b4875ab 100644 --- a/library/core/src/num/shells/i128.rs +++ b/library/core/src/num/shells/i128.rs @@ -1,6 +1,4 @@ -//! Constants for the 128-bit signed integer type. -//! -//! *[See also the `i128` primitive type][i128].* +//! 
Redundant constants module for the [`i128` primitive type][i128]. //! //! New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/i16.rs b/library/core/src/num/shells/i16.rs index 5c5812d5c5ecd..70a452e193983 100644 --- a/library/core/src/num/shells/i16.rs +++ b/library/core/src/num/shells/i16.rs @@ -1,6 +1,4 @@ -//! Constants for the 16-bit signed integer type. -//! -//! *[See also the `i16` primitive type][i16].* +//! Redundant constants module for the [`i16` primitive type][i16]. //! //! New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/i32.rs b/library/core/src/num/shells/i32.rs index b283ac64415f6..c30849e2591c3 100644 --- a/library/core/src/num/shells/i32.rs +++ b/library/core/src/num/shells/i32.rs @@ -1,6 +1,4 @@ -//! Constants for the 32-bit signed integer type. -//! -//! *[See also the `i32` primitive type][i32].* +//! Redundant constants module for the [`i32` primitive type][i32]. //! //! New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/i64.rs b/library/core/src/num/shells/i64.rs index a416fa7e9361d..77d95d712506b 100644 --- a/library/core/src/num/shells/i64.rs +++ b/library/core/src/num/shells/i64.rs @@ -1,6 +1,4 @@ -//! Constants for the 64-bit signed integer type. -//! -//! *[See also the `i64` primitive type][i64].* +//! Redundant constants module for the [`i64` primitive type][i64]. //! //! New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/i8.rs b/library/core/src/num/shells/i8.rs index 02465013a4a77..516ba8cdef3bf 100644 --- a/library/core/src/num/shells/i8.rs +++ b/library/core/src/num/shells/i8.rs @@ -1,6 +1,4 @@ -//! Constants for the 8-bit signed integer type. -//! -//! *[See also the `i8` primitive type][i8].* +//! Redundant constants module for the [`i8` primitive type][i8]. //! //! New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/isize.rs b/library/core/src/num/shells/isize.rs index 1579fbab6d47f..828f7345bafbe 100644 --- a/library/core/src/num/shells/isize.rs +++ b/library/core/src/num/shells/isize.rs @@ -1,6 +1,4 @@ -//! Constants for the pointer-sized signed integer type. -//! -//! *[See also the `isize` primitive type][isize].* +//! Redundant constants module for the [`isize` primitive type][isize]. //! //! New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/u128.rs b/library/core/src/num/shells/u128.rs index fe08cee586c3d..b1e30e3843525 100644 --- a/library/core/src/num/shells/u128.rs +++ b/library/core/src/num/shells/u128.rs @@ -1,6 +1,4 @@ -//! Constants for the 128-bit unsigned integer type. -//! -//! *[See also the `u128` primitive type][u128].* +//! Redundant constants module for the [`u128` primitive type][u128]. //! //! New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/u16.rs b/library/core/src/num/shells/u16.rs index 36f8c6978789d..b203806f46005 100644 --- a/library/core/src/num/shells/u16.rs +++ b/library/core/src/num/shells/u16.rs @@ -1,6 +1,4 @@ -//! Constants for the 16-bit unsigned integer type. -//! -//! *[See also the `u16` primitive type][u16].* +//! Redundant constants module for the [`i16` primitive type][i16]. //! //! 
New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/u32.rs b/library/core/src/num/shells/u32.rs index 1c369097dcdb0..4c84274e752ec 100644 --- a/library/core/src/num/shells/u32.rs +++ b/library/core/src/num/shells/u32.rs @@ -1,6 +1,4 @@ -//! Constants for the 32-bit unsigned integer type. -//! -//! *[See also the `u32` primitive type][u32].* +//! Redundant constants module for the [`u32` primitive type][u32]. //! //! New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/u64.rs b/library/core/src/num/shells/u64.rs index e8b691d155572..47a95c6820f2f 100644 --- a/library/core/src/num/shells/u64.rs +++ b/library/core/src/num/shells/u64.rs @@ -1,6 +1,4 @@ -//! Constants for the 64-bit unsigned integer type. -//! -//! *[See also the `u64` primitive type][u64].* +//! Redundant constants module for the [`u64` primitive type][u64]. //! //! New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/u8.rs b/library/core/src/num/shells/u8.rs index 817c6a18aaaa3..360baef722869 100644 --- a/library/core/src/num/shells/u8.rs +++ b/library/core/src/num/shells/u8.rs @@ -1,6 +1,4 @@ -//! Constants for the 8-bit unsigned integer type. -//! -//! *[See also the `u8` primitive type][u8].* +//! Redundant constants module for the [`u8` primitive type][u8]. //! //! New code should use the associated constants directly on the primitive type. diff --git a/library/core/src/num/shells/usize.rs b/library/core/src/num/shells/usize.rs index 3e1bec5ec4815..44c24dfc2cf58 100644 --- a/library/core/src/num/shells/usize.rs +++ b/library/core/src/num/shells/usize.rs @@ -1,6 +1,4 @@ -//! Constants for the pointer-sized unsigned integer type. -//! -//! *[See also the `usize` primitive type][usize].* +//! Redundant constants module for the [`usize` primitive type][usize]. //! //! New code should use the associated constants directly on the primitive type. From 5f98a7f00e338c0985e7743be5b23dbd8f1039b3 Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Fri, 3 Feb 2023 17:36:27 -0700 Subject: [PATCH 261/501] rustdoc: use the same URL escape rules for fragments as for examples --- src/librustdoc/html/markdown.rs | 43 +---------- src/librustdoc/html/render/mod.rs | 75 +++++++++++++------ .../const-generics/const-generics-docs.rs | 6 +- tests/rustdoc/const-generics/const-impl.rs | 10 +-- tests/rustdoc/double-quote-escape.rs | 2 +- tests/rustdoc/primitive-tuple-variadic.rs | 4 +- .../rustdoc/sidebar-links-to-foreign-impl.rs | 4 +- tests/rustdoc/where-clause-order.rs | 2 +- 8 files changed, 68 insertions(+), 78 deletions(-) diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index 00e3f859bfcb3..03382aeeb737d 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -46,6 +46,7 @@ use crate::html::escape::Escape; use crate::html::format::Buffer; use crate::html::highlight; use crate::html::length_limit::HtmlWithLimit; +use crate::html::render::small_url_encode; use crate::html::toc::TocBuilder; use pulldown_cmark::{ @@ -294,47 +295,7 @@ impl<'a, I: Iterator>> Iterator for CodeBlocks<'_, 'a, I> { doctest::make_test(&test, krate, false, &Default::default(), edition, None); let channel = if test.contains("#![feature(") { "&version=nightly" } else { "" }; - // These characters don't need to be escaped in a URI. 
- // See https://url.spec.whatwg.org/#query-percent-encode-set - // and https://url.spec.whatwg.org/#urlencoded-parsing - // and https://url.spec.whatwg.org/#url-code-points - fn dont_escape(c: u8) -> bool { - (b'a' <= c && c <= b'z') - || (b'A' <= c && c <= b'Z') - || (b'0' <= c && c <= b'9') - || c == b'-' - || c == b'_' - || c == b'.' - || c == b',' - || c == b'~' - || c == b'!' - || c == b'\'' - || c == b'(' - || c == b')' - || c == b'*' - || c == b'/' - || c == b';' - || c == b':' - || c == b'?' - // As described in urlencoded-parsing, the - // first `=` is the one that separates key from - // value. Following `=`s are part of the value. - || c == b'=' - } - let mut test_escaped = String::new(); - for b in test.bytes() { - if dont_escape(b) { - test_escaped.push(char::from(b)); - } else if b == b' ' { - // URL queries are decoded with + replaced with SP - test_escaped.push('+'); - } else if b == b'%' { - test_escaped.push('%'); - test_escaped.push('%'); - } else { - write!(test_escaped, "%{:02X}", b).unwrap(); - } - } + let test_escaped = small_url_encode(test); Some(format!( r#"Run"#, url, test_escaped, channel, edition, diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index 816a8f4e274ce..fa22c46120517 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -38,7 +38,7 @@ pub(crate) use self::span_map::{collect_spans_and_sources, LinkFromSrc}; use std::collections::VecDeque; use std::default::Default; -use std::fmt; +use std::fmt::{self, Write}; use std::fs; use std::iter::Peekable; use std::path::PathBuf; @@ -2020,31 +2020,60 @@ fn get_associated_constants( .collect::>() } -// The point is to url encode any potential character from a type with genericity. -fn small_url_encode(s: String) -> String { +pub(crate) fn small_url_encode(s: String) -> String { + // These characters don't need to be escaped in a URI. + // See https://url.spec.whatwg.org/#query-percent-encode-set + // and https://url.spec.whatwg.org/#urlencoded-parsing + // and https://url.spec.whatwg.org/#url-code-points + fn dont_escape(c: u8) -> bool { + (b'a' <= c && c <= b'z') + || (b'A' <= c && c <= b'Z') + || (b'0' <= c && c <= b'9') + || c == b'-' + || c == b'_' + || c == b'.' + || c == b',' + || c == b'~' + || c == b'!' + || c == b'\'' + || c == b'(' + || c == b')' + || c == b'*' + || c == b'/' + || c == b';' + || c == b':' + || c == b'?' + // As described in urlencoded-parsing, the + // first `=` is the one that separates key from + // value. Following `=`s are part of the value. + || c == b'=' + } let mut st = String::new(); let mut last_match = 0; - for (idx, c) in s.char_indices() { - let escaped = match c { - '<' => "%3C", - '>' => "%3E", - ' ' => "%20", - '?' => "%3F", - '\'' => "%27", - '&' => "%26", - ',' => "%2C", - ':' => "%3A", - ';' => "%3B", - '[' => "%5B", - ']' => "%5D", - '"' => "%22", - _ => continue, - }; + for (idx, b) in s.bytes().enumerate() { + if dont_escape(b) { + continue; + } - st += &s[last_match..idx]; - st += escaped; - // NOTE: we only expect single byte characters here - which is fine as long as we - // only match single byte characters + if last_match != idx { + // Invariant: `idx` must be the first byte in a character at this point. + st += &s[last_match..idx]; + } + if b == b' ' { + // URL queries are decoded with + replaced with SP. + // While the same is not true for hashes, rustdoc only needs to be + // consistent with itself when encoding them. 
+ st += "+"; + } else if b == b'%' { + st += "%%"; + } else { + write!(st, "%{:02X}", b).unwrap(); + } + // Invariant: if the current byte is not at the start of a multi-byte character, + // we need to get down here so that when the next turn of the loop comes around, + // last_match winds up equalling idx. + // + // In other words, dont_escape must always return `false` in multi-byte character. last_match = idx + 1; } diff --git a/tests/rustdoc/const-generics/const-generics-docs.rs b/tests/rustdoc/const-generics/const-generics-docs.rs index ade70bbe80d92..7e27ef8d8e5b8 100644 --- a/tests/rustdoc/const-generics/const-generics-docs.rs +++ b/tests/rustdoc/const-generics/const-generics-docs.rs @@ -21,8 +21,8 @@ pub use extern_crate::WTrait; // 'pub trait Trait' // @has - '//*[@id="impl-Trait%3C1%3E-for-u8"]//h3[@class="code-header"]' 'impl Trait<1> for u8' // @has - '//*[@id="impl-Trait%3C2%3E-for-u8"]//h3[@class="code-header"]' 'impl Trait<2> for u8' -// @has - '//*[@id="impl-Trait%3C{1%20+%202}%3E-for-u8"]//h3[@class="code-header"]' 'impl Trait<{1 + 2}> for u8' -// @has - '//*[@id="impl-Trait%3CN%3E-for-%5Bu8%3B%20N%5D"]//h3[@class="code-header"]' \ +// @has - '//*[@id="impl-Trait%3C%7B1+%2B+2%7D%3E-for-u8"]//h3[@class="code-header"]' 'impl Trait<{1 + 2}> for u8' +// @has - '//*[@id="impl-Trait%3CN%3E-for-%5Bu8;+N%5D"]//h3[@class="code-header"]' \ // 'impl Trait for [u8; N]' pub trait Trait {} impl Trait<1> for u8 {} @@ -47,7 +47,7 @@ impl Foo where u8: Trait { } } -// @has foo/struct.Bar.html '//*[@id="impl-Bar%3Cu8%2C%20M%3E"]/h3[@class="code-header"]' 'impl Bar' +// @has foo/struct.Bar.html '//*[@id="impl-Bar%3Cu8,+M%3E"]/h3[@class="code-header"]' 'impl Bar' impl Bar { // @has - '//*[@id="method.hey"]' \ // 'pub fn hey(&self) -> Foowhere u8: Trait' diff --git a/tests/rustdoc/const-generics/const-impl.rs b/tests/rustdoc/const-generics/const-impl.rs index 91866b7d890c7..152b643bf4bd8 100644 --- a/tests/rustdoc/const-generics/const-impl.rs +++ b/tests/rustdoc/const-generics/const-impl.rs @@ -9,20 +9,20 @@ pub enum Order { } // @has foo/struct.VSet.html '//pre[@class="rust item-decl"]' 'pub struct VSet' -// @has foo/struct.VSet.html '//*[@id="impl-Send-for-VSet%3CT%2C%20ORDER%3E"]/h3[@class="code-header"]' 'impl Send for VSet' -// @has foo/struct.VSet.html '//*[@id="impl-Sync-for-VSet%3CT%2C%20ORDER%3E"]/h3[@class="code-header"]' 'impl Sync for VSet' +// @has foo/struct.VSet.html '//*[@id="impl-Send-for-VSet%3CT,+ORDER%3E"]/h3[@class="code-header"]' 'impl Send for VSet' +// @has foo/struct.VSet.html '//*[@id="impl-Sync-for-VSet%3CT,+ORDER%3E"]/h3[@class="code-header"]' 'impl Sync for VSet' pub struct VSet { inner: Vec, } -// @has foo/struct.VSet.html '//*[@id="impl-VSet%3CT%2C%20{%20Order%3A%3ASorted%20}%3E"]/h3[@class="code-header"]' 'impl VSet' +// @has foo/struct.VSet.html '//*[@id="impl-VSet%3CT,+%7B+Order::Sorted+%7D%3E"]/h3[@class="code-header"]' 'impl VSet' impl VSet { pub fn new() -> Self { Self { inner: Vec::new() } } } -// @has foo/struct.VSet.html '//*[@id="impl-VSet%3CT%2C%20{%20Order%3A%3AUnsorted%20}%3E"]/h3[@class="code-header"]' 'impl VSet' +// @has foo/struct.VSet.html '//*[@id="impl-VSet%3CT,+%7B+Order::Unsorted+%7D%3E"]/h3[@class="code-header"]' 'impl VSet' impl VSet { pub fn new() -> Self { Self { inner: Vec::new() } @@ -31,7 +31,7 @@ impl VSet { pub struct Escape; -// @has foo/struct.Escape.html '//*[@id="impl-Escape%3Cr#%22%3Cscript%3Ealert(%22Escape%22)%3B%3C/script%3E%22#%3E"]/h3[@class="code-header"]' 'impl Escapealert("Escape");"#>' +// @has foo/struct.Escape.html 
'//*[@id="impl-Escape%3Cr%23%22%3Cscript%3Ealert(%22Escape%22);%3C/script%3E%22%23%3E"]/h3[@class="code-header"]' 'impl Escapealert("Escape");"#>' impl Escapealert("Escape");"#> { pub fn f() {} } diff --git a/tests/rustdoc/double-quote-escape.rs b/tests/rustdoc/double-quote-escape.rs index 350c897417d1f..4f4436377a07b 100644 --- a/tests/rustdoc/double-quote-escape.rs +++ b/tests/rustdoc/double-quote-escape.rs @@ -7,5 +7,5 @@ pub trait Foo { pub struct Bar; // @has foo/struct.Bar.html -// @has - '//*[@class="sidebar-elems"]//section//a[@href="#impl-Foo%3Cunsafe%20extern%20%22C%22%20fn()%3E-for-Bar"]' 'Foo' +// @has - '//*[@class="sidebar-elems"]//section//a[@href="#impl-Foo%3Cunsafe+extern+%22C%22+fn()%3E-for-Bar"]' 'Foo' impl Foo for Bar {} diff --git a/tests/rustdoc/primitive-tuple-variadic.rs b/tests/rustdoc/primitive-tuple-variadic.rs index db7cfd60c71a6..846028bbb1906 100644 --- a/tests/rustdoc/primitive-tuple-variadic.rs +++ b/tests/rustdoc/primitive-tuple-variadic.rs @@ -6,13 +6,13 @@ pub trait Foo {} // @has foo/trait.Foo.html -// @has - '//section[@id="impl-Foo-for-(T%2C)"]/h3' 'impl Foo for (T₁, T₂, …, Tₙ)' +// @has - '//section[@id="impl-Foo-for-(T,)"]/h3' 'impl Foo for (T₁, T₂, …, Tₙ)' #[doc(fake_variadic)] impl Foo for (T,) {} pub trait Bar {} // @has foo/trait.Bar.html -// @has - '//section[@id="impl-Bar-for-(U%2C)"]/h3' 'impl Bar for (U₁, U₂, …, Uₙ)' +// @has - '//section[@id="impl-Bar-for-(U,)"]/h3' 'impl Bar for (U₁, U₂, …, Uₙ)' #[doc(fake_variadic)] impl Bar for (U,) {} diff --git a/tests/rustdoc/sidebar-links-to-foreign-impl.rs b/tests/rustdoc/sidebar-links-to-foreign-impl.rs index 11e946948026d..caa17dfbb1c73 100644 --- a/tests/rustdoc/sidebar-links-to-foreign-impl.rs +++ b/tests/rustdoc/sidebar-links-to-foreign-impl.rs @@ -7,8 +7,8 @@ // @has - '//h2[@id="foreign-impls"]' 'Implementations on Foreign Types' // @has - '//*[@class="sidebar-elems"]//section//a[@href="#impl-Foo-for-u32"]' 'u32' // @has - '//*[@id="impl-Foo-for-u32"]//h3[@class="code-header"]' 'impl Foo for u32' -// @has - '//*[@class="sidebar-elems"]//section//a[@href="#impl-Foo-for-%26%27a%20str"]' "&'a str" -// @has - '//*[@id="impl-Foo-for-%26%27a%20str"]//h3[@class="code-header"]' "impl<'a> Foo for &'a str" +// @has - "//*[@class=\"sidebar-elems\"]//section//a[@href=\"#impl-Foo-for-%26'a+str\"]" "&'a str" +// @has - "//*[@id=\"impl-Foo-for-%26'a+str\"]//h3[@class=\"code-header\"]" "impl<'a> Foo for &'a str" pub trait Foo {} impl Foo for u32 {} diff --git a/tests/rustdoc/where-clause-order.rs b/tests/rustdoc/where-clause-order.rs index b8502e10a48c4..b10f8f6856e8c 100644 --- a/tests/rustdoc/where-clause-order.rs +++ b/tests/rustdoc/where-clause-order.rs @@ -7,7 +7,7 @@ where } // @has 'foo/trait.SomeTrait.html' -// @has - "//*[@id='impl-SomeTrait%3C(A%2C%20B%2C%20C%2C%20D%2C%20E)%3E-for-(A%2C%20B%2C%20C%2C%20D%2C%20E)']/h3" "impl SomeTrait<(A, B, C, D, E)> for (A, B, C, D, E)where A: PartialOrd + PartialEq, B: PartialOrd + PartialEq, C: PartialOrd + PartialEq, D: PartialOrd + PartialEq, E: PartialOrd + PartialEq + ?Sized, " +// @has - "//*[@id='impl-SomeTrait%3C(A,+B,+C,+D,+E)%3E-for-(A,+B,+C,+D,+E)']/h3" "impl SomeTrait<(A, B, C, D, E)> for (A, B, C, D, E)where A: PartialOrd + PartialEq, B: PartialOrd + PartialEq, C: PartialOrd + PartialEq, D: PartialOrd + PartialEq, E: PartialOrd + PartialEq + ?Sized, " impl SomeTrait<(A, B, C, D, E)> for (A, B, C, D, E) where A: PartialOrd + PartialEq, From fa6c3a2d2aab19fa95c8612c53ac87ac4c8c64dc Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Fri, 3 Feb 2023 
19:02:20 -0700 Subject: [PATCH 262/501] docs: update fragment for Result impls --- library/core/src/result.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/library/core/src/result.rs b/library/core/src/result.rs index f00c40f35d584..7596e9cc005e9 100644 --- a/library/core/src/result.rs +++ b/library/core/src/result.rs @@ -458,7 +458,7 @@ //! [`Result`] of a collection of each contained value of the original //! [`Result`] values, or [`Err`] if any of the elements was [`Err`]. //! -//! [impl-FromIterator]: Result#impl-FromIterator%3CResult%3CA%2C%20E%3E%3E-for-Result%3CV%2C%20E%3E +//! [impl-FromIterator]: Result#impl-FromIterator%3CResult%3CA,+E%3E%3E-for-Result%3CV,+E%3E //! //! ``` //! let v = [Ok(2), Ok(4), Err("err!"), Ok(8)]; @@ -474,8 +474,8 @@ //! to provide the [`product`][Iterator::product] and //! [`sum`][Iterator::sum] methods. //! -//! [impl-Product]: Result#impl-Product%3CResult%3CU%2C%20E%3E%3E-for-Result%3CT%2C%20E%3E -//! [impl-Sum]: Result#impl-Sum%3CResult%3CU%2C%20E%3E%3E-for-Result%3CT%2C%20E%3E +//! [impl-Product]: Result#impl-Product%3CResult%3CU,+E%3E%3E-for-Result%3CT,+E%3E +//! [impl-Sum]: Result#impl-Sum%3CResult%3CU,+E%3E%3E-for-Result%3CT,+E%3E //! //! ``` //! let v = [Err("error!"), Ok(1), Ok(2), Ok(3), Err("foo")]; From 761a91ade850d4e21964ebe44fc9ba2e423c9571 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Sat, 4 Feb 2023 10:22:24 +0000 Subject: [PATCH 263/501] Build CI dist artifacts without debug assertions This significantly improves performance. For example for the simple-raytracer benchmark it goes from a 13% improvement over LLVM to 39% improvement over LLVM. --- .github/workflows/main.yml | 105 ++++++++++++++++++++++++++++------ build_system/build_backend.rs | 6 +- build_system/mod.rs | 8 ++- build_system/utils.rs | 6 +- 4 files changed, 100 insertions(+), 25 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 92a1d8c189779..9d67886ba8698 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -21,6 +21,7 @@ jobs: cargo fmt --check rustfmt --check build_system/mod.rs + build: runs-on: ${{ matrix.os }} timeout-minutes: 60 @@ -112,23 +113,6 @@ jobs: TARGET_TRIPLE: ${{ matrix.env.TARGET_TRIPLE }} run: ./y.rs test - - name: Package prebuilt cg_clif - run: tar cvfJ cg_clif.tar.xz dist - - - name: Upload prebuilt cg_clif - if: matrix.os == 'windows-latest' || matrix.env.TARGET_TRIPLE != 'x86_64-pc-windows-gnu' - uses: actions/upload-artifact@v3 - with: - name: cg_clif-${{ matrix.env.TARGET_TRIPLE }} - path: cg_clif.tar.xz - - - name: Upload prebuilt cg_clif (cross compile) - if: matrix.os != 'windows-latest' && matrix.env.TARGET_TRIPLE == 'x86_64-pc-windows-gnu' - uses: actions/upload-artifact@v3 - with: - name: cg_clif-${{ runner.os }}-cross-x86_64-mingw - path: cg_clif.tar.xz - abi_cafe: runs-on: ${{ matrix.os }} @@ -186,6 +170,7 @@ jobs: TARGET_TRIPLE: ${{ matrix.env.TARGET_TRIPLE }} run: ./y.rs abi-cafe + bench: runs-on: ubuntu-latest timeout-minutes: 60 @@ -223,7 +208,89 @@ jobs: run: ./y.rs prepare - name: Build - run: ./y.rs build --sysroot none + run: CI_OPT=1 ./y.rs build --sysroot none - name: Benchmark - run: ./y.rs bench + run: CI_OPT=1 ./y.rs bench + + + dist: + runs-on: ${{ matrix.os }} + timeout-minutes: 60 + + defaults: + run: + shell: bash + + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-20.04 # FIXME switch to ubuntu-22.04 once #1303 is fixed + env: + TARGET_TRIPLE: x86_64-unknown-linux-gnu + - os: 
macos-latest + env: + TARGET_TRIPLE: x86_64-apple-darwin + # cross-compile from Linux to Windows using mingw + - os: ubuntu-latest + env: + TARGET_TRIPLE: x86_64-pc-windows-gnu + - os: windows-latest + env: + TARGET_TRIPLE: x86_64-pc-windows-msvc + - os: windows-latest + env: + TARGET_TRIPLE: x86_64-pc-windows-gnu + + steps: + - uses: actions/checkout@v3 + + - name: Cache cargo target dir + uses: actions/cache@v3 + with: + path: build/cg_clif + key: ${{ runner.os }}-${{ matrix.env.TARGET_TRIPLE }}-dist-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }} + + - name: Set MinGW as the default toolchain + if: matrix.os == 'windows-latest' && matrix.env.TARGET_TRIPLE == 'x86_64-pc-windows-gnu' + run: rustup set default-host x86_64-pc-windows-gnu + + - name: Install MinGW toolchain and wine + if: matrix.os == 'ubuntu-latest' && matrix.env.TARGET_TRIPLE == 'x86_64-pc-windows-gnu' + run: | + sudo apt-get update + sudo apt-get install -y gcc-mingw-w64-x86-64 wine-stable + + - name: Use sparse cargo registry + run: | + cat >> ~/.cargo/config.toml < bool { - env::var("CI").as_deref() == Ok("true") + env::var("CI").is_ok() +} + +pub(crate) fn is_ci_opt() -> bool { + env::var("CI_OPT").is_ok() } From c642cfe3bf245ffe34f6e994f0b359b267be5e3d Mon Sep 17 00:00:00 2001 From: xFrednet Date: Sat, 4 Feb 2023 19:10:19 +0100 Subject: [PATCH 264/501] Make `[clippy::dump]` support trait items --- clippy_lints/src/utils/dump_hir.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/clippy_lints/src/utils/dump_hir.rs b/clippy_lints/src/utils/dump_hir.rs index 01efc527a8c3a..092041aecf29c 100644 --- a/clippy_lints/src/utils/dump_hir.rs +++ b/clippy_lints/src/utils/dump_hir.rs @@ -1,4 +1,5 @@ use clippy_utils::get_attr; +use hir::TraitItem; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_session::{declare_lint_pass, declare_tool_lint}; @@ -47,6 +48,18 @@ impl<'tcx> LateLintPass<'tcx> for DumpHir { println!("{stmt:#?}"); } } + + fn check_trait_item(&mut self, cx: &LateContext<'_>, item: &TraitItem<'_>) { + if has_attr(cx, item.hir_id()) { + println!("{item:#?}"); + } + } + + fn check_impl_item(&mut self, cx: &LateContext<'_>, item: &hir::ImplItem<'_>) { + if has_attr(cx, item.hir_id()) { + println!("{item:#?}"); + } + } } fn has_attr(cx: &LateContext<'_>, hir_id: hir::HirId) -> bool { From 5546c82051e05f3f30733c9885f68f5bf9d1759a Mon Sep 17 00:00:00 2001 From: Alex Macleod Date: Sat, 4 Feb 2023 19:12:06 +0000 Subject: [PATCH 265/501] Negate suggestions when needed in `bool_assert_comparison` --- clippy_lints/src/bool_assert_comparison.rs | 53 +++++---- tests/ui/bool_assert_comparison.fixed | 38 ++++--- tests/ui/bool_assert_comparison.rs | 10 ++ tests/ui/bool_assert_comparison.stderr | 126 ++++++++++++++++++--- 4 files changed, 176 insertions(+), 51 deletions(-) diff --git a/clippy_lints/src/bool_assert_comparison.rs b/clippy_lints/src/bool_assert_comparison.rs index 556fa579000c6..1d9096ea64d1c 100644 --- a/clippy_lints/src/bool_assert_comparison.rs +++ b/clippy_lints/src/bool_assert_comparison.rs @@ -1,5 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::macros::{find_assert_eq_args, root_macro_call_first_node}; +use clippy_utils::sugg::Sugg; use clippy_utils::ty::{implements_trait, is_copy}; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; @@ -34,14 +35,16 @@ declare_clippy_lint! 
{ declare_lint_pass!(BoolAssertComparison => [BOOL_ASSERT_COMPARISON]); -fn is_bool_lit(e: &Expr<'_>) -> bool { - matches!( - e.kind, - ExprKind::Lit(Lit { - node: LitKind::Bool(_), - .. - }) - ) && !e.span.from_expansion() +fn extract_bool_lit(e: &Expr<'_>) -> Option { + if let ExprKind::Lit(Lit { + node: LitKind::Bool(b), .. + }) = e.kind + && !e.span.from_expansion() + { + Some(b) + } else { + None + } } fn is_impl_not_trait_with_bool_out<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool { @@ -69,24 +72,23 @@ impl<'tcx> LateLintPass<'tcx> for BoolAssertComparison { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return }; let macro_name = cx.tcx.item_name(macro_call.def_id); - if !matches!( - macro_name.as_str(), - "assert_eq" | "debug_assert_eq" | "assert_ne" | "debug_assert_ne" - ) { - return; - } + let eq_macro = match macro_name.as_str() { + "assert_eq" | "debug_assert_eq" => true, + "assert_ne" | "debug_assert_ne" => false, + _ => return, + }; let Some ((a, b, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return }; let a_span = a.span.source_callsite(); let b_span = b.span.source_callsite(); - let (lit_span, non_lit_expr) = match (is_bool_lit(a), is_bool_lit(b)) { - // assert_eq!(true, b) - // ^^^^^^ - (true, false) => (a_span.until(b_span), b), - // assert_eq!(a, true) - // ^^^^^^ - (false, true) => (b_span.with_lo(a_span.hi()), a), + let (lit_span, bool_value, non_lit_expr) = match (extract_bool_lit(a), extract_bool_lit(b)) { + // assert_eq!(true/false, b) + // ^^^^^^^^^^^^ + (Some(bool_value), None) => (a_span.until(b_span), bool_value, b), + // assert_eq!(a, true/false) + // ^^^^^^^^^^^^ + (None, Some(bool_value)) => (b_span.with_lo(a_span.hi()), bool_value, a), // If there are two boolean arguments, we definitely don't understand // what's going on, so better leave things as is... 
// @@ -121,9 +123,16 @@ impl<'tcx> LateLintPass<'tcx> for BoolAssertComparison { // ^^^^^^^^^ let name_span = cx.sess().source_map().span_until_char(macro_call.span, '!'); + let mut suggestions = vec![(name_span, non_eq_mac.to_string()), (lit_span, String::new())]; + + if bool_value ^ eq_macro { + let Some(sugg) = Sugg::hir_opt(cx, non_lit_expr) else { return }; + suggestions.push((non_lit_expr.span, (!sugg).to_string())); + } + diag.multipart_suggestion( format!("replace it with `{non_eq_mac}!(..)`"), - vec![(name_span, non_eq_mac.to_string()), (lit_span, String::new())], + suggestions, Applicability::MachineApplicable, ); }, diff --git a/tests/ui/bool_assert_comparison.fixed b/tests/ui/bool_assert_comparison.fixed index 95f35a61bb289..b8dd92906c8db 100644 --- a/tests/ui/bool_assert_comparison.fixed +++ b/tests/ui/bool_assert_comparison.fixed @@ -86,7 +86,7 @@ fn main() { let b = ImplNotTraitWithBool; assert_eq!("a".len(), 1); - assert!("a".is_empty()); + assert!(!"a".is_empty()); assert!("".is_empty()); assert!("".is_empty()); assert_eq!(a!(), b!()); @@ -97,16 +97,16 @@ fn main() { assert_ne!("a".len(), 1); assert!("a".is_empty()); - assert!("".is_empty()); - assert!("".is_empty()); + assert!(!"".is_empty()); + assert!(!"".is_empty()); assert_ne!(a!(), b!()); assert_ne!(a!(), "".is_empty()); assert_ne!("".is_empty(), b!()); assert_ne!(a, true); - assert!(b); + assert!(!b); debug_assert_eq!("a".len(), 1); - debug_assert!("a".is_empty()); + debug_assert!(!"a".is_empty()); debug_assert!("".is_empty()); debug_assert!("".is_empty()); debug_assert_eq!(a!(), b!()); @@ -117,27 +117,27 @@ fn main() { debug_assert_ne!("a".len(), 1); debug_assert!("a".is_empty()); - debug_assert!("".is_empty()); - debug_assert!("".is_empty()); + debug_assert!(!"".is_empty()); + debug_assert!(!"".is_empty()); debug_assert_ne!(a!(), b!()); debug_assert_ne!(a!(), "".is_empty()); debug_assert_ne!("".is_empty(), b!()); debug_assert_ne!(a, true); - debug_assert!(b); + debug_assert!(!b); // assert with error messages assert_eq!("a".len(), 1, "tadam {}", 1); assert_eq!("a".len(), 1, "tadam {}", true); - assert!("a".is_empty(), "tadam {}", 1); - assert!("a".is_empty(), "tadam {}", true); - assert!("a".is_empty(), "tadam {}", true); + assert!(!"a".is_empty(), "tadam {}", 1); + assert!(!"a".is_empty(), "tadam {}", true); + assert!(!"a".is_empty(), "tadam {}", true); assert_eq!(a, true, "tadam {}", false); debug_assert_eq!("a".len(), 1, "tadam {}", 1); debug_assert_eq!("a".len(), 1, "tadam {}", true); - debug_assert!("a".is_empty(), "tadam {}", 1); - debug_assert!("a".is_empty(), "tadam {}", true); - debug_assert!("a".is_empty(), "tadam {}", true); + debug_assert!(!"a".is_empty(), "tadam {}", 1); + debug_assert!(!"a".is_empty(), "tadam {}", true); + debug_assert!(!"a".is_empty(), "tadam {}", true); debug_assert_eq!(a, true, "tadam {}", false); assert!(a!()); @@ -158,4 +158,14 @@ fn main() { }}; } in_macro!(a); + + assert!("".is_empty()); + assert!("".is_empty()); + assert!(!"requires negation".is_empty()); + assert!(!"requires negation".is_empty()); + + debug_assert!("".is_empty()); + debug_assert!("".is_empty()); + debug_assert!(!"requires negation".is_empty()); + debug_assert!(!"requires negation".is_empty()); } diff --git a/tests/ui/bool_assert_comparison.rs b/tests/ui/bool_assert_comparison.rs index 88e7560b4f984..0a8ad34fda52a 100644 --- a/tests/ui/bool_assert_comparison.rs +++ b/tests/ui/bool_assert_comparison.rs @@ -158,4 +158,14 @@ fn main() { }}; } in_macro!(a); + + assert_eq!("".is_empty(), true); + 
assert_ne!("".is_empty(), false); + assert_ne!("requires negation".is_empty(), true); + assert_eq!("requires negation".is_empty(), false); + + debug_assert_eq!("".is_empty(), true); + debug_assert_ne!("".is_empty(), false); + debug_assert_ne!("requires negation".is_empty(), true); + debug_assert_eq!("requires negation".is_empty(), false); } diff --git a/tests/ui/bool_assert_comparison.stderr b/tests/ui/bool_assert_comparison.stderr index 3d9f8573e617c..89cefc95a9f69 100644 --- a/tests/ui/bool_assert_comparison.stderr +++ b/tests/ui/bool_assert_comparison.stderr @@ -8,7 +8,7 @@ LL | assert_eq!("a".is_empty(), false); help: replace it with `assert!(..)` | LL - assert_eq!("a".is_empty(), false); -LL + assert!("a".is_empty()); +LL + assert!(!"a".is_empty()); | error: used `assert_eq!` with a literal bool @@ -68,7 +68,7 @@ LL | assert_ne!("".is_empty(), true); help: replace it with `assert!(..)` | LL - assert_ne!("".is_empty(), true); -LL + assert!("".is_empty()); +LL + assert!(!"".is_empty()); | error: used `assert_ne!` with a literal bool @@ -80,7 +80,7 @@ LL | assert_ne!(true, "".is_empty()); help: replace it with `assert!(..)` | LL - assert_ne!(true, "".is_empty()); -LL + assert!("".is_empty()); +LL + assert!(!"".is_empty()); | error: used `assert_ne!` with a literal bool @@ -92,7 +92,7 @@ LL | assert_ne!(b, true); help: replace it with `assert!(..)` | LL - assert_ne!(b, true); -LL + assert!(b); +LL + assert!(!b); | error: used `debug_assert_eq!` with a literal bool @@ -104,7 +104,7 @@ LL | debug_assert_eq!("a".is_empty(), false); help: replace it with `debug_assert!(..)` | LL - debug_assert_eq!("a".is_empty(), false); -LL + debug_assert!("a".is_empty()); +LL + debug_assert!(!"a".is_empty()); | error: used `debug_assert_eq!` with a literal bool @@ -164,7 +164,7 @@ LL | debug_assert_ne!("".is_empty(), true); help: replace it with `debug_assert!(..)` | LL - debug_assert_ne!("".is_empty(), true); -LL + debug_assert!("".is_empty()); +LL + debug_assert!(!"".is_empty()); | error: used `debug_assert_ne!` with a literal bool @@ -176,7 +176,7 @@ LL | debug_assert_ne!(true, "".is_empty()); help: replace it with `debug_assert!(..)` | LL - debug_assert_ne!(true, "".is_empty()); -LL + debug_assert!("".is_empty()); +LL + debug_assert!(!"".is_empty()); | error: used `debug_assert_ne!` with a literal bool @@ -188,7 +188,7 @@ LL | debug_assert_ne!(b, true); help: replace it with `debug_assert!(..)` | LL - debug_assert_ne!(b, true); -LL + debug_assert!(b); +LL + debug_assert!(!b); | error: used `assert_eq!` with a literal bool @@ -200,7 +200,7 @@ LL | assert_eq!("a".is_empty(), false, "tadam {}", 1); help: replace it with `assert!(..)` | LL - assert_eq!("a".is_empty(), false, "tadam {}", 1); -LL + assert!("a".is_empty(), "tadam {}", 1); +LL + assert!(!"a".is_empty(), "tadam {}", 1); | error: used `assert_eq!` with a literal bool @@ -212,7 +212,7 @@ LL | assert_eq!("a".is_empty(), false, "tadam {}", true); help: replace it with `assert!(..)` | LL - assert_eq!("a".is_empty(), false, "tadam {}", true); -LL + assert!("a".is_empty(), "tadam {}", true); +LL + assert!(!"a".is_empty(), "tadam {}", true); | error: used `assert_eq!` with a literal bool @@ -224,7 +224,7 @@ LL | assert_eq!(false, "a".is_empty(), "tadam {}", true); help: replace it with `assert!(..)` | LL - assert_eq!(false, "a".is_empty(), "tadam {}", true); -LL + assert!("a".is_empty(), "tadam {}", true); +LL + assert!(!"a".is_empty(), "tadam {}", true); | error: used `debug_assert_eq!` with a literal bool @@ -236,7 +236,7 @@ LL | 
debug_assert_eq!("a".is_empty(), false, "tadam {}", 1); help: replace it with `debug_assert!(..)` | LL - debug_assert_eq!("a".is_empty(), false, "tadam {}", 1); -LL + debug_assert!("a".is_empty(), "tadam {}", 1); +LL + debug_assert!(!"a".is_empty(), "tadam {}", 1); | error: used `debug_assert_eq!` with a literal bool @@ -248,7 +248,7 @@ LL | debug_assert_eq!("a".is_empty(), false, "tadam {}", true); help: replace it with `debug_assert!(..)` | LL - debug_assert_eq!("a".is_empty(), false, "tadam {}", true); -LL + debug_assert!("a".is_empty(), "tadam {}", true); +LL + debug_assert!(!"a".is_empty(), "tadam {}", true); | error: used `debug_assert_eq!` with a literal bool @@ -260,7 +260,7 @@ LL | debug_assert_eq!(false, "a".is_empty(), "tadam {}", true); help: replace it with `debug_assert!(..)` | LL - debug_assert_eq!(false, "a".is_empty(), "tadam {}", true); -LL + debug_assert!("a".is_empty(), "tadam {}", true); +LL + debug_assert!(!"a".is_empty(), "tadam {}", true); | error: used `assert_eq!` with a literal bool @@ -299,5 +299,101 @@ LL - renamed!(b, true); LL + debug_assert!(b); | -error: aborting due to 25 previous errors +error: used `assert_eq!` with a literal bool + --> $DIR/bool_assert_comparison.rs:162:5 + | +LL | assert_eq!("".is_empty(), true); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_eq!("".is_empty(), true); +LL + assert!("".is_empty()); + | + +error: used `assert_ne!` with a literal bool + --> $DIR/bool_assert_comparison.rs:163:5 + | +LL | assert_ne!("".is_empty(), false); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_ne!("".is_empty(), false); +LL + assert!("".is_empty()); + | + +error: used `assert_ne!` with a literal bool + --> $DIR/bool_assert_comparison.rs:164:5 + | +LL | assert_ne!("requires negation".is_empty(), true); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_ne!("requires negation".is_empty(), true); +LL + assert!(!"requires negation".is_empty()); + | + +error: used `assert_eq!` with a literal bool + --> $DIR/bool_assert_comparison.rs:165:5 + | +LL | assert_eq!("requires negation".is_empty(), false); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `assert!(..)` + | +LL - assert_eq!("requires negation".is_empty(), false); +LL + assert!(!"requires negation".is_empty()); + | + +error: used `debug_assert_eq!` with a literal bool + --> $DIR/bool_assert_comparison.rs:167:5 + | +LL | debug_assert_eq!("".is_empty(), true); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_eq!("".is_empty(), true); +LL + debug_assert!("".is_empty()); + | + +error: used `debug_assert_ne!` with a literal bool + --> $DIR/bool_assert_comparison.rs:168:5 + | +LL | debug_assert_ne!("".is_empty(), false); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_ne!("".is_empty(), false); +LL + debug_assert!("".is_empty()); + | + +error: used `debug_assert_ne!` with a literal bool + --> $DIR/bool_assert_comparison.rs:169:5 + | +LL | debug_assert_ne!("requires negation".is_empty(), true); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_ne!("requires negation".is_empty(), true); +LL + debug_assert!(!"requires negation".is_empty()); + | + +error: used `debug_assert_eq!` with a literal bool + --> 
$DIR/bool_assert_comparison.rs:170:5 + | +LL | debug_assert_eq!("requires negation".is_empty(), false); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: replace it with `debug_assert!(..)` + | +LL - debug_assert_eq!("requires negation".is_empty(), false); +LL + debug_assert!(!"requires negation".is_empty()); + | + +error: aborting due to 33 previous errors From 1fb42daf11e68393f2cf9eb8e67e4b2779a4bf5e Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sun, 5 Feb 2023 01:05:31 +0530 Subject: [PATCH 266/501] use span_suggestions to suggest both intents --- clippy_lints/src/methods/suspicious_to_owned.rs | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/clippy_lints/src/methods/suspicious_to_owned.rs b/clippy_lints/src/methods/suspicious_to_owned.rs index fe88fa41fd91e..1adc7b7453e0a 100644 --- a/clippy_lints/src/methods/suspicious_to_owned.rs +++ b/clippy_lints/src/methods/suspicious_to_owned.rs @@ -1,4 +1,4 @@ -use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::is_diag_trait_item; use clippy_utils::source::snippet_with_context; use if_chain::if_chain; @@ -17,19 +17,25 @@ pub fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) - let input_type = cx.typeck_results().expr_ty(expr); if let ty::Adt(adt, _) = cx.typeck_results().expr_ty(expr).kind(); if cx.tcx.is_diagnostic_item(sym::Cow, adt.did()); + then { let mut app = Applicability::MaybeIncorrect; let recv_snip = snippet_with_context(cx, recv.span, expr.span.ctxt(), "..", &mut app).0; - span_lint_and_sugg( + span_lint_and_then( cx, SUSPICIOUS_TO_OWNED, expr.span, &with_forced_trimmed_paths!(format!( "this `to_owned` call clones the {input_type} itself and does not cause the {input_type} contents to become owned" )), - "consider using, depending on intent", - format!("{recv_snip}.clone()` or `{recv_snip}.into_owned()"), - app, + |diag| { + diag.span_suggestions( + expr.span, + "depending on intent, either make the Cow an Owned variant or clone the Cow itself", + [format!("{recv_snip}.into_owned()"), format!("{recv_snip}.clone()")], + Applicability::Unspecified + ); + } ); return true; } From 46edafe8c1df5fd4aef6492cee0a96e72bb428ba Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sun, 5 Feb 2023 01:18:29 +0530 Subject: [PATCH 267/501] blessed stderr output --- tests/ui/suspicious_to_owned.stderr | 35 +++++++++++++++++++++++++---- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/tests/ui/suspicious_to_owned.stderr b/tests/ui/suspicious_to_owned.stderr index dec3f50d6f1b6..2d8bbf0d6f11a 100644 --- a/tests/ui/suspicious_to_owned.stderr +++ b/tests/ui/suspicious_to_owned.stderr @@ -2,27 +2,54 @@ error: this `to_owned` call clones the Cow<'_, str> itself and does not cause th --> $DIR/suspicious_to_owned.rs:16:13 | LL | let _ = cow.to_owned(); - | ^^^^^^^^^^^^^^ help: consider using, depending on intent: `cow.clone()` or `cow.into_owned()` + | ^^^^^^^^^^^^^^ | = note: `-D clippy::suspicious-to-owned` implied by `-D warnings` +help: depending on intent, either make the Cow an Owned variant or clone the Cow itself + | +LL | let _ = cow.clone(); + | ~~~~~~~~~~~ +LL | let _ = cow.into_owned(); + | ~~~~~~~~~~~~~~~~ error: this `to_owned` call clones the Cow<'_, [char; 3]> itself and does not cause the Cow<'_, [char; 3]> contents to become owned --> $DIR/suspicious_to_owned.rs:26:13 | LL | let _ = cow.to_owned(); - | ^^^^^^^^^^^^^^ help: consider using, depending on intent: 
`cow.clone()` or `cow.into_owned()` + | ^^^^^^^^^^^^^^ + | +help: depending on intent, either make the Cow an Owned variant or clone the Cow itself + | +LL | let _ = cow.clone(); + | ~~~~~~~~~~~ +LL | let _ = cow.into_owned(); + | ~~~~~~~~~~~~~~~~ error: this `to_owned` call clones the Cow<'_, Vec> itself and does not cause the Cow<'_, Vec> contents to become owned --> $DIR/suspicious_to_owned.rs:36:13 | LL | let _ = cow.to_owned(); - | ^^^^^^^^^^^^^^ help: consider using, depending on intent: `cow.clone()` or `cow.into_owned()` + | ^^^^^^^^^^^^^^ + | +help: depending on intent, either make the Cow an Owned variant or clone the Cow itself + | +LL | let _ = cow.clone(); + | ~~~~~~~~~~~ +LL | let _ = cow.into_owned(); + | ~~~~~~~~~~~~~~~~ error: this `to_owned` call clones the Cow<'_, str> itself and does not cause the Cow<'_, str> contents to become owned --> $DIR/suspicious_to_owned.rs:46:13 | LL | let _ = cow.to_owned(); - | ^^^^^^^^^^^^^^ help: consider using, depending on intent: `cow.clone()` or `cow.into_owned()` + | ^^^^^^^^^^^^^^ + | +help: depending on intent, either make the Cow an Owned variant or clone the Cow itself + | +LL | let _ = cow.clone(); + | ~~~~~~~~~~~ +LL | let _ = cow.into_owned(); + | ~~~~~~~~~~~~~~~~ error: implicitly cloning a `String` by calling `to_owned` on its dereferenced type --> $DIR/suspicious_to_owned.rs:60:13 From 63a57ee1c381ec8ee0dec13213619113f030d773 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Sun, 5 Feb 2023 01:50:44 +0530 Subject: [PATCH 268/501] retain applicability --- clippy_lints/src/methods/suspicious_to_owned.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clippy_lints/src/methods/suspicious_to_owned.rs b/clippy_lints/src/methods/suspicious_to_owned.rs index 1adc7b7453e0a..62eeb3f7a4ec8 100644 --- a/clippy_lints/src/methods/suspicious_to_owned.rs +++ b/clippy_lints/src/methods/suspicious_to_owned.rs @@ -33,7 +33,7 @@ pub fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) - expr.span, "depending on intent, either make the Cow an Owned variant or clone the Cow itself", [format!("{recv_snip}.into_owned()"), format!("{recv_snip}.clone()")], - Applicability::Unspecified + app ); } ); From 0d6fdefce02ce0aa9c014c921826bab83a91029c Mon Sep 17 00:00:00 2001 From: Lukas Markeffsky <@> Date: Sat, 4 Feb 2023 22:28:45 +0100 Subject: [PATCH 269/501] update ICU4X to 1.1.0 --- Cargo.lock | 52 +- compiler/rustc_baked_icu_data/Cargo.toml | 11 +- compiler/rustc_baked_icu_data/src/data/any.rs | 42 +- .../src/data/fallback/likelysubtags_v1.rs | 733 ----------- .../src/data/fallback/likelysubtags_v1/mod.rs | 6 + .../fallback/likelysubtags_v1/und.rs.data | 728 +++++++++++ .../src/data/fallback/parents_v1.rs | 207 --- .../src/data/fallback/parents_v1/mod.rs | 6 + .../src/data/fallback/parents_v1/und.rs.data | 216 +++ .../src/data/fallback/supplement/co_v1.rs | 41 - .../src/data/fallback/supplement/co_v1/mod.rs | 6 + .../fallback/supplement/co_v1/und.rs.data | 36 + .../src/data/list/and_v1.rs | 1161 ----------------- .../src/data/list/and_v1/en.rs.data | 74 ++ .../src/data/list/and_v1/es.rs.data | 836 ++++++++++++ .../src/data/list/and_v1/fr.rs.data | 74 ++ .../src/data/list/and_v1/it.rs.data | 74 ++ .../src/data/list/and_v1/ja.rs.data | 74 ++ .../src/data/list/and_v1/mod.rs | 22 + .../src/data/list/and_v1/pt.rs.data | 74 ++ .../src/data/list/and_v1/ru.rs.data | 74 ++ .../src/data/list/and_v1/tr.rs.data | 74 ++ .../src/data/list/and_v1/und.rs.data | 74 ++ .../src/data/list/and_v1/zh-Hant.rs.data | 74 ++ 
.../src/data/list/and_v1/zh.rs.data | 74 ++ compiler/rustc_baked_icu_data/src/data/mod.rs | 187 +-- compiler/rustc_error_messages/Cargo.toml | 7 +- 27 files changed, 2734 insertions(+), 2303 deletions(-) delete mode 100644 compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1.rs create mode 100644 compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/mod.rs create mode 100644 compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/und.rs.data delete mode 100644 compiler/rustc_baked_icu_data/src/data/fallback/parents_v1.rs create mode 100644 compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/mod.rs create mode 100644 compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/und.rs.data delete mode 100644 compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1.rs create mode 100644 compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/mod.rs create mode 100644 compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/und.rs.data delete mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1.rs create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/en.rs.data create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/es.rs.data create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/fr.rs.data create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/it.rs.data create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/ja.rs.data create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/mod.rs create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/pt.rs.data create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/ru.rs.data create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/tr.rs.data create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/und.rs.data create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/zh-Hant.rs.data create mode 100644 compiler/rustc_baked_icu_data/src/data/list/and_v1/zh.rs.data diff --git a/Cargo.lock b/Cargo.lock index 705210e44b24c..8971e9e848aa9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2027,23 +2027,21 @@ checksum = "3c1ad908cc71012b7bea4d0c53ba96a8cba9962f048fa68d143376143d863b7a" [[package]] name = "icu_list" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c40218275f081c4493f190357c5395647b06734c2dc3dcb41cc099a0f60168b1" +checksum = "01a65ff0cab77c33c7e165c858eaa6e84a09f1e485dd495d9d0ae61083c6f786" dependencies = [ "displaydoc", - "icu_locid", "icu_provider", "regex-automata 0.2.0", "writeable", - "zerovec", ] [[package]] name = "icu_locid" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34b3de5d99a0e275fe6193b9586dbf37364daebc0d39c89b5cf8376a53b789e8" +checksum = "71d7a98ecb812760b5f077e55a4763edeefa7ccc30d6eb5680a70841ede81928" dependencies = [ "displaydoc", "litemap", @@ -2054,9 +2052,9 @@ dependencies = [ [[package]] name = "icu_provider" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f911086e3c521a8a824d4f8bfd87769645ced2f07ff913b521c0d793be07100" +checksum = "a86816c97bc4e613086497f9479f63e120315e056763e8c4435604f98d21d82d" dependencies = [ "displaydoc", "icu_locid", @@ -2070,9 +2068,9 @@ dependencies = [ [[package]] name = "icu_provider_adapters" -version = "1.0.0" +version = "1.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "980c71d8a91b246ebbb97847178a4b816eea39d1d550c70ee566384555bb6545" +checksum = "8e89bf33962b24bb48a4a21330c20c9ff17949338ea376360dd9eda2c209dca1" dependencies = [ "icu_locid", "icu_provider", @@ -2083,9 +2081,9 @@ dependencies = [ [[package]] name = "icu_provider_macros" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38cf6f5b65cf81f0b4298da647101acbfe6ae0e25263f92bd7a22597e9d6d606" +checksum = "9ddb07844c2ffc4c28840e799e9e54ff054393cf090740decf25624e9d94b93a" dependencies = [ "proc-macro2", "quote", @@ -2460,9 +2458,9 @@ checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" [[package]] name = "litemap" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f34a3f4798fac63fb48cf277eefa38f94d3443baff555bb98e4f56bc9092368e" +checksum = "575d8a551c59104b4df91269921e5eab561aa1b77c618dac0414b5d44a4617de" [[package]] name = "lld-wrapper" @@ -3759,7 +3757,6 @@ dependencies = [ "icu_locid", "icu_provider", "icu_provider_adapters", - "litemap", "zerovec", ] @@ -4004,7 +4001,6 @@ dependencies = [ "rustc_span", "tracing", "unic-langid", - "writeable", ] [[package]] @@ -5667,9 +5663,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8aeafdfd935e4a7fe16a91ab711fa52d54df84f9c8f7ca5837a9d1d902ef4c2" +checksum = "7ac3f5b6856e931e15e07b478e98c8045239829a65f9156d4fa7e7788197a5ef" dependencies = [ "displaydoc", "zerovec", @@ -6312,9 +6308,9 @@ checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" [[package]] name = "writeable" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8e6ab4f5da1b24daf2c590cfac801bacb27b15b4f050e84eb60149ea726f06b" +checksum = "92d74a687e3b9a7a129db0a8c82b4d464eb9c36f5a66ca68572a7e5f1cfdb5bc" [[package]] name = "xattr" @@ -6371,9 +6367,9 @@ dependencies = [ [[package]] name = "yoke" -version = "0.6.2" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fe1d55ca72c32d573bfbd5cb2f0ca65a497854c44762957a6d3da96041a5184" +checksum = "222180af14a6b54ef2c33493c1eff77ae95a3687a21b243e752624006fb8f26e" dependencies = [ "serde", "stable_deref_trait", @@ -6383,9 +6379,9 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.6.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1346e4cd025ae818b88566eac7eb65ab33a994ea55f355c86889af2e7e56b14e" +checksum = "ca800d73d6b7a7ee54f2608205c98b549fca71c9500c1abcb3abdc7708b4a8cb" dependencies = [ "proc-macro2", "quote", @@ -6422,9 +6418,9 @@ checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" [[package]] name = "zerovec" -version = "0.9.0" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d919a74c17749ccb17beaf6405562e413cd94e98ba52ca1e64bbe7eefbd8b8" +checksum = "154df60c74c4a844bc04a53cef4fc18a909d3ea07e19f5225eaba86209da3aa6" dependencies = [ "yoke", "zerofrom", @@ -6433,9 +6429,9 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.9.0" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "490e5f878c2856225e884c35927e7ea6db3c24cdb7229b72542c7526ad7ed49e" +checksum = 
"2154cb6e2a748163354165e22c6a555effb09ca2d16334767bf66bb404f2206e" dependencies = [ "proc-macro2", "quote", diff --git a/compiler/rustc_baked_icu_data/Cargo.toml b/compiler/rustc_baked_icu_data/Cargo.toml index 3477306dbfc95..184fea8683295 100644 --- a/compiler/rustc_baked_icu_data/Cargo.toml +++ b/compiler/rustc_baked_icu_data/Cargo.toml @@ -4,12 +4,11 @@ version = "0.0.0" edition = "2021" [dependencies] -icu_list = "1.0.0" -icu_locid = "1.0.0" -icu_provider = "1.0.1" -icu_provider_adapters = "1.0.0" -litemap = "0.6.0" -zerovec = "0.9.0" +icu_list = "1.1.0" +icu_locid = "1.1.0" +icu_provider = "1.1.0" +icu_provider_adapters = "1.1.0" +zerovec = "0.9.2" [features] rustc_use_parallel_compiler = ['icu_provider/sync'] diff --git a/compiler/rustc_baked_icu_data/src/data/any.rs b/compiler/rustc_baked_icu_data/src/data/any.rs index e8e99be93f2c5..230288766764c 100644 --- a/compiler/rustc_baked_icu_data/src/data/any.rs +++ b/compiler/rustc_baked_icu_data/src/data/any.rs @@ -1,42 +1,2 @@ // @generated -impl AnyProvider for BakedDataProvider { - fn load_any(&self, key: DataKey, req: DataRequest) -> Result { - const ANDLISTV1MARKER: ::icu_provider::DataKeyHash = - ::icu_list::provider::AndListV1Marker::KEY.hashed(); - const COLLATIONFALLBACKSUPPLEMENTV1MARKER: ::icu_provider::DataKeyHash = - ::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker::KEY - .hashed(); - const LOCALEFALLBACKLIKELYSUBTAGSV1MARKER: ::icu_provider::DataKeyHash = - ::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker::KEY - .hashed(); - const LOCALEFALLBACKPARENTSV1MARKER: ::icu_provider::DataKeyHash = - ::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker::KEY - .hashed(); - #[allow(clippy::match_single_binding)] - match key.hashed() { - ANDLISTV1MARKER => list::and_v1::DATA - .get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()) - .copied() - .map(AnyPayload::from_static_ref) - .ok_or(DataErrorKind::MissingLocale), - COLLATIONFALLBACKSUPPLEMENTV1MARKER => fallback::supplement::co_v1::DATA - .get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()) - .copied() - .map(AnyPayload::from_static_ref) - .ok_or(DataErrorKind::MissingLocale), - LOCALEFALLBACKLIKELYSUBTAGSV1MARKER => fallback::likelysubtags_v1::DATA - .get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()) - .copied() - .map(AnyPayload::from_static_ref) - .ok_or(DataErrorKind::MissingLocale), - LOCALEFALLBACKPARENTSV1MARKER => fallback::parents_v1::DATA - .get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()) - .copied() - .map(AnyPayload::from_static_ref) - .ok_or(DataErrorKind::MissingLocale), - _ => Err(DataErrorKind::MissingDataKey), - } - .map_err(|e| e.with_req(key, req)) - .map(|payload| AnyResponse { payload: Some(payload), metadata: Default::default() }) - } -} +impl_any_provider!(BakedDataProvider); diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1.rs b/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1.rs deleted file mode 100644 index 0a90c832e8c79..0000000000000 --- a/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1.rs +++ /dev/null @@ -1,733 +0,0 @@ -// @generated -type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: LocaleFallbackLikelySubtagsV1Marker as :: icu_provider :: DataMarker > :: Yokeable ; -pub static DATA: litemap::LiteMap<&str, &DataStruct, &[(&str, &DataStruct)]> = - litemap::LiteMap::from_sorted_store_unchecked(&[("und", UND)]); -static UND: &DataStruct = - 
&::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1 { - l2s: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap::from_parts_unchecked( - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 97u8, 98u8, 0u8, 97u8, 98u8, 113u8, 97u8, 100u8, 112u8, 97u8, 100u8, 121u8, - 97u8, 101u8, 0u8, 97u8, 101u8, 98u8, 97u8, 104u8, 111u8, 97u8, 106u8, - 116u8, 97u8, 107u8, 107u8, 97u8, 108u8, 116u8, 97u8, 109u8, 0u8, 97u8, - 112u8, 99u8, 97u8, 112u8, 100u8, 97u8, 114u8, 0u8, 97u8, 114u8, 99u8, 97u8, - 114u8, 113u8, 97u8, 114u8, 115u8, 97u8, 114u8, 121u8, 97u8, 114u8, 122u8, - 97u8, 115u8, 0u8, 97u8, 115u8, 101u8, 97u8, 118u8, 0u8, 97u8, 118u8, 108u8, - 97u8, 119u8, 97u8, 98u8, 97u8, 0u8, 98u8, 97u8, 108u8, 98u8, 97u8, 112u8, - 98u8, 97u8, 120u8, 98u8, 99u8, 113u8, 98u8, 101u8, 0u8, 98u8, 101u8, 106u8, - 98u8, 102u8, 113u8, 98u8, 102u8, 116u8, 98u8, 102u8, 121u8, 98u8, 103u8, - 0u8, 98u8, 103u8, 99u8, 98u8, 103u8, 110u8, 98u8, 103u8, 120u8, 98u8, - 104u8, 98u8, 98u8, 104u8, 105u8, 98u8, 104u8, 111u8, 98u8, 106u8, 105u8, - 98u8, 106u8, 106u8, 98u8, 108u8, 116u8, 98u8, 110u8, 0u8, 98u8, 111u8, 0u8, - 98u8, 112u8, 121u8, 98u8, 113u8, 105u8, 98u8, 114u8, 97u8, 98u8, 114u8, - 104u8, 98u8, 114u8, 120u8, 98u8, 115u8, 113u8, 98u8, 115u8, 116u8, 98u8, - 116u8, 118u8, 98u8, 117u8, 97u8, 98u8, 121u8, 110u8, 99u8, 99u8, 112u8, - 99u8, 101u8, 0u8, 99u8, 104u8, 109u8, 99u8, 104u8, 114u8, 99u8, 106u8, - 97u8, 99u8, 106u8, 109u8, 99u8, 107u8, 98u8, 99u8, 109u8, 103u8, 99u8, - 111u8, 112u8, 99u8, 114u8, 0u8, 99u8, 114u8, 104u8, 99u8, 114u8, 107u8, - 99u8, 114u8, 108u8, 99u8, 115u8, 119u8, 99u8, 116u8, 100u8, 99u8, 117u8, - 0u8, 99u8, 118u8, 0u8, 100u8, 97u8, 114u8, 100u8, 99u8, 99u8, 100u8, 103u8, - 108u8, 100u8, 109u8, 102u8, 100u8, 111u8, 105u8, 100u8, 114u8, 104u8, - 100u8, 114u8, 115u8, 100u8, 116u8, 121u8, 100u8, 118u8, 0u8, 100u8, 122u8, - 0u8, 101u8, 103u8, 121u8, 101u8, 107u8, 121u8, 101u8, 108u8, 0u8, 101u8, - 115u8, 103u8, 101u8, 116u8, 116u8, 102u8, 97u8, 0u8, 102u8, 105u8, 97u8, - 102u8, 117u8, 98u8, 103u8, 97u8, 110u8, 103u8, 98u8, 109u8, 103u8, 98u8, - 122u8, 103u8, 101u8, 122u8, 103u8, 103u8, 110u8, 103u8, 106u8, 107u8, - 103u8, 106u8, 117u8, 103u8, 108u8, 107u8, 103u8, 109u8, 118u8, 103u8, - 111u8, 102u8, 103u8, 111u8, 109u8, 103u8, 111u8, 110u8, 103u8, 111u8, - 116u8, 103u8, 114u8, 99u8, 103u8, 114u8, 116u8, 103u8, 117u8, 0u8, 103u8, - 118u8, 114u8, 103u8, 119u8, 99u8, 103u8, 119u8, 116u8, 104u8, 97u8, 107u8, - 104u8, 97u8, 122u8, 104u8, 100u8, 121u8, 104u8, 101u8, 0u8, 104u8, 105u8, - 0u8, 104u8, 108u8, 117u8, 104u8, 109u8, 100u8, 104u8, 110u8, 100u8, 104u8, - 110u8, 101u8, 104u8, 110u8, 106u8, 104u8, 110u8, 111u8, 104u8, 111u8, 99u8, - 104u8, 111u8, 106u8, 104u8, 115u8, 110u8, 104u8, 121u8, 0u8, 105u8, 105u8, - 0u8, 105u8, 110u8, 104u8, 105u8, 117u8, 0u8, 105u8, 119u8, 0u8, 106u8, - 97u8, 0u8, 106u8, 105u8, 0u8, 106u8, 109u8, 108u8, 107u8, 97u8, 0u8, 107u8, - 97u8, 97u8, 107u8, 97u8, 119u8, 107u8, 98u8, 100u8, 107u8, 98u8, 121u8, - 107u8, 100u8, 116u8, 107u8, 102u8, 114u8, 107u8, 102u8, 121u8, 107u8, - 104u8, 98u8, 107u8, 104u8, 110u8, 107u8, 104u8, 116u8, 107u8, 104u8, 119u8, - 107u8, 106u8, 103u8, 107u8, 107u8, 0u8, 107u8, 109u8, 0u8, 107u8, 110u8, - 0u8, 107u8, 111u8, 0u8, 107u8, 111u8, 105u8, 107u8, 111u8, 107u8, 107u8, - 113u8, 121u8, 107u8, 114u8, 99u8, 107u8, 114u8, 117u8, 107u8, 115u8, 0u8, - 107u8, 116u8, 98u8, 107u8, 117u8, 109u8, 107u8, 118u8, 0u8, 107u8, 118u8, - 120u8, 107u8, 120u8, 99u8, 107u8, 120u8, 108u8, 107u8, 120u8, 109u8, 107u8, - 120u8, 112u8, 107u8, 
121u8, 0u8, 107u8, 122u8, 104u8, 108u8, 97u8, 98u8, - 108u8, 97u8, 100u8, 108u8, 97u8, 104u8, 108u8, 98u8, 101u8, 108u8, 99u8, - 112u8, 108u8, 101u8, 112u8, 108u8, 101u8, 122u8, 108u8, 105u8, 102u8, - 108u8, 105u8, 115u8, 108u8, 107u8, 105u8, 108u8, 109u8, 110u8, 108u8, - 111u8, 0u8, 108u8, 114u8, 99u8, 108u8, 117u8, 122u8, 108u8, 119u8, 108u8, - 108u8, 122u8, 104u8, 109u8, 97u8, 103u8, 109u8, 97u8, 105u8, 109u8, 100u8, - 101u8, 109u8, 100u8, 102u8, 109u8, 100u8, 120u8, 109u8, 102u8, 97u8, 109u8, - 103u8, 112u8, 109u8, 107u8, 0u8, 109u8, 107u8, 105u8, 109u8, 108u8, 0u8, - 109u8, 110u8, 0u8, 109u8, 110u8, 105u8, 109u8, 110u8, 119u8, 109u8, 114u8, - 0u8, 109u8, 114u8, 100u8, 109u8, 114u8, 106u8, 109u8, 114u8, 111u8, 109u8, - 116u8, 114u8, 109u8, 118u8, 121u8, 109u8, 119u8, 114u8, 109u8, 119u8, - 119u8, 109u8, 121u8, 0u8, 109u8, 121u8, 109u8, 109u8, 121u8, 118u8, 109u8, - 121u8, 122u8, 109u8, 122u8, 110u8, 110u8, 97u8, 110u8, 110u8, 101u8, 0u8, - 110u8, 101u8, 119u8, 110u8, 110u8, 112u8, 110u8, 111u8, 100u8, 110u8, - 111u8, 101u8, 110u8, 111u8, 110u8, 110u8, 113u8, 111u8, 110u8, 115u8, - 107u8, 110u8, 115u8, 116u8, 111u8, 106u8, 0u8, 111u8, 106u8, 115u8, 111u8, - 114u8, 0u8, 111u8, 114u8, 117u8, 111u8, 115u8, 0u8, 111u8, 115u8, 97u8, - 111u8, 116u8, 97u8, 111u8, 116u8, 107u8, 111u8, 117u8, 105u8, 112u8, 97u8, - 0u8, 112u8, 97u8, 108u8, 112u8, 101u8, 111u8, 112u8, 104u8, 108u8, 112u8, - 104u8, 110u8, 112u8, 107u8, 97u8, 112u8, 110u8, 116u8, 112u8, 112u8, 97u8, - 112u8, 114u8, 97u8, 112u8, 114u8, 100u8, 112u8, 115u8, 0u8, 114u8, 97u8, - 106u8, 114u8, 104u8, 103u8, 114u8, 105u8, 102u8, 114u8, 106u8, 115u8, - 114u8, 107u8, 116u8, 114u8, 109u8, 116u8, 114u8, 117u8, 0u8, 114u8, 117u8, - 101u8, 114u8, 121u8, 117u8, 115u8, 97u8, 0u8, 115u8, 97u8, 104u8, 115u8, - 97u8, 116u8, 115u8, 97u8, 122u8, 115u8, 99u8, 107u8, 115u8, 99u8, 108u8, - 115u8, 100u8, 0u8, 115u8, 100u8, 104u8, 115u8, 103u8, 97u8, 115u8, 103u8, - 119u8, 115u8, 104u8, 105u8, 115u8, 104u8, 110u8, 115u8, 104u8, 117u8, - 115u8, 105u8, 0u8, 115u8, 107u8, 114u8, 115u8, 109u8, 112u8, 115u8, 111u8, - 103u8, 115u8, 111u8, 117u8, 115u8, 114u8, 0u8, 115u8, 114u8, 98u8, 115u8, - 114u8, 120u8, 115u8, 119u8, 98u8, 115u8, 119u8, 118u8, 115u8, 121u8, 108u8, - 115u8, 121u8, 114u8, 116u8, 97u8, 0u8, 116u8, 97u8, 106u8, 116u8, 99u8, - 121u8, 116u8, 100u8, 100u8, 116u8, 100u8, 103u8, 116u8, 100u8, 104u8, - 116u8, 101u8, 0u8, 116u8, 103u8, 0u8, 116u8, 104u8, 0u8, 116u8, 104u8, - 108u8, 116u8, 104u8, 113u8, 116u8, 104u8, 114u8, 116u8, 105u8, 0u8, 116u8, - 105u8, 103u8, 116u8, 107u8, 116u8, 116u8, 114u8, 119u8, 116u8, 115u8, - 100u8, 116u8, 115u8, 102u8, 116u8, 115u8, 106u8, 116u8, 116u8, 0u8, 116u8, - 116u8, 115u8, 116u8, 120u8, 103u8, 116u8, 120u8, 111u8, 116u8, 121u8, - 118u8, 117u8, 100u8, 105u8, 117u8, 100u8, 109u8, 117u8, 103u8, 0u8, 117u8, - 103u8, 97u8, 117u8, 107u8, 0u8, 117u8, 110u8, 114u8, 117u8, 110u8, 120u8, - 117u8, 114u8, 0u8, 118u8, 97u8, 105u8, 119u8, 97u8, 108u8, 119u8, 98u8, - 113u8, 119u8, 98u8, 114u8, 119u8, 110u8, 105u8, 119u8, 115u8, 103u8, 119u8, - 116u8, 109u8, 119u8, 117u8, 117u8, 120u8, 99u8, 111u8, 120u8, 99u8, 114u8, - 120u8, 108u8, 99u8, 120u8, 108u8, 100u8, 120u8, 109u8, 102u8, 120u8, 109u8, - 110u8, 120u8, 109u8, 114u8, 120u8, 110u8, 97u8, 120u8, 110u8, 114u8, 120u8, - 112u8, 114u8, 120u8, 115u8, 97u8, 120u8, 115u8, 114u8, 121u8, 105u8, 0u8, - 121u8, 117u8, 101u8, 122u8, 100u8, 106u8, 122u8, 103u8, 104u8, 122u8, - 104u8, 0u8, 122u8, 104u8, 120u8, 122u8, 107u8, 116u8, - ]) - }, - unsafe { - 
::zerovec::ZeroVec::from_bytes_unchecked(&[ - 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, 84u8, 105u8, 98u8, - 116u8, 67u8, 121u8, 114u8, 108u8, 65u8, 118u8, 115u8, 116u8, 65u8, 114u8, - 97u8, 98u8, 65u8, 104u8, 111u8, 109u8, 65u8, 114u8, 97u8, 98u8, 88u8, - 115u8, 117u8, 120u8, 67u8, 121u8, 114u8, 108u8, 69u8, 116u8, 104u8, 105u8, - 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, - 65u8, 114u8, 109u8, 105u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, - 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 66u8, 101u8, 110u8, - 103u8, 83u8, 103u8, 110u8, 119u8, 67u8, 121u8, 114u8, 108u8, 65u8, 114u8, - 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 65u8, - 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 66u8, 97u8, 109u8, 117u8, - 69u8, 116u8, 104u8, 105u8, 67u8, 121u8, 114u8, 108u8, 65u8, 114u8, 97u8, - 98u8, 84u8, 97u8, 109u8, 108u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, - 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 68u8, 101u8, 118u8, 97u8, 65u8, - 114u8, 97u8, 98u8, 71u8, 114u8, 101u8, 107u8, 68u8, 101u8, 118u8, 97u8, - 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 69u8, 116u8, 104u8, - 105u8, 68u8, 101u8, 118u8, 97u8, 84u8, 97u8, 118u8, 116u8, 66u8, 101u8, - 110u8, 103u8, 84u8, 105u8, 98u8, 116u8, 66u8, 101u8, 110u8, 103u8, 65u8, - 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 68u8, - 101u8, 118u8, 97u8, 66u8, 97u8, 115u8, 115u8, 69u8, 116u8, 104u8, 105u8, - 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 69u8, 116u8, 104u8, - 105u8, 67u8, 97u8, 107u8, 109u8, 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, - 114u8, 108u8, 67u8, 104u8, 101u8, 114u8, 65u8, 114u8, 97u8, 98u8, 67u8, - 104u8, 97u8, 109u8, 65u8, 114u8, 97u8, 98u8, 83u8, 111u8, 121u8, 111u8, - 67u8, 111u8, 112u8, 116u8, 67u8, 97u8, 110u8, 115u8, 67u8, 121u8, 114u8, - 108u8, 67u8, 97u8, 110u8, 115u8, 67u8, 97u8, 110u8, 115u8, 67u8, 97u8, - 110u8, 115u8, 80u8, 97u8, 117u8, 99u8, 67u8, 121u8, 114u8, 108u8, 67u8, - 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, - 65u8, 114u8, 97u8, 98u8, 77u8, 101u8, 100u8, 102u8, 68u8, 101u8, 118u8, - 97u8, 77u8, 111u8, 110u8, 103u8, 69u8, 116u8, 104u8, 105u8, 68u8, 101u8, - 118u8, 97u8, 84u8, 104u8, 97u8, 97u8, 84u8, 105u8, 98u8, 116u8, 69u8, - 103u8, 121u8, 112u8, 75u8, 97u8, 108u8, 105u8, 71u8, 114u8, 101u8, 107u8, - 71u8, 111u8, 110u8, 109u8, 73u8, 116u8, 97u8, 108u8, 65u8, 114u8, 97u8, - 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8, - 115u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 69u8, 116u8, - 104u8, 105u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 65u8, - 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 69u8, 116u8, 104u8, 105u8, - 69u8, 116u8, 104u8, 105u8, 68u8, 101u8, 118u8, 97u8, 84u8, 101u8, 108u8, - 117u8, 71u8, 111u8, 116u8, 104u8, 67u8, 112u8, 114u8, 116u8, 66u8, 101u8, - 110u8, 103u8, 71u8, 117u8, 106u8, 114u8, 68u8, 101u8, 118u8, 97u8, 65u8, - 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8, 115u8, 65u8, - 114u8, 97u8, 98u8, 69u8, 116u8, 104u8, 105u8, 72u8, 101u8, 98u8, 114u8, - 68u8, 101u8, 118u8, 97u8, 72u8, 108u8, 117u8, 119u8, 80u8, 108u8, 114u8, - 100u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 72u8, 109u8, - 110u8, 112u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 68u8, - 101u8, 118u8, 97u8, 72u8, 97u8, 110u8, 115u8, 65u8, 114u8, 109u8, 110u8, - 89u8, 105u8, 105u8, 105u8, 67u8, 121u8, 114u8, 108u8, 67u8, 97u8, 110u8, - 115u8, 72u8, 101u8, 98u8, 114u8, 74u8, 112u8, 97u8, 110u8, 72u8, 101u8, - 
98u8, 114u8, 68u8, 101u8, 118u8, 97u8, 71u8, 101u8, 111u8, 114u8, 67u8, - 121u8, 114u8, 108u8, 75u8, 97u8, 119u8, 105u8, 67u8, 121u8, 114u8, 108u8, - 65u8, 114u8, 97u8, 98u8, 84u8, 104u8, 97u8, 105u8, 68u8, 101u8, 118u8, - 97u8, 68u8, 101u8, 118u8, 97u8, 84u8, 97u8, 108u8, 117u8, 68u8, 101u8, - 118u8, 97u8, 77u8, 121u8, 109u8, 114u8, 65u8, 114u8, 97u8, 98u8, 76u8, - 97u8, 111u8, 111u8, 67u8, 121u8, 114u8, 108u8, 75u8, 104u8, 109u8, 114u8, - 75u8, 110u8, 100u8, 97u8, 75u8, 111u8, 114u8, 101u8, 67u8, 121u8, 114u8, - 108u8, 68u8, 101u8, 118u8, 97u8, 69u8, 116u8, 104u8, 105u8, 67u8, 121u8, - 114u8, 108u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 69u8, - 116u8, 104u8, 105u8, 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, - 65u8, 114u8, 97u8, 98u8, 69u8, 116u8, 104u8, 105u8, 68u8, 101u8, 118u8, - 97u8, 84u8, 104u8, 97u8, 105u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, - 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, 76u8, 105u8, 110u8, 97u8, 72u8, - 101u8, 98u8, 114u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, - 84u8, 104u8, 97u8, 105u8, 76u8, 101u8, 112u8, 99u8, 67u8, 121u8, 114u8, - 108u8, 68u8, 101u8, 118u8, 97u8, 76u8, 105u8, 115u8, 117u8, 65u8, 114u8, - 97u8, 98u8, 84u8, 101u8, 108u8, 117u8, 76u8, 97u8, 111u8, 111u8, 65u8, - 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 84u8, 104u8, 97u8, 105u8, 72u8, - 97u8, 110u8, 115u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, - 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, 69u8, 116u8, 104u8, - 105u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, - 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, 77u8, 108u8, 121u8, 109u8, 67u8, - 121u8, 114u8, 108u8, 66u8, 101u8, 110u8, 103u8, 77u8, 121u8, 109u8, 114u8, - 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, 114u8, - 108u8, 77u8, 114u8, 111u8, 111u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, - 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 72u8, 109u8, 110u8, 112u8, 77u8, - 121u8, 109u8, 114u8, 69u8, 116u8, 104u8, 105u8, 67u8, 121u8, 114u8, 108u8, - 77u8, 97u8, 110u8, 100u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8, - 115u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 87u8, 99u8, - 104u8, 111u8, 76u8, 97u8, 110u8, 97u8, 68u8, 101u8, 118u8, 97u8, 82u8, - 117u8, 110u8, 114u8, 78u8, 107u8, 111u8, 111u8, 67u8, 97u8, 110u8, 115u8, - 84u8, 110u8, 115u8, 97u8, 67u8, 97u8, 110u8, 115u8, 67u8, 97u8, 110u8, - 115u8, 79u8, 114u8, 121u8, 97u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, - 114u8, 108u8, 79u8, 115u8, 103u8, 101u8, 65u8, 114u8, 97u8, 98u8, 79u8, - 114u8, 107u8, 104u8, 79u8, 117u8, 103u8, 114u8, 71u8, 117u8, 114u8, 117u8, - 80u8, 104u8, 108u8, 105u8, 88u8, 112u8, 101u8, 111u8, 65u8, 114u8, 97u8, - 98u8, 80u8, 104u8, 110u8, 120u8, 66u8, 114u8, 97u8, 104u8, 71u8, 114u8, - 101u8, 107u8, 68u8, 101u8, 118u8, 97u8, 75u8, 104u8, 97u8, 114u8, 65u8, - 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 82u8, - 111u8, 104u8, 103u8, 84u8, 102u8, 110u8, 103u8, 68u8, 101u8, 118u8, 97u8, - 66u8, 101u8, 110u8, 103u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, - 108u8, 67u8, 121u8, 114u8, 108u8, 75u8, 97u8, 110u8, 97u8, 68u8, 101u8, - 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 79u8, 108u8, 99u8, 107u8, 83u8, - 97u8, 117u8, 114u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, - 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 79u8, 103u8, 97u8, 109u8, - 69u8, 116u8, 104u8, 105u8, 84u8, 102u8, 110u8, 103u8, 77u8, 121u8, 109u8, - 114u8, 65u8, 114u8, 97u8, 98u8, 83u8, 105u8, 110u8, 104u8, 65u8, 114u8, - 97u8, 98u8, 83u8, 97u8, 109u8, 114u8, 83u8, 111u8, 
103u8, 100u8, 84u8, - 104u8, 97u8, 105u8, 67u8, 121u8, 114u8, 108u8, 83u8, 111u8, 114u8, 97u8, - 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, - 97u8, 66u8, 101u8, 110u8, 103u8, 83u8, 121u8, 114u8, 99u8, 84u8, 97u8, - 109u8, 108u8, 68u8, 101u8, 118u8, 97u8, 75u8, 110u8, 100u8, 97u8, 84u8, - 97u8, 108u8, 101u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, - 84u8, 101u8, 108u8, 117u8, 67u8, 121u8, 114u8, 108u8, 84u8, 104u8, 97u8, - 105u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, - 118u8, 97u8, 69u8, 116u8, 104u8, 105u8, 69u8, 116u8, 104u8, 105u8, 68u8, - 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 71u8, 114u8, 101u8, 107u8, - 68u8, 101u8, 118u8, 97u8, 84u8, 105u8, 98u8, 116u8, 67u8, 121u8, 114u8, - 108u8, 84u8, 104u8, 97u8, 105u8, 84u8, 97u8, 110u8, 103u8, 84u8, 111u8, - 116u8, 111u8, 67u8, 121u8, 114u8, 108u8, 65u8, 103u8, 104u8, 98u8, 67u8, - 121u8, 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, 85u8, 103u8, 97u8, 114u8, - 67u8, 121u8, 114u8, 108u8, 66u8, 101u8, 110u8, 103u8, 66u8, 101u8, 110u8, - 103u8, 65u8, 114u8, 97u8, 98u8, 86u8, 97u8, 105u8, 105u8, 69u8, 116u8, - 104u8, 105u8, 84u8, 101u8, 108u8, 117u8, 68u8, 101u8, 118u8, 97u8, 65u8, - 114u8, 97u8, 98u8, 71u8, 111u8, 110u8, 103u8, 68u8, 101u8, 118u8, 97u8, - 72u8, 97u8, 110u8, 115u8, 67u8, 104u8, 114u8, 115u8, 67u8, 97u8, 114u8, - 105u8, 76u8, 121u8, 99u8, 105u8, 76u8, 121u8, 100u8, 105u8, 71u8, 101u8, - 111u8, 114u8, 77u8, 97u8, 110u8, 105u8, 77u8, 101u8, 114u8, 99u8, 78u8, - 97u8, 114u8, 98u8, 68u8, 101u8, 118u8, 97u8, 80u8, 114u8, 116u8, 105u8, - 83u8, 97u8, 114u8, 98u8, 68u8, 101u8, 118u8, 97u8, 72u8, 101u8, 98u8, - 114u8, 72u8, 97u8, 110u8, 116u8, 65u8, 114u8, 97u8, 98u8, 84u8, 102u8, - 110u8, 103u8, 72u8, 97u8, 110u8, 115u8, 78u8, 115u8, 104u8, 117u8, 75u8, - 105u8, 116u8, 115u8, - ]) - }, - ) - }, - lr2s: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap2d::from_parts_unchecked( - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 97u8, 122u8, 0u8, 104u8, 97u8, 0u8, 107u8, 107u8, 0u8, 107u8, 117u8, 0u8, - 107u8, 121u8, 0u8, 109u8, 97u8, 110u8, 109u8, 110u8, 0u8, 109u8, 115u8, - 0u8, 112u8, 97u8, 0u8, 114u8, 105u8, 102u8, 115u8, 100u8, 0u8, 115u8, - 114u8, 0u8, 116u8, 103u8, 0u8, 117u8, 103u8, 0u8, 117u8, 110u8, 114u8, - 117u8, 122u8, 0u8, 121u8, 117u8, 101u8, 122u8, 104u8, 0u8, - ]) - }, - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 3u8, 0u8, 0u8, 0u8, 5u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 10u8, 0u8, 0u8, - 0u8, 12u8, 0u8, 0u8, 0u8, 13u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 15u8, - 0u8, 0u8, 0u8, 16u8, 0u8, 0u8, 0u8, 17u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, - 0u8, 22u8, 0u8, 0u8, 0u8, 23u8, 0u8, 0u8, 0u8, 25u8, 0u8, 0u8, 0u8, 26u8, - 0u8, 0u8, 0u8, 28u8, 0u8, 0u8, 0u8, 29u8, 0u8, 0u8, 0u8, 44u8, 0u8, 0u8, - 0u8, - ]) - }, - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 73u8, 81u8, 0u8, 73u8, 82u8, 0u8, 82u8, 85u8, 0u8, 67u8, 77u8, 0u8, 83u8, - 68u8, 0u8, 65u8, 70u8, 0u8, 67u8, 78u8, 0u8, 73u8, 82u8, 0u8, 77u8, 78u8, - 0u8, 76u8, 66u8, 0u8, 67u8, 78u8, 0u8, 84u8, 82u8, 0u8, 71u8, 78u8, 0u8, - 67u8, 78u8, 0u8, 67u8, 67u8, 0u8, 80u8, 75u8, 0u8, 78u8, 76u8, 0u8, 73u8, - 78u8, 0u8, 77u8, 69u8, 0u8, 82u8, 79u8, 0u8, 82u8, 85u8, 0u8, 84u8, 82u8, - 0u8, 80u8, 75u8, 0u8, 75u8, 90u8, 0u8, 77u8, 78u8, 0u8, 78u8, 80u8, 0u8, - 65u8, 70u8, 0u8, 67u8, 78u8, 0u8, 67u8, 78u8, 0u8, 65u8, 85u8, 0u8, 66u8, - 78u8, 0u8, 71u8, 66u8, 0u8, 71u8, 70u8, 0u8, 72u8, 75u8, 0u8, 73u8, 68u8, - 0u8, 77u8, 79u8, 0u8, 80u8, 65u8, 0u8, 80u8, 70u8, 0u8, 80u8, 72u8, 0u8, - 83u8, 
82u8, 0u8, 84u8, 72u8, 0u8, 84u8, 87u8, 0u8, 85u8, 83u8, 0u8, 86u8, - 78u8, 0u8, - ]) - }, - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, - 108u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, - 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, - 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 76u8, 97u8, 116u8, - 110u8, 78u8, 107u8, 111u8, 111u8, 77u8, 111u8, 110u8, 103u8, 65u8, 114u8, - 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 76u8, 97u8, 116u8, 110u8, 68u8, 101u8, - 118u8, 97u8, 76u8, 97u8, 116u8, 110u8, 76u8, 97u8, 116u8, 110u8, 76u8, - 97u8, 116u8, 110u8, 76u8, 97u8, 116u8, 110u8, 65u8, 114u8, 97u8, 98u8, - 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, 68u8, 101u8, 118u8, - 97u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, 72u8, 97u8, - 110u8, 115u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, - 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, - 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, - 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, - 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, - 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, - ]) - }, - ) - }, - l2r: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap::from_parts_unchecked( - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 97u8, 97u8, 0u8, 97u8, 98u8, 0u8, 97u8, 98u8, 114u8, 97u8, 99u8, 101u8, - 97u8, 99u8, 104u8, 97u8, 100u8, 97u8, 97u8, 100u8, 112u8, 97u8, 100u8, - 121u8, 97u8, 101u8, 0u8, 97u8, 101u8, 98u8, 97u8, 102u8, 0u8, 97u8, 103u8, - 113u8, 97u8, 104u8, 111u8, 97u8, 106u8, 116u8, 97u8, 107u8, 0u8, 97u8, - 107u8, 107u8, 97u8, 108u8, 110u8, 97u8, 108u8, 116u8, 97u8, 109u8, 0u8, - 97u8, 109u8, 111u8, 97u8, 110u8, 0u8, 97u8, 110u8, 110u8, 97u8, 111u8, - 122u8, 97u8, 112u8, 100u8, 97u8, 114u8, 0u8, 97u8, 114u8, 99u8, 97u8, - 114u8, 110u8, 97u8, 114u8, 111u8, 97u8, 114u8, 113u8, 97u8, 114u8, 115u8, - 97u8, 114u8, 121u8, 97u8, 114u8, 122u8, 97u8, 115u8, 0u8, 97u8, 115u8, - 97u8, 97u8, 115u8, 101u8, 97u8, 115u8, 116u8, 97u8, 116u8, 106u8, 97u8, - 118u8, 0u8, 97u8, 119u8, 97u8, 97u8, 121u8, 0u8, 97u8, 122u8, 0u8, 98u8, - 97u8, 0u8, 98u8, 97u8, 108u8, 98u8, 97u8, 110u8, 98u8, 97u8, 112u8, 98u8, - 97u8, 114u8, 98u8, 97u8, 115u8, 98u8, 97u8, 120u8, 98u8, 98u8, 99u8, 98u8, - 98u8, 106u8, 98u8, 99u8, 105u8, 98u8, 101u8, 0u8, 98u8, 101u8, 106u8, 98u8, - 101u8, 109u8, 98u8, 101u8, 119u8, 98u8, 101u8, 122u8, 98u8, 102u8, 100u8, - 98u8, 102u8, 113u8, 98u8, 102u8, 116u8, 98u8, 102u8, 121u8, 98u8, 103u8, - 0u8, 98u8, 103u8, 99u8, 98u8, 103u8, 110u8, 98u8, 103u8, 120u8, 98u8, - 104u8, 98u8, 98u8, 104u8, 105u8, 98u8, 104u8, 111u8, 98u8, 105u8, 0u8, - 98u8, 105u8, 107u8, 98u8, 105u8, 110u8, 98u8, 106u8, 106u8, 98u8, 106u8, - 110u8, 98u8, 106u8, 116u8, 98u8, 107u8, 109u8, 98u8, 107u8, 117u8, 98u8, - 108u8, 97u8, 98u8, 108u8, 103u8, 98u8, 108u8, 116u8, 98u8, 109u8, 0u8, - 98u8, 109u8, 113u8, 98u8, 110u8, 0u8, 98u8, 111u8, 0u8, 98u8, 112u8, 121u8, - 98u8, 113u8, 105u8, 98u8, 113u8, 118u8, 98u8, 114u8, 0u8, 98u8, 114u8, - 97u8, 98u8, 114u8, 104u8, 98u8, 114u8, 120u8, 98u8, 115u8, 0u8, 98u8, - 115u8, 113u8, 98u8, 115u8, 115u8, 98u8, 116u8, 111u8, 98u8, 116u8, 118u8, - 98u8, 117u8, 97u8, 98u8, 117u8, 99u8, 98u8, 117u8, 103u8, 98u8, 117u8, - 109u8, 98u8, 118u8, 98u8, 98u8, 121u8, 110u8, 98u8, 121u8, 118u8, 98u8, - 122u8, 101u8, 99u8, 97u8, 0u8, 99u8, 97u8, 100u8, 99u8, 99u8, 
104u8, 99u8, - 99u8, 112u8, 99u8, 101u8, 0u8, 99u8, 101u8, 98u8, 99u8, 103u8, 103u8, 99u8, - 104u8, 0u8, 99u8, 104u8, 107u8, 99u8, 104u8, 109u8, 99u8, 104u8, 111u8, - 99u8, 104u8, 112u8, 99u8, 104u8, 114u8, 99u8, 105u8, 99u8, 99u8, 106u8, - 97u8, 99u8, 106u8, 109u8, 99u8, 107u8, 98u8, 99u8, 108u8, 99u8, 99u8, - 109u8, 103u8, 99u8, 111u8, 0u8, 99u8, 111u8, 112u8, 99u8, 112u8, 115u8, - 99u8, 114u8, 0u8, 99u8, 114u8, 103u8, 99u8, 114u8, 104u8, 99u8, 114u8, - 107u8, 99u8, 114u8, 108u8, 99u8, 114u8, 115u8, 99u8, 115u8, 0u8, 99u8, - 115u8, 98u8, 99u8, 115u8, 119u8, 99u8, 116u8, 100u8, 99u8, 117u8, 0u8, - 99u8, 118u8, 0u8, 99u8, 121u8, 0u8, 100u8, 97u8, 0u8, 100u8, 97u8, 102u8, - 100u8, 97u8, 107u8, 100u8, 97u8, 114u8, 100u8, 97u8, 118u8, 100u8, 99u8, - 99u8, 100u8, 101u8, 0u8, 100u8, 101u8, 110u8, 100u8, 103u8, 114u8, 100u8, - 106u8, 101u8, 100u8, 109u8, 102u8, 100u8, 110u8, 106u8, 100u8, 111u8, - 105u8, 100u8, 114u8, 104u8, 100u8, 115u8, 98u8, 100u8, 116u8, 109u8, 100u8, - 116u8, 112u8, 100u8, 116u8, 121u8, 100u8, 117u8, 97u8, 100u8, 118u8, 0u8, - 100u8, 121u8, 111u8, 100u8, 121u8, 117u8, 100u8, 122u8, 0u8, 101u8, 98u8, - 117u8, 101u8, 101u8, 0u8, 101u8, 102u8, 105u8, 101u8, 103u8, 108u8, 101u8, - 103u8, 121u8, 101u8, 107u8, 121u8, 101u8, 108u8, 0u8, 101u8, 110u8, 0u8, - 101u8, 111u8, 0u8, 101u8, 115u8, 0u8, 101u8, 115u8, 103u8, 101u8, 115u8, - 117u8, 101u8, 116u8, 0u8, 101u8, 116u8, 116u8, 101u8, 117u8, 0u8, 101u8, - 119u8, 111u8, 101u8, 120u8, 116u8, 102u8, 97u8, 0u8, 102u8, 97u8, 110u8, - 102u8, 102u8, 0u8, 102u8, 102u8, 109u8, 102u8, 105u8, 0u8, 102u8, 105u8, - 97u8, 102u8, 105u8, 108u8, 102u8, 105u8, 116u8, 102u8, 106u8, 0u8, 102u8, - 111u8, 0u8, 102u8, 111u8, 110u8, 102u8, 114u8, 0u8, 102u8, 114u8, 99u8, - 102u8, 114u8, 112u8, 102u8, 114u8, 114u8, 102u8, 114u8, 115u8, 102u8, - 117u8, 98u8, 102u8, 117u8, 100u8, 102u8, 117u8, 102u8, 102u8, 117u8, 113u8, - 102u8, 117u8, 114u8, 102u8, 117u8, 118u8, 102u8, 118u8, 114u8, 102u8, - 121u8, 0u8, 103u8, 97u8, 0u8, 103u8, 97u8, 97u8, 103u8, 97u8, 103u8, 103u8, - 97u8, 110u8, 103u8, 97u8, 121u8, 103u8, 98u8, 109u8, 103u8, 98u8, 122u8, - 103u8, 99u8, 114u8, 103u8, 100u8, 0u8, 103u8, 101u8, 122u8, 103u8, 103u8, - 110u8, 103u8, 105u8, 108u8, 103u8, 106u8, 107u8, 103u8, 106u8, 117u8, - 103u8, 108u8, 0u8, 103u8, 108u8, 107u8, 103u8, 110u8, 0u8, 103u8, 111u8, - 109u8, 103u8, 111u8, 110u8, 103u8, 111u8, 114u8, 103u8, 111u8, 115u8, - 103u8, 111u8, 116u8, 103u8, 114u8, 99u8, 103u8, 114u8, 116u8, 103u8, 115u8, - 119u8, 103u8, 117u8, 0u8, 103u8, 117u8, 98u8, 103u8, 117u8, 99u8, 103u8, - 117u8, 114u8, 103u8, 117u8, 122u8, 103u8, 118u8, 0u8, 103u8, 118u8, 114u8, - 103u8, 119u8, 105u8, 104u8, 97u8, 0u8, 104u8, 97u8, 107u8, 104u8, 97u8, - 119u8, 104u8, 97u8, 122u8, 104u8, 101u8, 0u8, 104u8, 105u8, 0u8, 104u8, - 105u8, 102u8, 104u8, 105u8, 108u8, 104u8, 108u8, 117u8, 104u8, 109u8, - 100u8, 104u8, 110u8, 100u8, 104u8, 110u8, 101u8, 104u8, 110u8, 106u8, - 104u8, 110u8, 110u8, 104u8, 110u8, 111u8, 104u8, 111u8, 0u8, 104u8, 111u8, - 99u8, 104u8, 111u8, 106u8, 104u8, 114u8, 0u8, 104u8, 115u8, 98u8, 104u8, - 115u8, 110u8, 104u8, 116u8, 0u8, 104u8, 117u8, 0u8, 104u8, 117u8, 114u8, - 104u8, 121u8, 0u8, 104u8, 122u8, 0u8, 105u8, 97u8, 0u8, 105u8, 98u8, 97u8, - 105u8, 98u8, 98u8, 105u8, 100u8, 0u8, 105u8, 102u8, 101u8, 105u8, 103u8, - 0u8, 105u8, 105u8, 0u8, 105u8, 107u8, 0u8, 105u8, 108u8, 111u8, 105u8, - 110u8, 0u8, 105u8, 110u8, 104u8, 105u8, 111u8, 0u8, 105u8, 115u8, 0u8, - 105u8, 116u8, 0u8, 105u8, 117u8, 0u8, 105u8, 119u8, 0u8, 105u8, 122u8, - 104u8, 106u8, 97u8, 0u8, 
106u8, 97u8, 109u8, 106u8, 98u8, 111u8, 106u8, - 103u8, 111u8, 106u8, 105u8, 0u8, 106u8, 109u8, 99u8, 106u8, 109u8, 108u8, - 106u8, 117u8, 116u8, 106u8, 118u8, 0u8, 106u8, 119u8, 0u8, 107u8, 97u8, - 0u8, 107u8, 97u8, 97u8, 107u8, 97u8, 98u8, 107u8, 97u8, 99u8, 107u8, 97u8, - 106u8, 107u8, 97u8, 109u8, 107u8, 97u8, 111u8, 107u8, 97u8, 119u8, 107u8, - 98u8, 100u8, 107u8, 98u8, 121u8, 107u8, 99u8, 103u8, 107u8, 99u8, 107u8, - 107u8, 100u8, 101u8, 107u8, 100u8, 104u8, 107u8, 100u8, 116u8, 107u8, - 101u8, 97u8, 107u8, 101u8, 110u8, 107u8, 102u8, 111u8, 107u8, 102u8, 114u8, - 107u8, 102u8, 121u8, 107u8, 103u8, 0u8, 107u8, 103u8, 101u8, 107u8, 103u8, - 112u8, 107u8, 104u8, 97u8, 107u8, 104u8, 98u8, 107u8, 104u8, 110u8, 107u8, - 104u8, 113u8, 107u8, 104u8, 116u8, 107u8, 104u8, 119u8, 107u8, 105u8, 0u8, - 107u8, 105u8, 117u8, 107u8, 106u8, 0u8, 107u8, 106u8, 103u8, 107u8, 107u8, - 0u8, 107u8, 107u8, 106u8, 107u8, 108u8, 0u8, 107u8, 108u8, 110u8, 107u8, - 109u8, 0u8, 107u8, 109u8, 98u8, 107u8, 110u8, 0u8, 107u8, 110u8, 102u8, - 107u8, 111u8, 0u8, 107u8, 111u8, 105u8, 107u8, 111u8, 107u8, 107u8, 111u8, - 115u8, 107u8, 112u8, 101u8, 107u8, 114u8, 99u8, 107u8, 114u8, 105u8, 107u8, - 114u8, 106u8, 107u8, 114u8, 108u8, 107u8, 114u8, 117u8, 107u8, 115u8, 0u8, - 107u8, 115u8, 98u8, 107u8, 115u8, 102u8, 107u8, 115u8, 104u8, 107u8, 116u8, - 114u8, 107u8, 117u8, 0u8, 107u8, 117u8, 109u8, 107u8, 118u8, 0u8, 107u8, - 118u8, 114u8, 107u8, 118u8, 120u8, 107u8, 119u8, 0u8, 107u8, 119u8, 107u8, - 107u8, 120u8, 108u8, 107u8, 120u8, 109u8, 107u8, 120u8, 112u8, 107u8, - 121u8, 0u8, 107u8, 122u8, 106u8, 107u8, 122u8, 116u8, 108u8, 97u8, 0u8, - 108u8, 97u8, 98u8, 108u8, 97u8, 100u8, 108u8, 97u8, 103u8, 108u8, 97u8, - 104u8, 108u8, 97u8, 106u8, 108u8, 98u8, 0u8, 108u8, 98u8, 101u8, 108u8, - 98u8, 119u8, 108u8, 99u8, 112u8, 108u8, 101u8, 112u8, 108u8, 101u8, 122u8, - 108u8, 103u8, 0u8, 108u8, 105u8, 0u8, 108u8, 105u8, 102u8, 108u8, 105u8, - 106u8, 108u8, 105u8, 108u8, 108u8, 105u8, 115u8, 108u8, 106u8, 112u8, - 108u8, 107u8, 105u8, 108u8, 107u8, 116u8, 108u8, 109u8, 110u8, 108u8, - 109u8, 111u8, 108u8, 110u8, 0u8, 108u8, 111u8, 0u8, 108u8, 111u8, 108u8, - 108u8, 111u8, 122u8, 108u8, 114u8, 99u8, 108u8, 116u8, 0u8, 108u8, 116u8, - 103u8, 108u8, 117u8, 0u8, 108u8, 117u8, 97u8, 108u8, 117u8, 111u8, 108u8, - 117u8, 121u8, 108u8, 117u8, 122u8, 108u8, 118u8, 0u8, 108u8, 119u8, 108u8, - 108u8, 122u8, 104u8, 108u8, 122u8, 122u8, 109u8, 97u8, 100u8, 109u8, 97u8, - 102u8, 109u8, 97u8, 103u8, 109u8, 97u8, 105u8, 109u8, 97u8, 107u8, 109u8, - 97u8, 110u8, 109u8, 97u8, 115u8, 109u8, 97u8, 122u8, 109u8, 100u8, 102u8, - 109u8, 100u8, 104u8, 109u8, 100u8, 114u8, 109u8, 101u8, 110u8, 109u8, - 101u8, 114u8, 109u8, 102u8, 97u8, 109u8, 102u8, 101u8, 109u8, 103u8, 0u8, - 109u8, 103u8, 104u8, 109u8, 103u8, 111u8, 109u8, 103u8, 112u8, 109u8, - 103u8, 121u8, 109u8, 104u8, 0u8, 109u8, 105u8, 0u8, 109u8, 105u8, 99u8, - 109u8, 105u8, 110u8, 109u8, 107u8, 0u8, 109u8, 108u8, 0u8, 109u8, 108u8, - 115u8, 109u8, 110u8, 0u8, 109u8, 110u8, 105u8, 109u8, 110u8, 119u8, 109u8, - 111u8, 0u8, 109u8, 111u8, 101u8, 109u8, 111u8, 104u8, 109u8, 111u8, 115u8, - 109u8, 114u8, 0u8, 109u8, 114u8, 100u8, 109u8, 114u8, 106u8, 109u8, 114u8, - 111u8, 109u8, 115u8, 0u8, 109u8, 116u8, 0u8, 109u8, 116u8, 114u8, 109u8, - 117u8, 97u8, 109u8, 117u8, 115u8, 109u8, 118u8, 121u8, 109u8, 119u8, 107u8, - 109u8, 119u8, 114u8, 109u8, 119u8, 118u8, 109u8, 119u8, 119u8, 109u8, - 120u8, 99u8, 109u8, 121u8, 0u8, 109u8, 121u8, 118u8, 109u8, 121u8, 120u8, - 109u8, 121u8, 122u8, 109u8, 122u8, 
110u8, 110u8, 97u8, 0u8, 110u8, 97u8, - 110u8, 110u8, 97u8, 112u8, 110u8, 97u8, 113u8, 110u8, 98u8, 0u8, 110u8, - 99u8, 104u8, 110u8, 100u8, 0u8, 110u8, 100u8, 99u8, 110u8, 100u8, 115u8, - 110u8, 101u8, 0u8, 110u8, 101u8, 119u8, 110u8, 103u8, 0u8, 110u8, 103u8, - 108u8, 110u8, 104u8, 101u8, 110u8, 104u8, 119u8, 110u8, 105u8, 106u8, - 110u8, 105u8, 117u8, 110u8, 106u8, 111u8, 110u8, 108u8, 0u8, 110u8, 109u8, - 103u8, 110u8, 110u8, 0u8, 110u8, 110u8, 104u8, 110u8, 110u8, 112u8, 110u8, - 111u8, 0u8, 110u8, 111u8, 100u8, 110u8, 111u8, 101u8, 110u8, 111u8, 110u8, - 110u8, 113u8, 111u8, 110u8, 114u8, 0u8, 110u8, 115u8, 107u8, 110u8, 115u8, - 111u8, 110u8, 115u8, 116u8, 110u8, 117u8, 115u8, 110u8, 118u8, 0u8, 110u8, - 120u8, 113u8, 110u8, 121u8, 0u8, 110u8, 121u8, 109u8, 110u8, 121u8, 110u8, - 110u8, 122u8, 105u8, 111u8, 99u8, 0u8, 111u8, 106u8, 0u8, 111u8, 106u8, - 115u8, 111u8, 107u8, 97u8, 111u8, 109u8, 0u8, 111u8, 114u8, 0u8, 111u8, - 115u8, 0u8, 111u8, 115u8, 97u8, 111u8, 116u8, 107u8, 111u8, 117u8, 105u8, - 112u8, 97u8, 0u8, 112u8, 97u8, 103u8, 112u8, 97u8, 108u8, 112u8, 97u8, - 109u8, 112u8, 97u8, 112u8, 112u8, 97u8, 117u8, 112u8, 99u8, 100u8, 112u8, - 99u8, 109u8, 112u8, 100u8, 99u8, 112u8, 100u8, 116u8, 112u8, 101u8, 111u8, - 112u8, 102u8, 108u8, 112u8, 104u8, 110u8, 112u8, 105u8, 115u8, 112u8, - 107u8, 97u8, 112u8, 107u8, 111u8, 112u8, 108u8, 0u8, 112u8, 109u8, 115u8, - 112u8, 110u8, 116u8, 112u8, 111u8, 110u8, 112u8, 112u8, 97u8, 112u8, 113u8, - 109u8, 112u8, 114u8, 97u8, 112u8, 114u8, 100u8, 112u8, 114u8, 103u8, 112u8, - 115u8, 0u8, 112u8, 116u8, 0u8, 112u8, 117u8, 117u8, 113u8, 117u8, 0u8, - 113u8, 117u8, 99u8, 113u8, 117u8, 103u8, 114u8, 97u8, 106u8, 114u8, 99u8, - 102u8, 114u8, 101u8, 106u8, 114u8, 103u8, 110u8, 114u8, 104u8, 103u8, - 114u8, 105u8, 97u8, 114u8, 105u8, 102u8, 114u8, 106u8, 115u8, 114u8, 107u8, - 116u8, 114u8, 109u8, 0u8, 114u8, 109u8, 102u8, 114u8, 109u8, 111u8, 114u8, - 109u8, 116u8, 114u8, 109u8, 117u8, 114u8, 110u8, 0u8, 114u8, 110u8, 103u8, - 114u8, 111u8, 0u8, 114u8, 111u8, 98u8, 114u8, 111u8, 102u8, 114u8, 116u8, - 109u8, 114u8, 117u8, 0u8, 114u8, 117u8, 101u8, 114u8, 117u8, 103u8, 114u8, - 119u8, 0u8, 114u8, 119u8, 107u8, 114u8, 121u8, 117u8, 115u8, 97u8, 0u8, - 115u8, 97u8, 102u8, 115u8, 97u8, 104u8, 115u8, 97u8, 113u8, 115u8, 97u8, - 115u8, 115u8, 97u8, 116u8, 115u8, 97u8, 118u8, 115u8, 97u8, 122u8, 115u8, - 98u8, 112u8, 115u8, 99u8, 0u8, 115u8, 99u8, 107u8, 115u8, 99u8, 110u8, - 115u8, 99u8, 111u8, 115u8, 100u8, 0u8, 115u8, 100u8, 99u8, 115u8, 100u8, - 104u8, 115u8, 101u8, 0u8, 115u8, 101u8, 102u8, 115u8, 101u8, 104u8, 115u8, - 101u8, 105u8, 115u8, 101u8, 115u8, 115u8, 103u8, 0u8, 115u8, 103u8, 97u8, - 115u8, 103u8, 115u8, 115u8, 104u8, 105u8, 115u8, 104u8, 110u8, 115u8, - 105u8, 0u8, 115u8, 105u8, 100u8, 115u8, 107u8, 0u8, 115u8, 107u8, 114u8, - 115u8, 108u8, 0u8, 115u8, 108u8, 105u8, 115u8, 108u8, 121u8, 115u8, 109u8, - 0u8, 115u8, 109u8, 97u8, 115u8, 109u8, 100u8, 115u8, 109u8, 106u8, 115u8, - 109u8, 110u8, 115u8, 109u8, 112u8, 115u8, 109u8, 115u8, 115u8, 110u8, 0u8, - 115u8, 110u8, 98u8, 115u8, 110u8, 107u8, 115u8, 111u8, 0u8, 115u8, 111u8, - 103u8, 115u8, 111u8, 117u8, 115u8, 113u8, 0u8, 115u8, 114u8, 0u8, 115u8, - 114u8, 98u8, 115u8, 114u8, 110u8, 115u8, 114u8, 114u8, 115u8, 114u8, 120u8, - 115u8, 115u8, 0u8, 115u8, 115u8, 121u8, 115u8, 116u8, 0u8, 115u8, 116u8, - 113u8, 115u8, 117u8, 0u8, 115u8, 117u8, 107u8, 115u8, 117u8, 115u8, 115u8, - 118u8, 0u8, 115u8, 119u8, 0u8, 115u8, 119u8, 98u8, 115u8, 119u8, 99u8, - 115u8, 119u8, 103u8, 115u8, 119u8, 
118u8, 115u8, 120u8, 110u8, 115u8, - 121u8, 108u8, 115u8, 121u8, 114u8, 115u8, 122u8, 108u8, 116u8, 97u8, 0u8, - 116u8, 97u8, 106u8, 116u8, 98u8, 119u8, 116u8, 99u8, 121u8, 116u8, 100u8, - 100u8, 116u8, 100u8, 103u8, 116u8, 100u8, 104u8, 116u8, 100u8, 117u8, - 116u8, 101u8, 0u8, 116u8, 101u8, 109u8, 116u8, 101u8, 111u8, 116u8, 101u8, - 116u8, 116u8, 103u8, 0u8, 116u8, 104u8, 0u8, 116u8, 104u8, 108u8, 116u8, - 104u8, 113u8, 116u8, 104u8, 114u8, 116u8, 105u8, 0u8, 116u8, 105u8, 103u8, - 116u8, 105u8, 118u8, 116u8, 107u8, 0u8, 116u8, 107u8, 108u8, 116u8, 107u8, - 114u8, 116u8, 107u8, 116u8, 116u8, 108u8, 0u8, 116u8, 108u8, 121u8, 116u8, - 109u8, 104u8, 116u8, 110u8, 0u8, 116u8, 111u8, 0u8, 116u8, 111u8, 103u8, - 116u8, 111u8, 107u8, 116u8, 112u8, 105u8, 116u8, 114u8, 0u8, 116u8, 114u8, - 117u8, 116u8, 114u8, 118u8, 116u8, 114u8, 119u8, 116u8, 115u8, 0u8, 116u8, - 115u8, 100u8, 116u8, 115u8, 102u8, 116u8, 115u8, 103u8, 116u8, 115u8, - 106u8, 116u8, 116u8, 0u8, 116u8, 116u8, 106u8, 116u8, 116u8, 115u8, 116u8, - 116u8, 116u8, 116u8, 117u8, 109u8, 116u8, 118u8, 108u8, 116u8, 119u8, - 113u8, 116u8, 120u8, 103u8, 116u8, 120u8, 111u8, 116u8, 121u8, 0u8, 116u8, - 121u8, 118u8, 116u8, 122u8, 109u8, 117u8, 100u8, 105u8, 117u8, 100u8, - 109u8, 117u8, 103u8, 0u8, 117u8, 103u8, 97u8, 117u8, 107u8, 0u8, 117u8, - 108u8, 105u8, 117u8, 109u8, 98u8, 117u8, 110u8, 114u8, 117u8, 110u8, 120u8, - 117u8, 114u8, 0u8, 117u8, 122u8, 0u8, 118u8, 97u8, 105u8, 118u8, 101u8, - 0u8, 118u8, 101u8, 99u8, 118u8, 101u8, 112u8, 118u8, 105u8, 0u8, 118u8, - 105u8, 99u8, 118u8, 108u8, 115u8, 118u8, 109u8, 102u8, 118u8, 109u8, 119u8, - 118u8, 111u8, 0u8, 118u8, 111u8, 116u8, 118u8, 114u8, 111u8, 118u8, 117u8, - 110u8, 119u8, 97u8, 0u8, 119u8, 97u8, 101u8, 119u8, 97u8, 108u8, 119u8, - 97u8, 114u8, 119u8, 98u8, 112u8, 119u8, 98u8, 113u8, 119u8, 98u8, 114u8, - 119u8, 108u8, 115u8, 119u8, 110u8, 105u8, 119u8, 111u8, 0u8, 119u8, 115u8, - 103u8, 119u8, 116u8, 109u8, 119u8, 117u8, 117u8, 120u8, 97u8, 118u8, 120u8, - 99u8, 111u8, 120u8, 99u8, 114u8, 120u8, 104u8, 0u8, 120u8, 108u8, 99u8, - 120u8, 108u8, 100u8, 120u8, 109u8, 102u8, 120u8, 109u8, 110u8, 120u8, - 109u8, 114u8, 120u8, 110u8, 97u8, 120u8, 110u8, 114u8, 120u8, 111u8, 103u8, - 120u8, 112u8, 114u8, 120u8, 115u8, 97u8, 120u8, 115u8, 114u8, 121u8, 97u8, - 111u8, 121u8, 97u8, 112u8, 121u8, 97u8, 118u8, 121u8, 98u8, 98u8, 121u8, - 105u8, 0u8, 121u8, 111u8, 0u8, 121u8, 114u8, 108u8, 121u8, 117u8, 97u8, - 121u8, 117u8, 101u8, 122u8, 97u8, 0u8, 122u8, 97u8, 103u8, 122u8, 100u8, - 106u8, 122u8, 101u8, 97u8, 122u8, 103u8, 104u8, 122u8, 104u8, 0u8, 122u8, - 104u8, 120u8, 122u8, 107u8, 116u8, 122u8, 108u8, 109u8, 122u8, 109u8, - 105u8, 122u8, 117u8, 0u8, 122u8, 122u8, 97u8, - ]) - }, - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 69u8, 84u8, 0u8, 71u8, 69u8, 0u8, 71u8, 72u8, 0u8, 73u8, 68u8, 0u8, 85u8, - 71u8, 0u8, 71u8, 72u8, 0u8, 66u8, 84u8, 0u8, 82u8, 85u8, 0u8, 73u8, 82u8, - 0u8, 84u8, 78u8, 0u8, 90u8, 65u8, 0u8, 67u8, 77u8, 0u8, 73u8, 78u8, 0u8, - 84u8, 78u8, 0u8, 71u8, 72u8, 0u8, 73u8, 81u8, 0u8, 88u8, 75u8, 0u8, 82u8, - 85u8, 0u8, 69u8, 84u8, 0u8, 78u8, 71u8, 0u8, 69u8, 83u8, 0u8, 78u8, 71u8, - 0u8, 73u8, 68u8, 0u8, 84u8, 71u8, 0u8, 69u8, 71u8, 0u8, 73u8, 82u8, 0u8, - 67u8, 76u8, 0u8, 66u8, 79u8, 0u8, 68u8, 90u8, 0u8, 83u8, 65u8, 0u8, 77u8, - 65u8, 0u8, 69u8, 71u8, 0u8, 73u8, 78u8, 0u8, 84u8, 90u8, 0u8, 85u8, 83u8, - 0u8, 69u8, 83u8, 0u8, 67u8, 65u8, 0u8, 82u8, 85u8, 0u8, 73u8, 78u8, 0u8, - 66u8, 79u8, 0u8, 65u8, 90u8, 0u8, 82u8, 85u8, 0u8, 80u8, 75u8, 0u8, 73u8, - 68u8, 
0u8, 78u8, 80u8, 0u8, 65u8, 84u8, 0u8, 67u8, 77u8, 0u8, 67u8, 77u8, - 0u8, 73u8, 68u8, 0u8, 67u8, 77u8, 0u8, 67u8, 73u8, 0u8, 66u8, 89u8, 0u8, - 83u8, 68u8, 0u8, 90u8, 77u8, 0u8, 73u8, 68u8, 0u8, 84u8, 90u8, 0u8, 67u8, - 77u8, 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 73u8, 78u8, 0u8, 66u8, 71u8, - 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 84u8, 82u8, 0u8, 73u8, 78u8, 0u8, - 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 86u8, 85u8, 0u8, 80u8, 72u8, 0u8, 78u8, - 71u8, 0u8, 73u8, 78u8, 0u8, 73u8, 68u8, 0u8, 83u8, 78u8, 0u8, 67u8, 77u8, - 0u8, 80u8, 72u8, 0u8, 67u8, 65u8, 0u8, 77u8, 89u8, 0u8, 86u8, 78u8, 0u8, - 77u8, 76u8, 0u8, 77u8, 76u8, 0u8, 66u8, 68u8, 0u8, 67u8, 78u8, 0u8, 73u8, - 78u8, 0u8, 73u8, 82u8, 0u8, 67u8, 73u8, 0u8, 70u8, 82u8, 0u8, 73u8, 78u8, - 0u8, 80u8, 75u8, 0u8, 73u8, 78u8, 0u8, 66u8, 65u8, 0u8, 76u8, 82u8, 0u8, - 67u8, 77u8, 0u8, 80u8, 72u8, 0u8, 80u8, 75u8, 0u8, 82u8, 85u8, 0u8, 89u8, - 84u8, 0u8, 73u8, 68u8, 0u8, 67u8, 77u8, 0u8, 71u8, 81u8, 0u8, 69u8, 82u8, - 0u8, 67u8, 77u8, 0u8, 77u8, 76u8, 0u8, 69u8, 83u8, 0u8, 85u8, 83u8, 0u8, - 78u8, 71u8, 0u8, 66u8, 68u8, 0u8, 82u8, 85u8, 0u8, 80u8, 72u8, 0u8, 85u8, - 71u8, 0u8, 71u8, 85u8, 0u8, 70u8, 77u8, 0u8, 82u8, 85u8, 0u8, 85u8, 83u8, - 0u8, 67u8, 65u8, 0u8, 85u8, 83u8, 0u8, 85u8, 83u8, 0u8, 75u8, 72u8, 0u8, - 86u8, 78u8, 0u8, 73u8, 81u8, 0u8, 67u8, 65u8, 0u8, 77u8, 78u8, 0u8, 70u8, - 82u8, 0u8, 69u8, 71u8, 0u8, 80u8, 72u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, - 0u8, 85u8, 65u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 83u8, 67u8, 0u8, - 67u8, 90u8, 0u8, 80u8, 76u8, 0u8, 67u8, 65u8, 0u8, 77u8, 77u8, 0u8, 82u8, - 85u8, 0u8, 82u8, 85u8, 0u8, 71u8, 66u8, 0u8, 68u8, 75u8, 0u8, 67u8, 73u8, - 0u8, 85u8, 83u8, 0u8, 82u8, 85u8, 0u8, 75u8, 69u8, 0u8, 73u8, 78u8, 0u8, - 68u8, 69u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 78u8, 69u8, 0u8, 78u8, - 71u8, 0u8, 67u8, 73u8, 0u8, 73u8, 78u8, 0u8, 67u8, 78u8, 0u8, 68u8, 69u8, - 0u8, 77u8, 76u8, 0u8, 77u8, 89u8, 0u8, 78u8, 80u8, 0u8, 67u8, 77u8, 0u8, - 77u8, 86u8, 0u8, 83u8, 78u8, 0u8, 66u8, 70u8, 0u8, 66u8, 84u8, 0u8, 75u8, - 69u8, 0u8, 71u8, 72u8, 0u8, 78u8, 71u8, 0u8, 73u8, 84u8, 0u8, 69u8, 71u8, - 0u8, 77u8, 77u8, 0u8, 71u8, 82u8, 0u8, 85u8, 83u8, 0u8, 48u8, 48u8, 49u8, - 69u8, 83u8, 0u8, 73u8, 78u8, 0u8, 85u8, 83u8, 0u8, 69u8, 69u8, 0u8, 73u8, - 84u8, 0u8, 69u8, 83u8, 0u8, 67u8, 77u8, 0u8, 69u8, 83u8, 0u8, 73u8, 82u8, - 0u8, 71u8, 81u8, 0u8, 83u8, 78u8, 0u8, 77u8, 76u8, 0u8, 70u8, 73u8, 0u8, - 83u8, 68u8, 0u8, 80u8, 72u8, 0u8, 83u8, 69u8, 0u8, 70u8, 74u8, 0u8, 70u8, - 79u8, 0u8, 66u8, 74u8, 0u8, 70u8, 82u8, 0u8, 85u8, 83u8, 0u8, 70u8, 82u8, - 0u8, 68u8, 69u8, 0u8, 68u8, 69u8, 0u8, 67u8, 77u8, 0u8, 87u8, 70u8, 0u8, - 71u8, 78u8, 0u8, 78u8, 69u8, 0u8, 73u8, 84u8, 0u8, 78u8, 71u8, 0u8, 83u8, - 68u8, 0u8, 78u8, 76u8, 0u8, 73u8, 69u8, 0u8, 71u8, 72u8, 0u8, 77u8, 68u8, - 0u8, 67u8, 78u8, 0u8, 73u8, 68u8, 0u8, 73u8, 78u8, 0u8, 73u8, 82u8, 0u8, - 71u8, 70u8, 0u8, 71u8, 66u8, 0u8, 69u8, 84u8, 0u8, 78u8, 80u8, 0u8, 75u8, - 73u8, 0u8, 80u8, 75u8, 0u8, 80u8, 75u8, 0u8, 69u8, 83u8, 0u8, 73u8, 82u8, - 0u8, 80u8, 89u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 73u8, 68u8, 0u8, - 78u8, 76u8, 0u8, 85u8, 65u8, 0u8, 67u8, 89u8, 0u8, 73u8, 78u8, 0u8, 67u8, - 72u8, 0u8, 73u8, 78u8, 0u8, 66u8, 82u8, 0u8, 67u8, 79u8, 0u8, 71u8, 72u8, - 0u8, 75u8, 69u8, 0u8, 73u8, 77u8, 0u8, 78u8, 80u8, 0u8, 67u8, 65u8, 0u8, - 78u8, 71u8, 0u8, 67u8, 78u8, 0u8, 85u8, 83u8, 0u8, 65u8, 70u8, 0u8, 73u8, - 76u8, 0u8, 73u8, 78u8, 0u8, 70u8, 74u8, 0u8, 80u8, 72u8, 0u8, 84u8, 82u8, - 0u8, 67u8, 78u8, 0u8, 80u8, 75u8, 0u8, 73u8, 78u8, 0u8, 85u8, 83u8, 0u8, - 
80u8, 72u8, 0u8, 80u8, 75u8, 0u8, 80u8, 71u8, 0u8, 73u8, 78u8, 0u8, 73u8, - 78u8, 0u8, 72u8, 82u8, 0u8, 68u8, 69u8, 0u8, 67u8, 78u8, 0u8, 72u8, 84u8, - 0u8, 72u8, 85u8, 0u8, 67u8, 65u8, 0u8, 65u8, 77u8, 0u8, 78u8, 65u8, 0u8, - 48u8, 48u8, 49u8, 77u8, 89u8, 0u8, 78u8, 71u8, 0u8, 73u8, 68u8, 0u8, 84u8, - 71u8, 0u8, 78u8, 71u8, 0u8, 67u8, 78u8, 0u8, 85u8, 83u8, 0u8, 80u8, 72u8, - 0u8, 73u8, 68u8, 0u8, 82u8, 85u8, 0u8, 48u8, 48u8, 49u8, 73u8, 83u8, 0u8, - 73u8, 84u8, 0u8, 67u8, 65u8, 0u8, 73u8, 76u8, 0u8, 82u8, 85u8, 0u8, 74u8, - 80u8, 0u8, 74u8, 77u8, 0u8, 48u8, 48u8, 49u8, 67u8, 77u8, 0u8, 85u8, 65u8, - 0u8, 84u8, 90u8, 0u8, 78u8, 80u8, 0u8, 68u8, 75u8, 0u8, 73u8, 68u8, 0u8, - 73u8, 68u8, 0u8, 71u8, 69u8, 0u8, 85u8, 90u8, 0u8, 68u8, 90u8, 0u8, 77u8, - 77u8, 0u8, 78u8, 71u8, 0u8, 75u8, 69u8, 0u8, 77u8, 76u8, 0u8, 73u8, 68u8, - 0u8, 82u8, 85u8, 0u8, 78u8, 69u8, 0u8, 78u8, 71u8, 0u8, 90u8, 87u8, 0u8, - 84u8, 90u8, 0u8, 84u8, 71u8, 0u8, 84u8, 72u8, 0u8, 67u8, 86u8, 0u8, 67u8, - 77u8, 0u8, 67u8, 73u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 67u8, 68u8, - 0u8, 73u8, 68u8, 0u8, 66u8, 82u8, 0u8, 73u8, 78u8, 0u8, 67u8, 78u8, 0u8, - 73u8, 78u8, 0u8, 77u8, 76u8, 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 75u8, - 69u8, 0u8, 84u8, 82u8, 0u8, 78u8, 65u8, 0u8, 76u8, 65u8, 0u8, 75u8, 90u8, - 0u8, 67u8, 77u8, 0u8, 71u8, 76u8, 0u8, 75u8, 69u8, 0u8, 75u8, 72u8, 0u8, - 65u8, 79u8, 0u8, 73u8, 78u8, 0u8, 71u8, 87u8, 0u8, 75u8, 82u8, 0u8, 82u8, - 85u8, 0u8, 73u8, 78u8, 0u8, 70u8, 77u8, 0u8, 76u8, 82u8, 0u8, 82u8, 85u8, - 0u8, 83u8, 76u8, 0u8, 80u8, 72u8, 0u8, 82u8, 85u8, 0u8, 73u8, 78u8, 0u8, - 73u8, 78u8, 0u8, 84u8, 90u8, 0u8, 67u8, 77u8, 0u8, 68u8, 69u8, 0u8, 77u8, - 89u8, 0u8, 84u8, 82u8, 0u8, 82u8, 85u8, 0u8, 82u8, 85u8, 0u8, 73u8, 68u8, - 0u8, 80u8, 75u8, 0u8, 71u8, 66u8, 0u8, 67u8, 65u8, 0u8, 73u8, 78u8, 0u8, - 84u8, 72u8, 0u8, 80u8, 75u8, 0u8, 75u8, 71u8, 0u8, 77u8, 89u8, 0u8, 77u8, - 89u8, 0u8, 86u8, 65u8, 0u8, 71u8, 82u8, 0u8, 73u8, 76u8, 0u8, 84u8, 90u8, - 0u8, 80u8, 75u8, 0u8, 85u8, 71u8, 0u8, 76u8, 85u8, 0u8, 82u8, 85u8, 0u8, - 73u8, 68u8, 0u8, 67u8, 78u8, 0u8, 73u8, 78u8, 0u8, 82u8, 85u8, 0u8, 85u8, - 71u8, 0u8, 78u8, 76u8, 0u8, 78u8, 80u8, 0u8, 73u8, 84u8, 0u8, 67u8, 65u8, - 0u8, 67u8, 78u8, 0u8, 73u8, 68u8, 0u8, 73u8, 82u8, 0u8, 85u8, 83u8, 0u8, - 73u8, 78u8, 0u8, 73u8, 84u8, 0u8, 67u8, 68u8, 0u8, 76u8, 65u8, 0u8, 67u8, - 68u8, 0u8, 90u8, 77u8, 0u8, 73u8, 82u8, 0u8, 76u8, 84u8, 0u8, 76u8, 86u8, - 0u8, 67u8, 68u8, 0u8, 67u8, 68u8, 0u8, 75u8, 69u8, 0u8, 75u8, 69u8, 0u8, - 73u8, 82u8, 0u8, 76u8, 86u8, 0u8, 84u8, 72u8, 0u8, 67u8, 78u8, 0u8, 84u8, - 82u8, 0u8, 73u8, 68u8, 0u8, 67u8, 77u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, - 0u8, 73u8, 68u8, 0u8, 71u8, 77u8, 0u8, 75u8, 69u8, 0u8, 77u8, 88u8, 0u8, - 82u8, 85u8, 0u8, 80u8, 72u8, 0u8, 73u8, 68u8, 0u8, 83u8, 76u8, 0u8, 75u8, - 69u8, 0u8, 84u8, 72u8, 0u8, 77u8, 85u8, 0u8, 77u8, 71u8, 0u8, 77u8, 90u8, - 0u8, 67u8, 77u8, 0u8, 78u8, 80u8, 0u8, 84u8, 90u8, 0u8, 77u8, 72u8, 0u8, - 78u8, 90u8, 0u8, 67u8, 65u8, 0u8, 73u8, 68u8, 0u8, 77u8, 75u8, 0u8, 73u8, - 78u8, 0u8, 83u8, 68u8, 0u8, 77u8, 78u8, 0u8, 73u8, 78u8, 0u8, 77u8, 77u8, - 0u8, 82u8, 79u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 66u8, 70u8, 0u8, - 73u8, 78u8, 0u8, 78u8, 80u8, 0u8, 82u8, 85u8, 0u8, 66u8, 68u8, 0u8, 77u8, - 89u8, 0u8, 77u8, 84u8, 0u8, 73u8, 78u8, 0u8, 67u8, 77u8, 0u8, 85u8, 83u8, - 0u8, 80u8, 75u8, 0u8, 77u8, 76u8, 0u8, 73u8, 78u8, 0u8, 73u8, 68u8, 0u8, - 85u8, 83u8, 0u8, 90u8, 87u8, 0u8, 77u8, 77u8, 0u8, 82u8, 85u8, 0u8, 85u8, - 71u8, 0u8, 73u8, 82u8, 0u8, 73u8, 82u8, 0u8, 78u8, 82u8, 0u8, 67u8, 
78u8, - 0u8, 73u8, 84u8, 0u8, 78u8, 65u8, 0u8, 78u8, 79u8, 0u8, 77u8, 88u8, 0u8, - 90u8, 87u8, 0u8, 77u8, 90u8, 0u8, 68u8, 69u8, 0u8, 78u8, 80u8, 0u8, 78u8, - 80u8, 0u8, 78u8, 65u8, 0u8, 77u8, 90u8, 0u8, 77u8, 88u8, 0u8, 77u8, 88u8, - 0u8, 73u8, 68u8, 0u8, 78u8, 85u8, 0u8, 73u8, 78u8, 0u8, 78u8, 76u8, 0u8, - 67u8, 77u8, 0u8, 78u8, 79u8, 0u8, 67u8, 77u8, 0u8, 73u8, 78u8, 0u8, 78u8, - 79u8, 0u8, 84u8, 72u8, 0u8, 73u8, 78u8, 0u8, 83u8, 69u8, 0u8, 71u8, 78u8, - 0u8, 90u8, 65u8, 0u8, 67u8, 65u8, 0u8, 90u8, 65u8, 0u8, 73u8, 78u8, 0u8, - 83u8, 83u8, 0u8, 85u8, 83u8, 0u8, 67u8, 78u8, 0u8, 77u8, 87u8, 0u8, 84u8, - 90u8, 0u8, 85u8, 71u8, 0u8, 71u8, 72u8, 0u8, 70u8, 82u8, 0u8, 67u8, 65u8, - 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 69u8, 84u8, 0u8, 73u8, 78u8, 0u8, - 71u8, 69u8, 0u8, 85u8, 83u8, 0u8, 77u8, 78u8, 0u8, 49u8, 52u8, 51u8, 73u8, - 78u8, 0u8, 80u8, 72u8, 0u8, 73u8, 82u8, 0u8, 80u8, 72u8, 0u8, 65u8, 87u8, - 0u8, 80u8, 87u8, 0u8, 70u8, 82u8, 0u8, 78u8, 71u8, 0u8, 85u8, 83u8, 0u8, - 67u8, 65u8, 0u8, 73u8, 82u8, 0u8, 68u8, 69u8, 0u8, 76u8, 66u8, 0u8, 83u8, - 66u8, 0u8, 73u8, 78u8, 0u8, 75u8, 69u8, 0u8, 80u8, 76u8, 0u8, 73u8, 84u8, - 0u8, 71u8, 82u8, 0u8, 70u8, 77u8, 0u8, 73u8, 78u8, 0u8, 67u8, 65u8, 0u8, - 80u8, 75u8, 0u8, 73u8, 82u8, 0u8, 48u8, 48u8, 49u8, 65u8, 70u8, 0u8, 66u8, - 82u8, 0u8, 71u8, 65u8, 0u8, 80u8, 69u8, 0u8, 71u8, 84u8, 0u8, 69u8, 67u8, - 0u8, 73u8, 78u8, 0u8, 82u8, 69u8, 0u8, 73u8, 68u8, 0u8, 73u8, 84u8, 0u8, - 77u8, 77u8, 0u8, 73u8, 78u8, 0u8, 77u8, 65u8, 0u8, 78u8, 80u8, 0u8, 66u8, - 68u8, 0u8, 67u8, 72u8, 0u8, 70u8, 73u8, 0u8, 67u8, 72u8, 0u8, 73u8, 82u8, - 0u8, 83u8, 69u8, 0u8, 66u8, 73u8, 0u8, 77u8, 90u8, 0u8, 82u8, 79u8, 0u8, - 73u8, 68u8, 0u8, 84u8, 90u8, 0u8, 70u8, 74u8, 0u8, 82u8, 85u8, 0u8, 85u8, - 65u8, 0u8, 83u8, 66u8, 0u8, 82u8, 87u8, 0u8, 84u8, 90u8, 0u8, 74u8, 80u8, - 0u8, 73u8, 78u8, 0u8, 71u8, 72u8, 0u8, 82u8, 85u8, 0u8, 75u8, 69u8, 0u8, - 73u8, 68u8, 0u8, 73u8, 78u8, 0u8, 83u8, 78u8, 0u8, 73u8, 78u8, 0u8, 84u8, - 90u8, 0u8, 73u8, 84u8, 0u8, 73u8, 78u8, 0u8, 73u8, 84u8, 0u8, 71u8, 66u8, - 0u8, 80u8, 75u8, 0u8, 73u8, 84u8, 0u8, 73u8, 82u8, 0u8, 78u8, 79u8, 0u8, - 67u8, 73u8, 0u8, 77u8, 90u8, 0u8, 77u8, 88u8, 0u8, 77u8, 76u8, 0u8, 67u8, - 70u8, 0u8, 73u8, 69u8, 0u8, 76u8, 84u8, 0u8, 77u8, 65u8, 0u8, 77u8, 77u8, - 0u8, 76u8, 75u8, 0u8, 69u8, 84u8, 0u8, 83u8, 75u8, 0u8, 80u8, 75u8, 0u8, - 83u8, 73u8, 0u8, 80u8, 76u8, 0u8, 73u8, 68u8, 0u8, 87u8, 83u8, 0u8, 83u8, - 69u8, 0u8, 65u8, 79u8, 0u8, 83u8, 69u8, 0u8, 70u8, 73u8, 0u8, 73u8, 76u8, - 0u8, 70u8, 73u8, 0u8, 90u8, 87u8, 0u8, 77u8, 89u8, 0u8, 77u8, 76u8, 0u8, - 83u8, 79u8, 0u8, 85u8, 90u8, 0u8, 84u8, 72u8, 0u8, 65u8, 76u8, 0u8, 82u8, - 83u8, 0u8, 73u8, 78u8, 0u8, 83u8, 82u8, 0u8, 83u8, 78u8, 0u8, 73u8, 78u8, - 0u8, 90u8, 65u8, 0u8, 69u8, 82u8, 0u8, 90u8, 65u8, 0u8, 68u8, 69u8, 0u8, - 73u8, 68u8, 0u8, 84u8, 90u8, 0u8, 71u8, 78u8, 0u8, 83u8, 69u8, 0u8, 84u8, - 90u8, 0u8, 89u8, 84u8, 0u8, 67u8, 68u8, 0u8, 68u8, 69u8, 0u8, 73u8, 78u8, - 0u8, 73u8, 68u8, 0u8, 66u8, 68u8, 0u8, 73u8, 81u8, 0u8, 80u8, 76u8, 0u8, - 73u8, 78u8, 0u8, 78u8, 80u8, 0u8, 80u8, 72u8, 0u8, 73u8, 78u8, 0u8, 67u8, - 78u8, 0u8, 78u8, 80u8, 0u8, 78u8, 80u8, 0u8, 77u8, 89u8, 0u8, 73u8, 78u8, - 0u8, 83u8, 76u8, 0u8, 85u8, 71u8, 0u8, 84u8, 76u8, 0u8, 84u8, 74u8, 0u8, - 84u8, 72u8, 0u8, 78u8, 80u8, 0u8, 78u8, 80u8, 0u8, 78u8, 80u8, 0u8, 69u8, - 84u8, 0u8, 69u8, 82u8, 0u8, 78u8, 71u8, 0u8, 84u8, 77u8, 0u8, 84u8, 75u8, - 0u8, 65u8, 90u8, 0u8, 78u8, 80u8, 0u8, 80u8, 72u8, 0u8, 65u8, 90u8, 0u8, - 78u8, 69u8, 0u8, 90u8, 65u8, 0u8, 84u8, 79u8, 0u8, 77u8, 87u8, 
0u8, 48u8, - 48u8, 49u8, 80u8, 71u8, 0u8, 84u8, 82u8, 0u8, 84u8, 82u8, 0u8, 84u8, 87u8, - 0u8, 80u8, 75u8, 0u8, 90u8, 65u8, 0u8, 71u8, 82u8, 0u8, 78u8, 80u8, 0u8, - 80u8, 72u8, 0u8, 66u8, 84u8, 0u8, 82u8, 85u8, 0u8, 85u8, 71u8, 0u8, 84u8, - 72u8, 0u8, 65u8, 90u8, 0u8, 77u8, 87u8, 0u8, 84u8, 86u8, 0u8, 78u8, 69u8, - 0u8, 67u8, 78u8, 0u8, 73u8, 78u8, 0u8, 80u8, 70u8, 0u8, 82u8, 85u8, 0u8, - 77u8, 65u8, 0u8, 82u8, 85u8, 0u8, 82u8, 85u8, 0u8, 67u8, 78u8, 0u8, 83u8, - 89u8, 0u8, 85u8, 65u8, 0u8, 70u8, 77u8, 0u8, 65u8, 79u8, 0u8, 73u8, 78u8, - 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 85u8, 90u8, 0u8, 76u8, 82u8, 0u8, - 90u8, 65u8, 0u8, 73u8, 84u8, 0u8, 82u8, 85u8, 0u8, 86u8, 78u8, 0u8, 83u8, - 88u8, 0u8, 66u8, 69u8, 0u8, 68u8, 69u8, 0u8, 77u8, 90u8, 0u8, 48u8, 48u8, - 49u8, 82u8, 85u8, 0u8, 69u8, 69u8, 0u8, 84u8, 90u8, 0u8, 66u8, 69u8, 0u8, - 67u8, 72u8, 0u8, 69u8, 84u8, 0u8, 80u8, 72u8, 0u8, 65u8, 85u8, 0u8, 73u8, - 78u8, 0u8, 73u8, 78u8, 0u8, 87u8, 70u8, 0u8, 75u8, 77u8, 0u8, 83u8, 78u8, - 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 67u8, 78u8, 0u8, 66u8, 82u8, 0u8, - 85u8, 90u8, 0u8, 84u8, 82u8, 0u8, 90u8, 65u8, 0u8, 84u8, 82u8, 0u8, 84u8, - 82u8, 0u8, 71u8, 69u8, 0u8, 67u8, 78u8, 0u8, 83u8, 68u8, 0u8, 83u8, 65u8, - 0u8, 73u8, 78u8, 0u8, 85u8, 71u8, 0u8, 73u8, 82u8, 0u8, 89u8, 69u8, 0u8, - 78u8, 80u8, 0u8, 77u8, 90u8, 0u8, 70u8, 77u8, 0u8, 67u8, 77u8, 0u8, 67u8, - 77u8, 0u8, 48u8, 48u8, 49u8, 78u8, 71u8, 0u8, 66u8, 82u8, 0u8, 77u8, 88u8, - 0u8, 72u8, 75u8, 0u8, 67u8, 78u8, 0u8, 83u8, 68u8, 0u8, 75u8, 77u8, 0u8, - 78u8, 76u8, 0u8, 77u8, 65u8, 0u8, 67u8, 78u8, 0u8, 67u8, 78u8, 0u8, 67u8, - 78u8, 0u8, 84u8, 71u8, 0u8, 77u8, 89u8, 0u8, 90u8, 65u8, 0u8, 84u8, 82u8, - 0u8, - ]) - }, - ) - }, - ls2r: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap2d::from_parts_unchecked( - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 97u8, 114u8, 99u8, 97u8, 122u8, 0u8, 99u8, 117u8, 0u8, 101u8, 110u8, 0u8, - 102u8, 102u8, 0u8, 103u8, 114u8, 99u8, 107u8, 107u8, 0u8, 107u8, 117u8, - 0u8, 107u8, 121u8, 0u8, 108u8, 105u8, 102u8, 109u8, 97u8, 110u8, 109u8, - 110u8, 0u8, 112u8, 97u8, 0u8, 112u8, 97u8, 108u8, 115u8, 100u8, 0u8, 116u8, - 103u8, 0u8, 117u8, 103u8, 0u8, 117u8, 110u8, 114u8, 117u8, 122u8, 0u8, - 121u8, 117u8, 101u8, 122u8, 104u8, 0u8, - ]) - }, - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 2u8, 0u8, 0u8, 0u8, 3u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 5u8, 0u8, 0u8, - 0u8, 6u8, 0u8, 0u8, 0u8, 7u8, 0u8, 0u8, 0u8, 8u8, 0u8, 0u8, 0u8, 10u8, 0u8, - 0u8, 0u8, 12u8, 0u8, 0u8, 0u8, 13u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, - 15u8, 0u8, 0u8, 0u8, 16u8, 0u8, 0u8, 0u8, 17u8, 0u8, 0u8, 0u8, 20u8, 0u8, - 0u8, 0u8, 21u8, 0u8, 0u8, 0u8, 22u8, 0u8, 0u8, 0u8, 23u8, 0u8, 0u8, 0u8, - 24u8, 0u8, 0u8, 0u8, 25u8, 0u8, 0u8, 0u8, 28u8, 0u8, 0u8, 0u8, - ]) - }, - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 78u8, 98u8, 97u8, 116u8, 80u8, 97u8, 108u8, 109u8, 65u8, 114u8, 97u8, 98u8, - 71u8, 108u8, 97u8, 103u8, 83u8, 104u8, 97u8, 119u8, 65u8, 100u8, 108u8, - 109u8, 76u8, 105u8, 110u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, - 97u8, 98u8, 89u8, 101u8, 122u8, 105u8, 65u8, 114u8, 97u8, 98u8, 76u8, 97u8, - 116u8, 110u8, 76u8, 105u8, 109u8, 98u8, 78u8, 107u8, 111u8, 111u8, 77u8, - 111u8, 110u8, 103u8, 65u8, 114u8, 97u8, 98u8, 80u8, 104u8, 108u8, 112u8, - 68u8, 101u8, 118u8, 97u8, 75u8, 104u8, 111u8, 106u8, 83u8, 105u8, 110u8, - 100u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, 68u8, 101u8, - 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8, 115u8, 66u8, - 111u8, 112u8, 111u8, 72u8, 
97u8, 110u8, 98u8, 72u8, 97u8, 110u8, 116u8, - ]) - }, - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 74u8, 79u8, 0u8, 83u8, 89u8, 0u8, 73u8, 82u8, 0u8, 66u8, 71u8, 0u8, 71u8, - 66u8, 0u8, 71u8, 78u8, 0u8, 71u8, 82u8, 0u8, 67u8, 78u8, 0u8, 73u8, 81u8, - 0u8, 71u8, 69u8, 0u8, 67u8, 78u8, 0u8, 84u8, 82u8, 0u8, 73u8, 78u8, 0u8, - 71u8, 78u8, 0u8, 67u8, 78u8, 0u8, 80u8, 75u8, 0u8, 67u8, 78u8, 0u8, 73u8, - 78u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 75u8, 90u8, - 0u8, 78u8, 80u8, 0u8, 65u8, 70u8, 0u8, 67u8, 78u8, 0u8, 84u8, 87u8, 0u8, - 84u8, 87u8, 0u8, 84u8, 87u8, 0u8, - ]) - }, - ) - }, - }; diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/mod.rs b/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/mod.rs new file mode 100644 index 0000000000000..57f7496dcff8b --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/mod.rs @@ -0,0 +1,6 @@ +// @generated +type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: LocaleFallbackLikelySubtagsV1Marker as :: icu_provider :: DataMarker > :: Yokeable ; +pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> { + locale.is_empty().then(|| &UND) +} +static UND: DataStruct = include!("und.rs.data"); diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/und.rs.data b/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/und.rs.data new file mode 100644 index 0000000000000..4fd177834e9c4 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/fallback/likelysubtags_v1/und.rs.data @@ -0,0 +1,728 @@ +::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1 { + l2s: unsafe { + #[allow(unused_unsafe)] + ::zerovec::ZeroMap::from_parts_unchecked( + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 97u8, 98u8, 0u8, 97u8, 98u8, 113u8, 97u8, 100u8, 112u8, 97u8, 100u8, 121u8, + 97u8, 101u8, 0u8, 97u8, 101u8, 98u8, 97u8, 104u8, 111u8, 97u8, 106u8, + 116u8, 97u8, 107u8, 107u8, 97u8, 108u8, 116u8, 97u8, 109u8, 0u8, 97u8, + 112u8, 99u8, 97u8, 112u8, 100u8, 97u8, 114u8, 0u8, 97u8, 114u8, 99u8, 97u8, + 114u8, 113u8, 97u8, 114u8, 115u8, 97u8, 114u8, 121u8, 97u8, 114u8, 122u8, + 97u8, 115u8, 0u8, 97u8, 115u8, 101u8, 97u8, 118u8, 0u8, 97u8, 118u8, 108u8, + 97u8, 119u8, 97u8, 98u8, 97u8, 0u8, 98u8, 97u8, 108u8, 98u8, 97u8, 112u8, + 98u8, 97u8, 120u8, 98u8, 99u8, 113u8, 98u8, 101u8, 0u8, 98u8, 101u8, 106u8, + 98u8, 102u8, 113u8, 98u8, 102u8, 116u8, 98u8, 102u8, 121u8, 98u8, 103u8, + 0u8, 98u8, 103u8, 99u8, 98u8, 103u8, 110u8, 98u8, 103u8, 120u8, 98u8, + 104u8, 98u8, 98u8, 104u8, 105u8, 98u8, 104u8, 111u8, 98u8, 106u8, 105u8, + 98u8, 106u8, 106u8, 98u8, 108u8, 116u8, 98u8, 110u8, 0u8, 98u8, 111u8, 0u8, + 98u8, 112u8, 121u8, 98u8, 113u8, 105u8, 98u8, 114u8, 97u8, 98u8, 114u8, + 104u8, 98u8, 114u8, 120u8, 98u8, 115u8, 113u8, 98u8, 115u8, 116u8, 98u8, + 116u8, 118u8, 98u8, 117u8, 97u8, 98u8, 121u8, 110u8, 99u8, 99u8, 112u8, + 99u8, 101u8, 0u8, 99u8, 104u8, 109u8, 99u8, 104u8, 114u8, 99u8, 106u8, + 97u8, 99u8, 106u8, 109u8, 99u8, 107u8, 98u8, 99u8, 109u8, 103u8, 99u8, + 111u8, 112u8, 99u8, 114u8, 0u8, 99u8, 114u8, 104u8, 99u8, 114u8, 107u8, + 99u8, 114u8, 108u8, 99u8, 115u8, 119u8, 99u8, 116u8, 100u8, 99u8, 117u8, + 0u8, 99u8, 118u8, 0u8, 100u8, 97u8, 114u8, 100u8, 99u8, 99u8, 100u8, 103u8, + 108u8, 100u8, 109u8, 102u8, 100u8, 111u8, 105u8, 100u8, 114u8, 104u8, + 100u8, 114u8, 115u8, 100u8, 116u8, 121u8, 100u8, 118u8, 0u8, 100u8, 122u8, + 0u8, 101u8, 103u8, 121u8, 101u8, 107u8, 
121u8, 101u8, 108u8, 0u8, 101u8, + 115u8, 103u8, 101u8, 116u8, 116u8, 102u8, 97u8, 0u8, 102u8, 105u8, 97u8, + 102u8, 117u8, 98u8, 103u8, 97u8, 110u8, 103u8, 98u8, 109u8, 103u8, 98u8, + 122u8, 103u8, 101u8, 122u8, 103u8, 103u8, 110u8, 103u8, 106u8, 107u8, + 103u8, 106u8, 117u8, 103u8, 108u8, 107u8, 103u8, 109u8, 118u8, 103u8, + 111u8, 102u8, 103u8, 111u8, 109u8, 103u8, 111u8, 110u8, 103u8, 111u8, + 116u8, 103u8, 114u8, 99u8, 103u8, 114u8, 116u8, 103u8, 117u8, 0u8, 103u8, + 118u8, 114u8, 103u8, 119u8, 99u8, 103u8, 119u8, 116u8, 104u8, 97u8, 107u8, + 104u8, 97u8, 122u8, 104u8, 100u8, 121u8, 104u8, 101u8, 0u8, 104u8, 105u8, + 0u8, 104u8, 108u8, 117u8, 104u8, 109u8, 100u8, 104u8, 110u8, 100u8, 104u8, + 110u8, 101u8, 104u8, 110u8, 106u8, 104u8, 110u8, 111u8, 104u8, 111u8, 99u8, + 104u8, 111u8, 106u8, 104u8, 115u8, 110u8, 104u8, 121u8, 0u8, 105u8, 105u8, + 0u8, 105u8, 110u8, 104u8, 105u8, 117u8, 0u8, 105u8, 119u8, 0u8, 106u8, + 97u8, 0u8, 106u8, 105u8, 0u8, 106u8, 109u8, 108u8, 107u8, 97u8, 0u8, 107u8, + 97u8, 97u8, 107u8, 97u8, 119u8, 107u8, 98u8, 100u8, 107u8, 98u8, 121u8, + 107u8, 100u8, 116u8, 107u8, 102u8, 114u8, 107u8, 102u8, 121u8, 107u8, + 104u8, 98u8, 107u8, 104u8, 110u8, 107u8, 104u8, 116u8, 107u8, 104u8, 119u8, + 107u8, 106u8, 103u8, 107u8, 107u8, 0u8, 107u8, 109u8, 0u8, 107u8, 110u8, + 0u8, 107u8, 111u8, 0u8, 107u8, 111u8, 105u8, 107u8, 111u8, 107u8, 107u8, + 113u8, 121u8, 107u8, 114u8, 99u8, 107u8, 114u8, 117u8, 107u8, 115u8, 0u8, + 107u8, 116u8, 98u8, 107u8, 117u8, 109u8, 107u8, 118u8, 0u8, 107u8, 118u8, + 120u8, 107u8, 120u8, 99u8, 107u8, 120u8, 108u8, 107u8, 120u8, 109u8, 107u8, + 120u8, 112u8, 107u8, 121u8, 0u8, 107u8, 122u8, 104u8, 108u8, 97u8, 98u8, + 108u8, 97u8, 100u8, 108u8, 97u8, 104u8, 108u8, 98u8, 101u8, 108u8, 99u8, + 112u8, 108u8, 101u8, 112u8, 108u8, 101u8, 122u8, 108u8, 105u8, 102u8, + 108u8, 105u8, 115u8, 108u8, 107u8, 105u8, 108u8, 109u8, 110u8, 108u8, + 111u8, 0u8, 108u8, 114u8, 99u8, 108u8, 117u8, 122u8, 108u8, 119u8, 108u8, + 108u8, 122u8, 104u8, 109u8, 97u8, 103u8, 109u8, 97u8, 105u8, 109u8, 100u8, + 101u8, 109u8, 100u8, 102u8, 109u8, 100u8, 120u8, 109u8, 102u8, 97u8, 109u8, + 103u8, 112u8, 109u8, 107u8, 0u8, 109u8, 107u8, 105u8, 109u8, 108u8, 0u8, + 109u8, 110u8, 0u8, 109u8, 110u8, 105u8, 109u8, 110u8, 119u8, 109u8, 114u8, + 0u8, 109u8, 114u8, 100u8, 109u8, 114u8, 106u8, 109u8, 114u8, 111u8, 109u8, + 116u8, 114u8, 109u8, 118u8, 121u8, 109u8, 119u8, 114u8, 109u8, 119u8, + 119u8, 109u8, 121u8, 0u8, 109u8, 121u8, 109u8, 109u8, 121u8, 118u8, 109u8, + 121u8, 122u8, 109u8, 122u8, 110u8, 110u8, 97u8, 110u8, 110u8, 101u8, 0u8, + 110u8, 101u8, 119u8, 110u8, 110u8, 112u8, 110u8, 111u8, 100u8, 110u8, + 111u8, 101u8, 110u8, 111u8, 110u8, 110u8, 113u8, 111u8, 110u8, 115u8, + 107u8, 110u8, 115u8, 116u8, 111u8, 106u8, 0u8, 111u8, 106u8, 115u8, 111u8, + 114u8, 0u8, 111u8, 114u8, 117u8, 111u8, 115u8, 0u8, 111u8, 115u8, 97u8, + 111u8, 116u8, 97u8, 111u8, 116u8, 107u8, 111u8, 117u8, 105u8, 112u8, 97u8, + 0u8, 112u8, 97u8, 108u8, 112u8, 101u8, 111u8, 112u8, 104u8, 108u8, 112u8, + 104u8, 110u8, 112u8, 107u8, 97u8, 112u8, 110u8, 116u8, 112u8, 112u8, 97u8, + 112u8, 114u8, 97u8, 112u8, 114u8, 100u8, 112u8, 115u8, 0u8, 114u8, 97u8, + 106u8, 114u8, 104u8, 103u8, 114u8, 105u8, 102u8, 114u8, 106u8, 115u8, + 114u8, 107u8, 116u8, 114u8, 109u8, 116u8, 114u8, 117u8, 0u8, 114u8, 117u8, + 101u8, 114u8, 121u8, 117u8, 115u8, 97u8, 0u8, 115u8, 97u8, 104u8, 115u8, + 97u8, 116u8, 115u8, 97u8, 122u8, 115u8, 99u8, 107u8, 115u8, 99u8, 108u8, + 115u8, 100u8, 0u8, 115u8, 100u8, 104u8, 115u8, 103u8, 97u8, 
115u8, 103u8, + 119u8, 115u8, 104u8, 105u8, 115u8, 104u8, 110u8, 115u8, 104u8, 117u8, + 115u8, 105u8, 0u8, 115u8, 107u8, 114u8, 115u8, 109u8, 112u8, 115u8, 111u8, + 103u8, 115u8, 111u8, 117u8, 115u8, 114u8, 0u8, 115u8, 114u8, 98u8, 115u8, + 114u8, 120u8, 115u8, 119u8, 98u8, 115u8, 119u8, 118u8, 115u8, 121u8, 108u8, + 115u8, 121u8, 114u8, 116u8, 97u8, 0u8, 116u8, 97u8, 106u8, 116u8, 99u8, + 121u8, 116u8, 100u8, 100u8, 116u8, 100u8, 103u8, 116u8, 100u8, 104u8, + 116u8, 101u8, 0u8, 116u8, 103u8, 0u8, 116u8, 104u8, 0u8, 116u8, 104u8, + 108u8, 116u8, 104u8, 113u8, 116u8, 104u8, 114u8, 116u8, 105u8, 0u8, 116u8, + 105u8, 103u8, 116u8, 107u8, 116u8, 116u8, 114u8, 119u8, 116u8, 115u8, + 100u8, 116u8, 115u8, 102u8, 116u8, 115u8, 106u8, 116u8, 116u8, 0u8, 116u8, + 116u8, 115u8, 116u8, 120u8, 103u8, 116u8, 120u8, 111u8, 116u8, 121u8, + 118u8, 117u8, 100u8, 105u8, 117u8, 100u8, 109u8, 117u8, 103u8, 0u8, 117u8, + 103u8, 97u8, 117u8, 107u8, 0u8, 117u8, 110u8, 114u8, 117u8, 110u8, 120u8, + 117u8, 114u8, 0u8, 118u8, 97u8, 105u8, 119u8, 97u8, 108u8, 119u8, 98u8, + 113u8, 119u8, 98u8, 114u8, 119u8, 110u8, 105u8, 119u8, 115u8, 103u8, 119u8, + 116u8, 109u8, 119u8, 117u8, 117u8, 120u8, 99u8, 111u8, 120u8, 99u8, 114u8, + 120u8, 108u8, 99u8, 120u8, 108u8, 100u8, 120u8, 109u8, 102u8, 120u8, 109u8, + 110u8, 120u8, 109u8, 114u8, 120u8, 110u8, 97u8, 120u8, 110u8, 114u8, 120u8, + 112u8, 114u8, 120u8, 115u8, 97u8, 120u8, 115u8, 114u8, 121u8, 105u8, 0u8, + 121u8, 117u8, 101u8, 122u8, 100u8, 106u8, 122u8, 103u8, 104u8, 122u8, + 104u8, 0u8, 122u8, 104u8, 120u8, 122u8, 107u8, 116u8, + ]) + }, + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, 84u8, 105u8, 98u8, + 116u8, 67u8, 121u8, 114u8, 108u8, 65u8, 118u8, 115u8, 116u8, 65u8, 114u8, + 97u8, 98u8, 65u8, 104u8, 111u8, 109u8, 65u8, 114u8, 97u8, 98u8, 88u8, + 115u8, 117u8, 120u8, 67u8, 121u8, 114u8, 108u8, 69u8, 116u8, 104u8, 105u8, + 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, + 65u8, 114u8, 109u8, 105u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, + 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 66u8, 101u8, 110u8, + 103u8, 83u8, 103u8, 110u8, 119u8, 67u8, 121u8, 114u8, 108u8, 65u8, 114u8, + 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 65u8, + 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 66u8, 97u8, 109u8, 117u8, + 69u8, 116u8, 104u8, 105u8, 67u8, 121u8, 114u8, 108u8, 65u8, 114u8, 97u8, + 98u8, 84u8, 97u8, 109u8, 108u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, + 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 68u8, 101u8, 118u8, 97u8, 65u8, + 114u8, 97u8, 98u8, 71u8, 114u8, 101u8, 107u8, 68u8, 101u8, 118u8, 97u8, + 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 69u8, 116u8, 104u8, + 105u8, 68u8, 101u8, 118u8, 97u8, 84u8, 97u8, 118u8, 116u8, 66u8, 101u8, + 110u8, 103u8, 84u8, 105u8, 98u8, 116u8, 66u8, 101u8, 110u8, 103u8, 65u8, + 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 68u8, + 101u8, 118u8, 97u8, 66u8, 97u8, 115u8, 115u8, 69u8, 116u8, 104u8, 105u8, + 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 69u8, 116u8, 104u8, + 105u8, 67u8, 97u8, 107u8, 109u8, 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, + 114u8, 108u8, 67u8, 104u8, 101u8, 114u8, 65u8, 114u8, 97u8, 98u8, 67u8, + 104u8, 97u8, 109u8, 65u8, 114u8, 97u8, 98u8, 83u8, 111u8, 121u8, 111u8, + 67u8, 111u8, 112u8, 116u8, 67u8, 97u8, 110u8, 115u8, 67u8, 121u8, 114u8, + 108u8, 67u8, 97u8, 110u8, 115u8, 67u8, 97u8, 110u8, 115u8, 67u8, 97u8, + 110u8, 115u8, 80u8, 97u8, 117u8, 99u8, 
67u8, 121u8, 114u8, 108u8, 67u8, + 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, + 65u8, 114u8, 97u8, 98u8, 77u8, 101u8, 100u8, 102u8, 68u8, 101u8, 118u8, + 97u8, 77u8, 111u8, 110u8, 103u8, 69u8, 116u8, 104u8, 105u8, 68u8, 101u8, + 118u8, 97u8, 84u8, 104u8, 97u8, 97u8, 84u8, 105u8, 98u8, 116u8, 69u8, + 103u8, 121u8, 112u8, 75u8, 97u8, 108u8, 105u8, 71u8, 114u8, 101u8, 107u8, + 71u8, 111u8, 110u8, 109u8, 73u8, 116u8, 97u8, 108u8, 65u8, 114u8, 97u8, + 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8, + 115u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 69u8, 116u8, + 104u8, 105u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 65u8, + 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 69u8, 116u8, 104u8, 105u8, + 69u8, 116u8, 104u8, 105u8, 68u8, 101u8, 118u8, 97u8, 84u8, 101u8, 108u8, + 117u8, 71u8, 111u8, 116u8, 104u8, 67u8, 112u8, 114u8, 116u8, 66u8, 101u8, + 110u8, 103u8, 71u8, 117u8, 106u8, 114u8, 68u8, 101u8, 118u8, 97u8, 65u8, + 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8, 115u8, 65u8, + 114u8, 97u8, 98u8, 69u8, 116u8, 104u8, 105u8, 72u8, 101u8, 98u8, 114u8, + 68u8, 101u8, 118u8, 97u8, 72u8, 108u8, 117u8, 119u8, 80u8, 108u8, 114u8, + 100u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 72u8, 109u8, + 110u8, 112u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 68u8, + 101u8, 118u8, 97u8, 72u8, 97u8, 110u8, 115u8, 65u8, 114u8, 109u8, 110u8, + 89u8, 105u8, 105u8, 105u8, 67u8, 121u8, 114u8, 108u8, 67u8, 97u8, 110u8, + 115u8, 72u8, 101u8, 98u8, 114u8, 74u8, 112u8, 97u8, 110u8, 72u8, 101u8, + 98u8, 114u8, 68u8, 101u8, 118u8, 97u8, 71u8, 101u8, 111u8, 114u8, 67u8, + 121u8, 114u8, 108u8, 75u8, 97u8, 119u8, 105u8, 67u8, 121u8, 114u8, 108u8, + 65u8, 114u8, 97u8, 98u8, 84u8, 104u8, 97u8, 105u8, 68u8, 101u8, 118u8, + 97u8, 68u8, 101u8, 118u8, 97u8, 84u8, 97u8, 108u8, 117u8, 68u8, 101u8, + 118u8, 97u8, 77u8, 121u8, 109u8, 114u8, 65u8, 114u8, 97u8, 98u8, 76u8, + 97u8, 111u8, 111u8, 67u8, 121u8, 114u8, 108u8, 75u8, 104u8, 109u8, 114u8, + 75u8, 110u8, 100u8, 97u8, 75u8, 111u8, 114u8, 101u8, 67u8, 121u8, 114u8, + 108u8, 68u8, 101u8, 118u8, 97u8, 69u8, 116u8, 104u8, 105u8, 67u8, 121u8, + 114u8, 108u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 69u8, + 116u8, 104u8, 105u8, 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, + 65u8, 114u8, 97u8, 98u8, 69u8, 116u8, 104u8, 105u8, 68u8, 101u8, 118u8, + 97u8, 84u8, 104u8, 97u8, 105u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, + 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, 76u8, 105u8, 110u8, 97u8, 72u8, + 101u8, 98u8, 114u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, + 84u8, 104u8, 97u8, 105u8, 76u8, 101u8, 112u8, 99u8, 67u8, 121u8, 114u8, + 108u8, 68u8, 101u8, 118u8, 97u8, 76u8, 105u8, 115u8, 117u8, 65u8, 114u8, + 97u8, 98u8, 84u8, 101u8, 108u8, 117u8, 76u8, 97u8, 111u8, 111u8, 65u8, + 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 84u8, 104u8, 97u8, 105u8, 72u8, + 97u8, 110u8, 115u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, + 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, 69u8, 116u8, 104u8, + 105u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, + 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, 77u8, 108u8, 121u8, 109u8, 67u8, + 121u8, 114u8, 108u8, 66u8, 101u8, 110u8, 103u8, 77u8, 121u8, 109u8, 114u8, + 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 67u8, 121u8, 114u8, + 108u8, 77u8, 114u8, 111u8, 111u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, + 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 72u8, 109u8, 110u8, 112u8, 77u8, + 121u8, 109u8, 114u8, 
69u8, 116u8, 104u8, 105u8, 67u8, 121u8, 114u8, 108u8, + 77u8, 97u8, 110u8, 100u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8, + 115u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 87u8, 99u8, + 104u8, 111u8, 76u8, 97u8, 110u8, 97u8, 68u8, 101u8, 118u8, 97u8, 82u8, + 117u8, 110u8, 114u8, 78u8, 107u8, 111u8, 111u8, 67u8, 97u8, 110u8, 115u8, + 84u8, 110u8, 115u8, 97u8, 67u8, 97u8, 110u8, 115u8, 67u8, 97u8, 110u8, + 115u8, 79u8, 114u8, 121u8, 97u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, + 114u8, 108u8, 79u8, 115u8, 103u8, 101u8, 65u8, 114u8, 97u8, 98u8, 79u8, + 114u8, 107u8, 104u8, 79u8, 117u8, 103u8, 114u8, 71u8, 117u8, 114u8, 117u8, + 80u8, 104u8, 108u8, 105u8, 88u8, 112u8, 101u8, 111u8, 65u8, 114u8, 97u8, + 98u8, 80u8, 104u8, 110u8, 120u8, 66u8, 114u8, 97u8, 104u8, 71u8, 114u8, + 101u8, 107u8, 68u8, 101u8, 118u8, 97u8, 75u8, 104u8, 97u8, 114u8, 65u8, + 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, 97u8, 82u8, + 111u8, 104u8, 103u8, 84u8, 102u8, 110u8, 103u8, 68u8, 101u8, 118u8, 97u8, + 66u8, 101u8, 110u8, 103u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, + 108u8, 67u8, 121u8, 114u8, 108u8, 75u8, 97u8, 110u8, 97u8, 68u8, 101u8, + 118u8, 97u8, 67u8, 121u8, 114u8, 108u8, 79u8, 108u8, 99u8, 107u8, 83u8, + 97u8, 117u8, 114u8, 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, + 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 79u8, 103u8, 97u8, 109u8, + 69u8, 116u8, 104u8, 105u8, 84u8, 102u8, 110u8, 103u8, 77u8, 121u8, 109u8, + 114u8, 65u8, 114u8, 97u8, 98u8, 83u8, 105u8, 110u8, 104u8, 65u8, 114u8, + 97u8, 98u8, 83u8, 97u8, 109u8, 114u8, 83u8, 111u8, 103u8, 100u8, 84u8, + 104u8, 97u8, 105u8, 67u8, 121u8, 114u8, 108u8, 83u8, 111u8, 114u8, 97u8, + 68u8, 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 68u8, 101u8, 118u8, + 97u8, 66u8, 101u8, 110u8, 103u8, 83u8, 121u8, 114u8, 99u8, 84u8, 97u8, + 109u8, 108u8, 68u8, 101u8, 118u8, 97u8, 75u8, 110u8, 100u8, 97u8, 84u8, + 97u8, 108u8, 101u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, + 84u8, 101u8, 108u8, 117u8, 67u8, 121u8, 114u8, 108u8, 84u8, 104u8, 97u8, + 105u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, 118u8, 97u8, 68u8, 101u8, + 118u8, 97u8, 69u8, 116u8, 104u8, 105u8, 69u8, 116u8, 104u8, 105u8, 68u8, + 101u8, 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 71u8, 114u8, 101u8, 107u8, + 68u8, 101u8, 118u8, 97u8, 84u8, 105u8, 98u8, 116u8, 67u8, 121u8, 114u8, + 108u8, 84u8, 104u8, 97u8, 105u8, 84u8, 97u8, 110u8, 103u8, 84u8, 111u8, + 116u8, 111u8, 67u8, 121u8, 114u8, 108u8, 65u8, 103u8, 104u8, 98u8, 67u8, + 121u8, 114u8, 108u8, 65u8, 114u8, 97u8, 98u8, 85u8, 103u8, 97u8, 114u8, + 67u8, 121u8, 114u8, 108u8, 66u8, 101u8, 110u8, 103u8, 66u8, 101u8, 110u8, + 103u8, 65u8, 114u8, 97u8, 98u8, 86u8, 97u8, 105u8, 105u8, 69u8, 116u8, + 104u8, 105u8, 84u8, 101u8, 108u8, 117u8, 68u8, 101u8, 118u8, 97u8, 65u8, + 114u8, 97u8, 98u8, 71u8, 111u8, 110u8, 103u8, 68u8, 101u8, 118u8, 97u8, + 72u8, 97u8, 110u8, 115u8, 67u8, 104u8, 114u8, 115u8, 67u8, 97u8, 114u8, + 105u8, 76u8, 121u8, 99u8, 105u8, 76u8, 121u8, 100u8, 105u8, 71u8, 101u8, + 111u8, 114u8, 77u8, 97u8, 110u8, 105u8, 77u8, 101u8, 114u8, 99u8, 78u8, + 97u8, 114u8, 98u8, 68u8, 101u8, 118u8, 97u8, 80u8, 114u8, 116u8, 105u8, + 83u8, 97u8, 114u8, 98u8, 68u8, 101u8, 118u8, 97u8, 72u8, 101u8, 98u8, + 114u8, 72u8, 97u8, 110u8, 116u8, 65u8, 114u8, 97u8, 98u8, 84u8, 102u8, + 110u8, 103u8, 72u8, 97u8, 110u8, 115u8, 78u8, 115u8, 104u8, 117u8, 75u8, + 105u8, 116u8, 115u8, + ]) + }, + ) + }, + lr2s: unsafe { + #[allow(unused_unsafe)] + ::zerovec::ZeroMap2d::from_parts_unchecked( + unsafe { + 
::zerovec::ZeroVec::from_bytes_unchecked(&[ + 97u8, 122u8, 0u8, 104u8, 97u8, 0u8, 107u8, 107u8, 0u8, 107u8, 117u8, 0u8, + 107u8, 121u8, 0u8, 109u8, 97u8, 110u8, 109u8, 110u8, 0u8, 109u8, 115u8, + 0u8, 112u8, 97u8, 0u8, 114u8, 105u8, 102u8, 115u8, 100u8, 0u8, 115u8, + 114u8, 0u8, 116u8, 103u8, 0u8, 117u8, 103u8, 0u8, 117u8, 110u8, 114u8, + 117u8, 122u8, 0u8, 121u8, 117u8, 101u8, 122u8, 104u8, 0u8, + ]) + }, + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 3u8, 0u8, 0u8, 0u8, 5u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 10u8, 0u8, 0u8, + 0u8, 12u8, 0u8, 0u8, 0u8, 13u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 15u8, + 0u8, 0u8, 0u8, 16u8, 0u8, 0u8, 0u8, 17u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, + 0u8, 22u8, 0u8, 0u8, 0u8, 23u8, 0u8, 0u8, 0u8, 25u8, 0u8, 0u8, 0u8, 26u8, + 0u8, 0u8, 0u8, 28u8, 0u8, 0u8, 0u8, 29u8, 0u8, 0u8, 0u8, 44u8, 0u8, 0u8, + 0u8, + ]) + }, + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 73u8, 81u8, 0u8, 73u8, 82u8, 0u8, 82u8, 85u8, 0u8, 67u8, 77u8, 0u8, 83u8, + 68u8, 0u8, 65u8, 70u8, 0u8, 67u8, 78u8, 0u8, 73u8, 82u8, 0u8, 77u8, 78u8, + 0u8, 76u8, 66u8, 0u8, 67u8, 78u8, 0u8, 84u8, 82u8, 0u8, 71u8, 78u8, 0u8, + 67u8, 78u8, 0u8, 67u8, 67u8, 0u8, 80u8, 75u8, 0u8, 78u8, 76u8, 0u8, 73u8, + 78u8, 0u8, 77u8, 69u8, 0u8, 82u8, 79u8, 0u8, 82u8, 85u8, 0u8, 84u8, 82u8, + 0u8, 80u8, 75u8, 0u8, 75u8, 90u8, 0u8, 77u8, 78u8, 0u8, 78u8, 80u8, 0u8, + 65u8, 70u8, 0u8, 67u8, 78u8, 0u8, 67u8, 78u8, 0u8, 65u8, 85u8, 0u8, 66u8, + 78u8, 0u8, 71u8, 66u8, 0u8, 71u8, 70u8, 0u8, 72u8, 75u8, 0u8, 73u8, 68u8, + 0u8, 77u8, 79u8, 0u8, 80u8, 65u8, 0u8, 80u8, 70u8, 0u8, 80u8, 72u8, 0u8, + 83u8, 82u8, 0u8, 84u8, 72u8, 0u8, 84u8, 87u8, 0u8, 85u8, 83u8, 0u8, 86u8, + 78u8, 0u8, + ]) + }, + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, + 108u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, + 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, + 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 76u8, 97u8, 116u8, + 110u8, 78u8, 107u8, 111u8, 111u8, 77u8, 111u8, 110u8, 103u8, 65u8, 114u8, + 97u8, 98u8, 65u8, 114u8, 97u8, 98u8, 76u8, 97u8, 116u8, 110u8, 68u8, 101u8, + 118u8, 97u8, 76u8, 97u8, 116u8, 110u8, 76u8, 97u8, 116u8, 110u8, 76u8, + 97u8, 116u8, 110u8, 76u8, 97u8, 116u8, 110u8, 65u8, 114u8, 97u8, 98u8, + 67u8, 121u8, 114u8, 108u8, 67u8, 121u8, 114u8, 108u8, 68u8, 101u8, 118u8, + 97u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, 72u8, 97u8, + 110u8, 115u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, + 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, + 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, + 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, + 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, 72u8, + 97u8, 110u8, 116u8, 72u8, 97u8, 110u8, 116u8, + ]) + }, + ) + }, + l2r: unsafe { + #[allow(unused_unsafe)] + ::zerovec::ZeroMap::from_parts_unchecked( + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 97u8, 97u8, 0u8, 97u8, 98u8, 0u8, 97u8, 98u8, 114u8, 97u8, 99u8, 101u8, + 97u8, 99u8, 104u8, 97u8, 100u8, 97u8, 97u8, 100u8, 112u8, 97u8, 100u8, + 121u8, 97u8, 101u8, 0u8, 97u8, 101u8, 98u8, 97u8, 102u8, 0u8, 97u8, 103u8, + 113u8, 97u8, 104u8, 111u8, 97u8, 106u8, 116u8, 97u8, 107u8, 0u8, 97u8, + 107u8, 107u8, 97u8, 108u8, 110u8, 97u8, 108u8, 116u8, 97u8, 109u8, 0u8, + 97u8, 109u8, 111u8, 97u8, 110u8, 0u8, 97u8, 110u8, 110u8, 97u8, 111u8, + 122u8, 97u8, 112u8, 
100u8, 97u8, 114u8, 0u8, 97u8, 114u8, 99u8, 97u8, + 114u8, 110u8, 97u8, 114u8, 111u8, 97u8, 114u8, 113u8, 97u8, 114u8, 115u8, + 97u8, 114u8, 121u8, 97u8, 114u8, 122u8, 97u8, 115u8, 0u8, 97u8, 115u8, + 97u8, 97u8, 115u8, 101u8, 97u8, 115u8, 116u8, 97u8, 116u8, 106u8, 97u8, + 118u8, 0u8, 97u8, 119u8, 97u8, 97u8, 121u8, 0u8, 97u8, 122u8, 0u8, 98u8, + 97u8, 0u8, 98u8, 97u8, 108u8, 98u8, 97u8, 110u8, 98u8, 97u8, 112u8, 98u8, + 97u8, 114u8, 98u8, 97u8, 115u8, 98u8, 97u8, 120u8, 98u8, 98u8, 99u8, 98u8, + 98u8, 106u8, 98u8, 99u8, 105u8, 98u8, 101u8, 0u8, 98u8, 101u8, 106u8, 98u8, + 101u8, 109u8, 98u8, 101u8, 119u8, 98u8, 101u8, 122u8, 98u8, 102u8, 100u8, + 98u8, 102u8, 113u8, 98u8, 102u8, 116u8, 98u8, 102u8, 121u8, 98u8, 103u8, + 0u8, 98u8, 103u8, 99u8, 98u8, 103u8, 110u8, 98u8, 103u8, 120u8, 98u8, + 104u8, 98u8, 98u8, 104u8, 105u8, 98u8, 104u8, 111u8, 98u8, 105u8, 0u8, + 98u8, 105u8, 107u8, 98u8, 105u8, 110u8, 98u8, 106u8, 106u8, 98u8, 106u8, + 110u8, 98u8, 106u8, 116u8, 98u8, 107u8, 109u8, 98u8, 107u8, 117u8, 98u8, + 108u8, 97u8, 98u8, 108u8, 103u8, 98u8, 108u8, 116u8, 98u8, 109u8, 0u8, + 98u8, 109u8, 113u8, 98u8, 110u8, 0u8, 98u8, 111u8, 0u8, 98u8, 112u8, 121u8, + 98u8, 113u8, 105u8, 98u8, 113u8, 118u8, 98u8, 114u8, 0u8, 98u8, 114u8, + 97u8, 98u8, 114u8, 104u8, 98u8, 114u8, 120u8, 98u8, 115u8, 0u8, 98u8, + 115u8, 113u8, 98u8, 115u8, 115u8, 98u8, 116u8, 111u8, 98u8, 116u8, 118u8, + 98u8, 117u8, 97u8, 98u8, 117u8, 99u8, 98u8, 117u8, 103u8, 98u8, 117u8, + 109u8, 98u8, 118u8, 98u8, 98u8, 121u8, 110u8, 98u8, 121u8, 118u8, 98u8, + 122u8, 101u8, 99u8, 97u8, 0u8, 99u8, 97u8, 100u8, 99u8, 99u8, 104u8, 99u8, + 99u8, 112u8, 99u8, 101u8, 0u8, 99u8, 101u8, 98u8, 99u8, 103u8, 103u8, 99u8, + 104u8, 0u8, 99u8, 104u8, 107u8, 99u8, 104u8, 109u8, 99u8, 104u8, 111u8, + 99u8, 104u8, 112u8, 99u8, 104u8, 114u8, 99u8, 105u8, 99u8, 99u8, 106u8, + 97u8, 99u8, 106u8, 109u8, 99u8, 107u8, 98u8, 99u8, 108u8, 99u8, 99u8, + 109u8, 103u8, 99u8, 111u8, 0u8, 99u8, 111u8, 112u8, 99u8, 112u8, 115u8, + 99u8, 114u8, 0u8, 99u8, 114u8, 103u8, 99u8, 114u8, 104u8, 99u8, 114u8, + 107u8, 99u8, 114u8, 108u8, 99u8, 114u8, 115u8, 99u8, 115u8, 0u8, 99u8, + 115u8, 98u8, 99u8, 115u8, 119u8, 99u8, 116u8, 100u8, 99u8, 117u8, 0u8, + 99u8, 118u8, 0u8, 99u8, 121u8, 0u8, 100u8, 97u8, 0u8, 100u8, 97u8, 102u8, + 100u8, 97u8, 107u8, 100u8, 97u8, 114u8, 100u8, 97u8, 118u8, 100u8, 99u8, + 99u8, 100u8, 101u8, 0u8, 100u8, 101u8, 110u8, 100u8, 103u8, 114u8, 100u8, + 106u8, 101u8, 100u8, 109u8, 102u8, 100u8, 110u8, 106u8, 100u8, 111u8, + 105u8, 100u8, 114u8, 104u8, 100u8, 115u8, 98u8, 100u8, 116u8, 109u8, 100u8, + 116u8, 112u8, 100u8, 116u8, 121u8, 100u8, 117u8, 97u8, 100u8, 118u8, 0u8, + 100u8, 121u8, 111u8, 100u8, 121u8, 117u8, 100u8, 122u8, 0u8, 101u8, 98u8, + 117u8, 101u8, 101u8, 0u8, 101u8, 102u8, 105u8, 101u8, 103u8, 108u8, 101u8, + 103u8, 121u8, 101u8, 107u8, 121u8, 101u8, 108u8, 0u8, 101u8, 110u8, 0u8, + 101u8, 111u8, 0u8, 101u8, 115u8, 0u8, 101u8, 115u8, 103u8, 101u8, 115u8, + 117u8, 101u8, 116u8, 0u8, 101u8, 116u8, 116u8, 101u8, 117u8, 0u8, 101u8, + 119u8, 111u8, 101u8, 120u8, 116u8, 102u8, 97u8, 0u8, 102u8, 97u8, 110u8, + 102u8, 102u8, 0u8, 102u8, 102u8, 109u8, 102u8, 105u8, 0u8, 102u8, 105u8, + 97u8, 102u8, 105u8, 108u8, 102u8, 105u8, 116u8, 102u8, 106u8, 0u8, 102u8, + 111u8, 0u8, 102u8, 111u8, 110u8, 102u8, 114u8, 0u8, 102u8, 114u8, 99u8, + 102u8, 114u8, 112u8, 102u8, 114u8, 114u8, 102u8, 114u8, 115u8, 102u8, + 117u8, 98u8, 102u8, 117u8, 100u8, 102u8, 117u8, 102u8, 102u8, 117u8, 113u8, + 102u8, 117u8, 114u8, 102u8, 117u8, 118u8, 102u8, 118u8, 
114u8, 102u8, + 121u8, 0u8, 103u8, 97u8, 0u8, 103u8, 97u8, 97u8, 103u8, 97u8, 103u8, 103u8, + 97u8, 110u8, 103u8, 97u8, 121u8, 103u8, 98u8, 109u8, 103u8, 98u8, 122u8, + 103u8, 99u8, 114u8, 103u8, 100u8, 0u8, 103u8, 101u8, 122u8, 103u8, 103u8, + 110u8, 103u8, 105u8, 108u8, 103u8, 106u8, 107u8, 103u8, 106u8, 117u8, + 103u8, 108u8, 0u8, 103u8, 108u8, 107u8, 103u8, 110u8, 0u8, 103u8, 111u8, + 109u8, 103u8, 111u8, 110u8, 103u8, 111u8, 114u8, 103u8, 111u8, 115u8, + 103u8, 111u8, 116u8, 103u8, 114u8, 99u8, 103u8, 114u8, 116u8, 103u8, 115u8, + 119u8, 103u8, 117u8, 0u8, 103u8, 117u8, 98u8, 103u8, 117u8, 99u8, 103u8, + 117u8, 114u8, 103u8, 117u8, 122u8, 103u8, 118u8, 0u8, 103u8, 118u8, 114u8, + 103u8, 119u8, 105u8, 104u8, 97u8, 0u8, 104u8, 97u8, 107u8, 104u8, 97u8, + 119u8, 104u8, 97u8, 122u8, 104u8, 101u8, 0u8, 104u8, 105u8, 0u8, 104u8, + 105u8, 102u8, 104u8, 105u8, 108u8, 104u8, 108u8, 117u8, 104u8, 109u8, + 100u8, 104u8, 110u8, 100u8, 104u8, 110u8, 101u8, 104u8, 110u8, 106u8, + 104u8, 110u8, 110u8, 104u8, 110u8, 111u8, 104u8, 111u8, 0u8, 104u8, 111u8, + 99u8, 104u8, 111u8, 106u8, 104u8, 114u8, 0u8, 104u8, 115u8, 98u8, 104u8, + 115u8, 110u8, 104u8, 116u8, 0u8, 104u8, 117u8, 0u8, 104u8, 117u8, 114u8, + 104u8, 121u8, 0u8, 104u8, 122u8, 0u8, 105u8, 97u8, 0u8, 105u8, 98u8, 97u8, + 105u8, 98u8, 98u8, 105u8, 100u8, 0u8, 105u8, 102u8, 101u8, 105u8, 103u8, + 0u8, 105u8, 105u8, 0u8, 105u8, 107u8, 0u8, 105u8, 108u8, 111u8, 105u8, + 110u8, 0u8, 105u8, 110u8, 104u8, 105u8, 111u8, 0u8, 105u8, 115u8, 0u8, + 105u8, 116u8, 0u8, 105u8, 117u8, 0u8, 105u8, 119u8, 0u8, 105u8, 122u8, + 104u8, 106u8, 97u8, 0u8, 106u8, 97u8, 109u8, 106u8, 98u8, 111u8, 106u8, + 103u8, 111u8, 106u8, 105u8, 0u8, 106u8, 109u8, 99u8, 106u8, 109u8, 108u8, + 106u8, 117u8, 116u8, 106u8, 118u8, 0u8, 106u8, 119u8, 0u8, 107u8, 97u8, + 0u8, 107u8, 97u8, 97u8, 107u8, 97u8, 98u8, 107u8, 97u8, 99u8, 107u8, 97u8, + 106u8, 107u8, 97u8, 109u8, 107u8, 97u8, 111u8, 107u8, 97u8, 119u8, 107u8, + 98u8, 100u8, 107u8, 98u8, 121u8, 107u8, 99u8, 103u8, 107u8, 99u8, 107u8, + 107u8, 100u8, 101u8, 107u8, 100u8, 104u8, 107u8, 100u8, 116u8, 107u8, + 101u8, 97u8, 107u8, 101u8, 110u8, 107u8, 102u8, 111u8, 107u8, 102u8, 114u8, + 107u8, 102u8, 121u8, 107u8, 103u8, 0u8, 107u8, 103u8, 101u8, 107u8, 103u8, + 112u8, 107u8, 104u8, 97u8, 107u8, 104u8, 98u8, 107u8, 104u8, 110u8, 107u8, + 104u8, 113u8, 107u8, 104u8, 116u8, 107u8, 104u8, 119u8, 107u8, 105u8, 0u8, + 107u8, 105u8, 117u8, 107u8, 106u8, 0u8, 107u8, 106u8, 103u8, 107u8, 107u8, + 0u8, 107u8, 107u8, 106u8, 107u8, 108u8, 0u8, 107u8, 108u8, 110u8, 107u8, + 109u8, 0u8, 107u8, 109u8, 98u8, 107u8, 110u8, 0u8, 107u8, 110u8, 102u8, + 107u8, 111u8, 0u8, 107u8, 111u8, 105u8, 107u8, 111u8, 107u8, 107u8, 111u8, + 115u8, 107u8, 112u8, 101u8, 107u8, 114u8, 99u8, 107u8, 114u8, 105u8, 107u8, + 114u8, 106u8, 107u8, 114u8, 108u8, 107u8, 114u8, 117u8, 107u8, 115u8, 0u8, + 107u8, 115u8, 98u8, 107u8, 115u8, 102u8, 107u8, 115u8, 104u8, 107u8, 116u8, + 114u8, 107u8, 117u8, 0u8, 107u8, 117u8, 109u8, 107u8, 118u8, 0u8, 107u8, + 118u8, 114u8, 107u8, 118u8, 120u8, 107u8, 119u8, 0u8, 107u8, 119u8, 107u8, + 107u8, 120u8, 108u8, 107u8, 120u8, 109u8, 107u8, 120u8, 112u8, 107u8, + 121u8, 0u8, 107u8, 122u8, 106u8, 107u8, 122u8, 116u8, 108u8, 97u8, 0u8, + 108u8, 97u8, 98u8, 108u8, 97u8, 100u8, 108u8, 97u8, 103u8, 108u8, 97u8, + 104u8, 108u8, 97u8, 106u8, 108u8, 98u8, 0u8, 108u8, 98u8, 101u8, 108u8, + 98u8, 119u8, 108u8, 99u8, 112u8, 108u8, 101u8, 112u8, 108u8, 101u8, 122u8, + 108u8, 103u8, 0u8, 108u8, 105u8, 0u8, 108u8, 105u8, 102u8, 108u8, 105u8, + 
106u8, 108u8, 105u8, 108u8, 108u8, 105u8, 115u8, 108u8, 106u8, 112u8, + 108u8, 107u8, 105u8, 108u8, 107u8, 116u8, 108u8, 109u8, 110u8, 108u8, + 109u8, 111u8, 108u8, 110u8, 0u8, 108u8, 111u8, 0u8, 108u8, 111u8, 108u8, + 108u8, 111u8, 122u8, 108u8, 114u8, 99u8, 108u8, 116u8, 0u8, 108u8, 116u8, + 103u8, 108u8, 117u8, 0u8, 108u8, 117u8, 97u8, 108u8, 117u8, 111u8, 108u8, + 117u8, 121u8, 108u8, 117u8, 122u8, 108u8, 118u8, 0u8, 108u8, 119u8, 108u8, + 108u8, 122u8, 104u8, 108u8, 122u8, 122u8, 109u8, 97u8, 100u8, 109u8, 97u8, + 102u8, 109u8, 97u8, 103u8, 109u8, 97u8, 105u8, 109u8, 97u8, 107u8, 109u8, + 97u8, 110u8, 109u8, 97u8, 115u8, 109u8, 97u8, 122u8, 109u8, 100u8, 102u8, + 109u8, 100u8, 104u8, 109u8, 100u8, 114u8, 109u8, 101u8, 110u8, 109u8, + 101u8, 114u8, 109u8, 102u8, 97u8, 109u8, 102u8, 101u8, 109u8, 103u8, 0u8, + 109u8, 103u8, 104u8, 109u8, 103u8, 111u8, 109u8, 103u8, 112u8, 109u8, + 103u8, 121u8, 109u8, 104u8, 0u8, 109u8, 105u8, 0u8, 109u8, 105u8, 99u8, + 109u8, 105u8, 110u8, 109u8, 107u8, 0u8, 109u8, 108u8, 0u8, 109u8, 108u8, + 115u8, 109u8, 110u8, 0u8, 109u8, 110u8, 105u8, 109u8, 110u8, 119u8, 109u8, + 111u8, 0u8, 109u8, 111u8, 101u8, 109u8, 111u8, 104u8, 109u8, 111u8, 115u8, + 109u8, 114u8, 0u8, 109u8, 114u8, 100u8, 109u8, 114u8, 106u8, 109u8, 114u8, + 111u8, 109u8, 115u8, 0u8, 109u8, 116u8, 0u8, 109u8, 116u8, 114u8, 109u8, + 117u8, 97u8, 109u8, 117u8, 115u8, 109u8, 118u8, 121u8, 109u8, 119u8, 107u8, + 109u8, 119u8, 114u8, 109u8, 119u8, 118u8, 109u8, 119u8, 119u8, 109u8, + 120u8, 99u8, 109u8, 121u8, 0u8, 109u8, 121u8, 118u8, 109u8, 121u8, 120u8, + 109u8, 121u8, 122u8, 109u8, 122u8, 110u8, 110u8, 97u8, 0u8, 110u8, 97u8, + 110u8, 110u8, 97u8, 112u8, 110u8, 97u8, 113u8, 110u8, 98u8, 0u8, 110u8, + 99u8, 104u8, 110u8, 100u8, 0u8, 110u8, 100u8, 99u8, 110u8, 100u8, 115u8, + 110u8, 101u8, 0u8, 110u8, 101u8, 119u8, 110u8, 103u8, 0u8, 110u8, 103u8, + 108u8, 110u8, 104u8, 101u8, 110u8, 104u8, 119u8, 110u8, 105u8, 106u8, + 110u8, 105u8, 117u8, 110u8, 106u8, 111u8, 110u8, 108u8, 0u8, 110u8, 109u8, + 103u8, 110u8, 110u8, 0u8, 110u8, 110u8, 104u8, 110u8, 110u8, 112u8, 110u8, + 111u8, 0u8, 110u8, 111u8, 100u8, 110u8, 111u8, 101u8, 110u8, 111u8, 110u8, + 110u8, 113u8, 111u8, 110u8, 114u8, 0u8, 110u8, 115u8, 107u8, 110u8, 115u8, + 111u8, 110u8, 115u8, 116u8, 110u8, 117u8, 115u8, 110u8, 118u8, 0u8, 110u8, + 120u8, 113u8, 110u8, 121u8, 0u8, 110u8, 121u8, 109u8, 110u8, 121u8, 110u8, + 110u8, 122u8, 105u8, 111u8, 99u8, 0u8, 111u8, 106u8, 0u8, 111u8, 106u8, + 115u8, 111u8, 107u8, 97u8, 111u8, 109u8, 0u8, 111u8, 114u8, 0u8, 111u8, + 115u8, 0u8, 111u8, 115u8, 97u8, 111u8, 116u8, 107u8, 111u8, 117u8, 105u8, + 112u8, 97u8, 0u8, 112u8, 97u8, 103u8, 112u8, 97u8, 108u8, 112u8, 97u8, + 109u8, 112u8, 97u8, 112u8, 112u8, 97u8, 117u8, 112u8, 99u8, 100u8, 112u8, + 99u8, 109u8, 112u8, 100u8, 99u8, 112u8, 100u8, 116u8, 112u8, 101u8, 111u8, + 112u8, 102u8, 108u8, 112u8, 104u8, 110u8, 112u8, 105u8, 115u8, 112u8, + 107u8, 97u8, 112u8, 107u8, 111u8, 112u8, 108u8, 0u8, 112u8, 109u8, 115u8, + 112u8, 110u8, 116u8, 112u8, 111u8, 110u8, 112u8, 112u8, 97u8, 112u8, 113u8, + 109u8, 112u8, 114u8, 97u8, 112u8, 114u8, 100u8, 112u8, 114u8, 103u8, 112u8, + 115u8, 0u8, 112u8, 116u8, 0u8, 112u8, 117u8, 117u8, 113u8, 117u8, 0u8, + 113u8, 117u8, 99u8, 113u8, 117u8, 103u8, 114u8, 97u8, 106u8, 114u8, 99u8, + 102u8, 114u8, 101u8, 106u8, 114u8, 103u8, 110u8, 114u8, 104u8, 103u8, + 114u8, 105u8, 97u8, 114u8, 105u8, 102u8, 114u8, 106u8, 115u8, 114u8, 107u8, + 116u8, 114u8, 109u8, 0u8, 114u8, 109u8, 102u8, 114u8, 109u8, 111u8, 114u8, + 109u8, 116u8, 
114u8, 109u8, 117u8, 114u8, 110u8, 0u8, 114u8, 110u8, 103u8, + 114u8, 111u8, 0u8, 114u8, 111u8, 98u8, 114u8, 111u8, 102u8, 114u8, 116u8, + 109u8, 114u8, 117u8, 0u8, 114u8, 117u8, 101u8, 114u8, 117u8, 103u8, 114u8, + 119u8, 0u8, 114u8, 119u8, 107u8, 114u8, 121u8, 117u8, 115u8, 97u8, 0u8, + 115u8, 97u8, 102u8, 115u8, 97u8, 104u8, 115u8, 97u8, 113u8, 115u8, 97u8, + 115u8, 115u8, 97u8, 116u8, 115u8, 97u8, 118u8, 115u8, 97u8, 122u8, 115u8, + 98u8, 112u8, 115u8, 99u8, 0u8, 115u8, 99u8, 107u8, 115u8, 99u8, 110u8, + 115u8, 99u8, 111u8, 115u8, 100u8, 0u8, 115u8, 100u8, 99u8, 115u8, 100u8, + 104u8, 115u8, 101u8, 0u8, 115u8, 101u8, 102u8, 115u8, 101u8, 104u8, 115u8, + 101u8, 105u8, 115u8, 101u8, 115u8, 115u8, 103u8, 0u8, 115u8, 103u8, 97u8, + 115u8, 103u8, 115u8, 115u8, 104u8, 105u8, 115u8, 104u8, 110u8, 115u8, + 105u8, 0u8, 115u8, 105u8, 100u8, 115u8, 107u8, 0u8, 115u8, 107u8, 114u8, + 115u8, 108u8, 0u8, 115u8, 108u8, 105u8, 115u8, 108u8, 121u8, 115u8, 109u8, + 0u8, 115u8, 109u8, 97u8, 115u8, 109u8, 100u8, 115u8, 109u8, 106u8, 115u8, + 109u8, 110u8, 115u8, 109u8, 112u8, 115u8, 109u8, 115u8, 115u8, 110u8, 0u8, + 115u8, 110u8, 98u8, 115u8, 110u8, 107u8, 115u8, 111u8, 0u8, 115u8, 111u8, + 103u8, 115u8, 111u8, 117u8, 115u8, 113u8, 0u8, 115u8, 114u8, 0u8, 115u8, + 114u8, 98u8, 115u8, 114u8, 110u8, 115u8, 114u8, 114u8, 115u8, 114u8, 120u8, + 115u8, 115u8, 0u8, 115u8, 115u8, 121u8, 115u8, 116u8, 0u8, 115u8, 116u8, + 113u8, 115u8, 117u8, 0u8, 115u8, 117u8, 107u8, 115u8, 117u8, 115u8, 115u8, + 118u8, 0u8, 115u8, 119u8, 0u8, 115u8, 119u8, 98u8, 115u8, 119u8, 99u8, + 115u8, 119u8, 103u8, 115u8, 119u8, 118u8, 115u8, 120u8, 110u8, 115u8, + 121u8, 108u8, 115u8, 121u8, 114u8, 115u8, 122u8, 108u8, 116u8, 97u8, 0u8, + 116u8, 97u8, 106u8, 116u8, 98u8, 119u8, 116u8, 99u8, 121u8, 116u8, 100u8, + 100u8, 116u8, 100u8, 103u8, 116u8, 100u8, 104u8, 116u8, 100u8, 117u8, + 116u8, 101u8, 0u8, 116u8, 101u8, 109u8, 116u8, 101u8, 111u8, 116u8, 101u8, + 116u8, 116u8, 103u8, 0u8, 116u8, 104u8, 0u8, 116u8, 104u8, 108u8, 116u8, + 104u8, 113u8, 116u8, 104u8, 114u8, 116u8, 105u8, 0u8, 116u8, 105u8, 103u8, + 116u8, 105u8, 118u8, 116u8, 107u8, 0u8, 116u8, 107u8, 108u8, 116u8, 107u8, + 114u8, 116u8, 107u8, 116u8, 116u8, 108u8, 0u8, 116u8, 108u8, 121u8, 116u8, + 109u8, 104u8, 116u8, 110u8, 0u8, 116u8, 111u8, 0u8, 116u8, 111u8, 103u8, + 116u8, 111u8, 107u8, 116u8, 112u8, 105u8, 116u8, 114u8, 0u8, 116u8, 114u8, + 117u8, 116u8, 114u8, 118u8, 116u8, 114u8, 119u8, 116u8, 115u8, 0u8, 116u8, + 115u8, 100u8, 116u8, 115u8, 102u8, 116u8, 115u8, 103u8, 116u8, 115u8, + 106u8, 116u8, 116u8, 0u8, 116u8, 116u8, 106u8, 116u8, 116u8, 115u8, 116u8, + 116u8, 116u8, 116u8, 117u8, 109u8, 116u8, 118u8, 108u8, 116u8, 119u8, + 113u8, 116u8, 120u8, 103u8, 116u8, 120u8, 111u8, 116u8, 121u8, 0u8, 116u8, + 121u8, 118u8, 116u8, 122u8, 109u8, 117u8, 100u8, 105u8, 117u8, 100u8, + 109u8, 117u8, 103u8, 0u8, 117u8, 103u8, 97u8, 117u8, 107u8, 0u8, 117u8, + 108u8, 105u8, 117u8, 109u8, 98u8, 117u8, 110u8, 114u8, 117u8, 110u8, 120u8, + 117u8, 114u8, 0u8, 117u8, 122u8, 0u8, 118u8, 97u8, 105u8, 118u8, 101u8, + 0u8, 118u8, 101u8, 99u8, 118u8, 101u8, 112u8, 118u8, 105u8, 0u8, 118u8, + 105u8, 99u8, 118u8, 108u8, 115u8, 118u8, 109u8, 102u8, 118u8, 109u8, 119u8, + 118u8, 111u8, 0u8, 118u8, 111u8, 116u8, 118u8, 114u8, 111u8, 118u8, 117u8, + 110u8, 119u8, 97u8, 0u8, 119u8, 97u8, 101u8, 119u8, 97u8, 108u8, 119u8, + 97u8, 114u8, 119u8, 98u8, 112u8, 119u8, 98u8, 113u8, 119u8, 98u8, 114u8, + 119u8, 108u8, 115u8, 119u8, 110u8, 105u8, 119u8, 111u8, 0u8, 119u8, 115u8, + 103u8, 119u8, 116u8, 
109u8, 119u8, 117u8, 117u8, 120u8, 97u8, 118u8, 120u8, + 99u8, 111u8, 120u8, 99u8, 114u8, 120u8, 104u8, 0u8, 120u8, 108u8, 99u8, + 120u8, 108u8, 100u8, 120u8, 109u8, 102u8, 120u8, 109u8, 110u8, 120u8, + 109u8, 114u8, 120u8, 110u8, 97u8, 120u8, 110u8, 114u8, 120u8, 111u8, 103u8, + 120u8, 112u8, 114u8, 120u8, 115u8, 97u8, 120u8, 115u8, 114u8, 121u8, 97u8, + 111u8, 121u8, 97u8, 112u8, 121u8, 97u8, 118u8, 121u8, 98u8, 98u8, 121u8, + 105u8, 0u8, 121u8, 111u8, 0u8, 121u8, 114u8, 108u8, 121u8, 117u8, 97u8, + 121u8, 117u8, 101u8, 122u8, 97u8, 0u8, 122u8, 97u8, 103u8, 122u8, 100u8, + 106u8, 122u8, 101u8, 97u8, 122u8, 103u8, 104u8, 122u8, 104u8, 0u8, 122u8, + 104u8, 120u8, 122u8, 107u8, 116u8, 122u8, 108u8, 109u8, 122u8, 109u8, + 105u8, 122u8, 117u8, 0u8, 122u8, 122u8, 97u8, + ]) + }, + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 69u8, 84u8, 0u8, 71u8, 69u8, 0u8, 71u8, 72u8, 0u8, 73u8, 68u8, 0u8, 85u8, + 71u8, 0u8, 71u8, 72u8, 0u8, 66u8, 84u8, 0u8, 82u8, 85u8, 0u8, 73u8, 82u8, + 0u8, 84u8, 78u8, 0u8, 90u8, 65u8, 0u8, 67u8, 77u8, 0u8, 73u8, 78u8, 0u8, + 84u8, 78u8, 0u8, 71u8, 72u8, 0u8, 73u8, 81u8, 0u8, 88u8, 75u8, 0u8, 82u8, + 85u8, 0u8, 69u8, 84u8, 0u8, 78u8, 71u8, 0u8, 69u8, 83u8, 0u8, 78u8, 71u8, + 0u8, 73u8, 68u8, 0u8, 84u8, 71u8, 0u8, 69u8, 71u8, 0u8, 73u8, 82u8, 0u8, + 67u8, 76u8, 0u8, 66u8, 79u8, 0u8, 68u8, 90u8, 0u8, 83u8, 65u8, 0u8, 77u8, + 65u8, 0u8, 69u8, 71u8, 0u8, 73u8, 78u8, 0u8, 84u8, 90u8, 0u8, 85u8, 83u8, + 0u8, 69u8, 83u8, 0u8, 67u8, 65u8, 0u8, 82u8, 85u8, 0u8, 73u8, 78u8, 0u8, + 66u8, 79u8, 0u8, 65u8, 90u8, 0u8, 82u8, 85u8, 0u8, 80u8, 75u8, 0u8, 73u8, + 68u8, 0u8, 78u8, 80u8, 0u8, 65u8, 84u8, 0u8, 67u8, 77u8, 0u8, 67u8, 77u8, + 0u8, 73u8, 68u8, 0u8, 67u8, 77u8, 0u8, 67u8, 73u8, 0u8, 66u8, 89u8, 0u8, + 83u8, 68u8, 0u8, 90u8, 77u8, 0u8, 73u8, 68u8, 0u8, 84u8, 90u8, 0u8, 67u8, + 77u8, 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 73u8, 78u8, 0u8, 66u8, 71u8, + 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 84u8, 82u8, 0u8, 73u8, 78u8, 0u8, + 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 86u8, 85u8, 0u8, 80u8, 72u8, 0u8, 78u8, + 71u8, 0u8, 73u8, 78u8, 0u8, 73u8, 68u8, 0u8, 83u8, 78u8, 0u8, 67u8, 77u8, + 0u8, 80u8, 72u8, 0u8, 67u8, 65u8, 0u8, 77u8, 89u8, 0u8, 86u8, 78u8, 0u8, + 77u8, 76u8, 0u8, 77u8, 76u8, 0u8, 66u8, 68u8, 0u8, 67u8, 78u8, 0u8, 73u8, + 78u8, 0u8, 73u8, 82u8, 0u8, 67u8, 73u8, 0u8, 70u8, 82u8, 0u8, 73u8, 78u8, + 0u8, 80u8, 75u8, 0u8, 73u8, 78u8, 0u8, 66u8, 65u8, 0u8, 76u8, 82u8, 0u8, + 67u8, 77u8, 0u8, 80u8, 72u8, 0u8, 80u8, 75u8, 0u8, 82u8, 85u8, 0u8, 89u8, + 84u8, 0u8, 73u8, 68u8, 0u8, 67u8, 77u8, 0u8, 71u8, 81u8, 0u8, 69u8, 82u8, + 0u8, 67u8, 77u8, 0u8, 77u8, 76u8, 0u8, 69u8, 83u8, 0u8, 85u8, 83u8, 0u8, + 78u8, 71u8, 0u8, 66u8, 68u8, 0u8, 82u8, 85u8, 0u8, 80u8, 72u8, 0u8, 85u8, + 71u8, 0u8, 71u8, 85u8, 0u8, 70u8, 77u8, 0u8, 82u8, 85u8, 0u8, 85u8, 83u8, + 0u8, 67u8, 65u8, 0u8, 85u8, 83u8, 0u8, 85u8, 83u8, 0u8, 75u8, 72u8, 0u8, + 86u8, 78u8, 0u8, 73u8, 81u8, 0u8, 67u8, 65u8, 0u8, 77u8, 78u8, 0u8, 70u8, + 82u8, 0u8, 69u8, 71u8, 0u8, 80u8, 72u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, + 0u8, 85u8, 65u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 83u8, 67u8, 0u8, + 67u8, 90u8, 0u8, 80u8, 76u8, 0u8, 67u8, 65u8, 0u8, 77u8, 77u8, 0u8, 82u8, + 85u8, 0u8, 82u8, 85u8, 0u8, 71u8, 66u8, 0u8, 68u8, 75u8, 0u8, 67u8, 73u8, + 0u8, 85u8, 83u8, 0u8, 82u8, 85u8, 0u8, 75u8, 69u8, 0u8, 73u8, 78u8, 0u8, + 68u8, 69u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 78u8, 69u8, 0u8, 78u8, + 71u8, 0u8, 67u8, 73u8, 0u8, 73u8, 78u8, 0u8, 67u8, 78u8, 0u8, 68u8, 69u8, + 0u8, 77u8, 76u8, 0u8, 77u8, 89u8, 0u8, 78u8, 80u8, 0u8, 
67u8, 77u8, 0u8, + 77u8, 86u8, 0u8, 83u8, 78u8, 0u8, 66u8, 70u8, 0u8, 66u8, 84u8, 0u8, 75u8, + 69u8, 0u8, 71u8, 72u8, 0u8, 78u8, 71u8, 0u8, 73u8, 84u8, 0u8, 69u8, 71u8, + 0u8, 77u8, 77u8, 0u8, 71u8, 82u8, 0u8, 85u8, 83u8, 0u8, 48u8, 48u8, 49u8, + 69u8, 83u8, 0u8, 73u8, 78u8, 0u8, 85u8, 83u8, 0u8, 69u8, 69u8, 0u8, 73u8, + 84u8, 0u8, 69u8, 83u8, 0u8, 67u8, 77u8, 0u8, 69u8, 83u8, 0u8, 73u8, 82u8, + 0u8, 71u8, 81u8, 0u8, 83u8, 78u8, 0u8, 77u8, 76u8, 0u8, 70u8, 73u8, 0u8, + 83u8, 68u8, 0u8, 80u8, 72u8, 0u8, 83u8, 69u8, 0u8, 70u8, 74u8, 0u8, 70u8, + 79u8, 0u8, 66u8, 74u8, 0u8, 70u8, 82u8, 0u8, 85u8, 83u8, 0u8, 70u8, 82u8, + 0u8, 68u8, 69u8, 0u8, 68u8, 69u8, 0u8, 67u8, 77u8, 0u8, 87u8, 70u8, 0u8, + 71u8, 78u8, 0u8, 78u8, 69u8, 0u8, 73u8, 84u8, 0u8, 78u8, 71u8, 0u8, 83u8, + 68u8, 0u8, 78u8, 76u8, 0u8, 73u8, 69u8, 0u8, 71u8, 72u8, 0u8, 77u8, 68u8, + 0u8, 67u8, 78u8, 0u8, 73u8, 68u8, 0u8, 73u8, 78u8, 0u8, 73u8, 82u8, 0u8, + 71u8, 70u8, 0u8, 71u8, 66u8, 0u8, 69u8, 84u8, 0u8, 78u8, 80u8, 0u8, 75u8, + 73u8, 0u8, 80u8, 75u8, 0u8, 80u8, 75u8, 0u8, 69u8, 83u8, 0u8, 73u8, 82u8, + 0u8, 80u8, 89u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 73u8, 68u8, 0u8, + 78u8, 76u8, 0u8, 85u8, 65u8, 0u8, 67u8, 89u8, 0u8, 73u8, 78u8, 0u8, 67u8, + 72u8, 0u8, 73u8, 78u8, 0u8, 66u8, 82u8, 0u8, 67u8, 79u8, 0u8, 71u8, 72u8, + 0u8, 75u8, 69u8, 0u8, 73u8, 77u8, 0u8, 78u8, 80u8, 0u8, 67u8, 65u8, 0u8, + 78u8, 71u8, 0u8, 67u8, 78u8, 0u8, 85u8, 83u8, 0u8, 65u8, 70u8, 0u8, 73u8, + 76u8, 0u8, 73u8, 78u8, 0u8, 70u8, 74u8, 0u8, 80u8, 72u8, 0u8, 84u8, 82u8, + 0u8, 67u8, 78u8, 0u8, 80u8, 75u8, 0u8, 73u8, 78u8, 0u8, 85u8, 83u8, 0u8, + 80u8, 72u8, 0u8, 80u8, 75u8, 0u8, 80u8, 71u8, 0u8, 73u8, 78u8, 0u8, 73u8, + 78u8, 0u8, 72u8, 82u8, 0u8, 68u8, 69u8, 0u8, 67u8, 78u8, 0u8, 72u8, 84u8, + 0u8, 72u8, 85u8, 0u8, 67u8, 65u8, 0u8, 65u8, 77u8, 0u8, 78u8, 65u8, 0u8, + 48u8, 48u8, 49u8, 77u8, 89u8, 0u8, 78u8, 71u8, 0u8, 73u8, 68u8, 0u8, 84u8, + 71u8, 0u8, 78u8, 71u8, 0u8, 67u8, 78u8, 0u8, 85u8, 83u8, 0u8, 80u8, 72u8, + 0u8, 73u8, 68u8, 0u8, 82u8, 85u8, 0u8, 48u8, 48u8, 49u8, 73u8, 83u8, 0u8, + 73u8, 84u8, 0u8, 67u8, 65u8, 0u8, 73u8, 76u8, 0u8, 82u8, 85u8, 0u8, 74u8, + 80u8, 0u8, 74u8, 77u8, 0u8, 48u8, 48u8, 49u8, 67u8, 77u8, 0u8, 85u8, 65u8, + 0u8, 84u8, 90u8, 0u8, 78u8, 80u8, 0u8, 68u8, 75u8, 0u8, 73u8, 68u8, 0u8, + 73u8, 68u8, 0u8, 71u8, 69u8, 0u8, 85u8, 90u8, 0u8, 68u8, 90u8, 0u8, 77u8, + 77u8, 0u8, 78u8, 71u8, 0u8, 75u8, 69u8, 0u8, 77u8, 76u8, 0u8, 73u8, 68u8, + 0u8, 82u8, 85u8, 0u8, 78u8, 69u8, 0u8, 78u8, 71u8, 0u8, 90u8, 87u8, 0u8, + 84u8, 90u8, 0u8, 84u8, 71u8, 0u8, 84u8, 72u8, 0u8, 67u8, 86u8, 0u8, 67u8, + 77u8, 0u8, 67u8, 73u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 67u8, 68u8, + 0u8, 73u8, 68u8, 0u8, 66u8, 82u8, 0u8, 73u8, 78u8, 0u8, 67u8, 78u8, 0u8, + 73u8, 78u8, 0u8, 77u8, 76u8, 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 75u8, + 69u8, 0u8, 84u8, 82u8, 0u8, 78u8, 65u8, 0u8, 76u8, 65u8, 0u8, 75u8, 90u8, + 0u8, 67u8, 77u8, 0u8, 71u8, 76u8, 0u8, 75u8, 69u8, 0u8, 75u8, 72u8, 0u8, + 65u8, 79u8, 0u8, 73u8, 78u8, 0u8, 71u8, 87u8, 0u8, 75u8, 82u8, 0u8, 82u8, + 85u8, 0u8, 73u8, 78u8, 0u8, 70u8, 77u8, 0u8, 76u8, 82u8, 0u8, 82u8, 85u8, + 0u8, 83u8, 76u8, 0u8, 80u8, 72u8, 0u8, 82u8, 85u8, 0u8, 73u8, 78u8, 0u8, + 73u8, 78u8, 0u8, 84u8, 90u8, 0u8, 67u8, 77u8, 0u8, 68u8, 69u8, 0u8, 77u8, + 89u8, 0u8, 84u8, 82u8, 0u8, 82u8, 85u8, 0u8, 82u8, 85u8, 0u8, 73u8, 68u8, + 0u8, 80u8, 75u8, 0u8, 71u8, 66u8, 0u8, 67u8, 65u8, 0u8, 73u8, 78u8, 0u8, + 84u8, 72u8, 0u8, 80u8, 75u8, 0u8, 75u8, 71u8, 0u8, 77u8, 89u8, 0u8, 77u8, + 89u8, 0u8, 86u8, 65u8, 0u8, 71u8, 82u8, 0u8, 73u8, 
76u8, 0u8, 84u8, 90u8, + 0u8, 80u8, 75u8, 0u8, 85u8, 71u8, 0u8, 76u8, 85u8, 0u8, 82u8, 85u8, 0u8, + 73u8, 68u8, 0u8, 67u8, 78u8, 0u8, 73u8, 78u8, 0u8, 82u8, 85u8, 0u8, 85u8, + 71u8, 0u8, 78u8, 76u8, 0u8, 78u8, 80u8, 0u8, 73u8, 84u8, 0u8, 67u8, 65u8, + 0u8, 67u8, 78u8, 0u8, 73u8, 68u8, 0u8, 73u8, 82u8, 0u8, 85u8, 83u8, 0u8, + 73u8, 78u8, 0u8, 73u8, 84u8, 0u8, 67u8, 68u8, 0u8, 76u8, 65u8, 0u8, 67u8, + 68u8, 0u8, 90u8, 77u8, 0u8, 73u8, 82u8, 0u8, 76u8, 84u8, 0u8, 76u8, 86u8, + 0u8, 67u8, 68u8, 0u8, 67u8, 68u8, 0u8, 75u8, 69u8, 0u8, 75u8, 69u8, 0u8, + 73u8, 82u8, 0u8, 76u8, 86u8, 0u8, 84u8, 72u8, 0u8, 67u8, 78u8, 0u8, 84u8, + 82u8, 0u8, 73u8, 68u8, 0u8, 67u8, 77u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, + 0u8, 73u8, 68u8, 0u8, 71u8, 77u8, 0u8, 75u8, 69u8, 0u8, 77u8, 88u8, 0u8, + 82u8, 85u8, 0u8, 80u8, 72u8, 0u8, 73u8, 68u8, 0u8, 83u8, 76u8, 0u8, 75u8, + 69u8, 0u8, 84u8, 72u8, 0u8, 77u8, 85u8, 0u8, 77u8, 71u8, 0u8, 77u8, 90u8, + 0u8, 67u8, 77u8, 0u8, 78u8, 80u8, 0u8, 84u8, 90u8, 0u8, 77u8, 72u8, 0u8, + 78u8, 90u8, 0u8, 67u8, 65u8, 0u8, 73u8, 68u8, 0u8, 77u8, 75u8, 0u8, 73u8, + 78u8, 0u8, 83u8, 68u8, 0u8, 77u8, 78u8, 0u8, 73u8, 78u8, 0u8, 77u8, 77u8, + 0u8, 82u8, 79u8, 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 66u8, 70u8, 0u8, + 73u8, 78u8, 0u8, 78u8, 80u8, 0u8, 82u8, 85u8, 0u8, 66u8, 68u8, 0u8, 77u8, + 89u8, 0u8, 77u8, 84u8, 0u8, 73u8, 78u8, 0u8, 67u8, 77u8, 0u8, 85u8, 83u8, + 0u8, 80u8, 75u8, 0u8, 77u8, 76u8, 0u8, 73u8, 78u8, 0u8, 73u8, 68u8, 0u8, + 85u8, 83u8, 0u8, 90u8, 87u8, 0u8, 77u8, 77u8, 0u8, 82u8, 85u8, 0u8, 85u8, + 71u8, 0u8, 73u8, 82u8, 0u8, 73u8, 82u8, 0u8, 78u8, 82u8, 0u8, 67u8, 78u8, + 0u8, 73u8, 84u8, 0u8, 78u8, 65u8, 0u8, 78u8, 79u8, 0u8, 77u8, 88u8, 0u8, + 90u8, 87u8, 0u8, 77u8, 90u8, 0u8, 68u8, 69u8, 0u8, 78u8, 80u8, 0u8, 78u8, + 80u8, 0u8, 78u8, 65u8, 0u8, 77u8, 90u8, 0u8, 77u8, 88u8, 0u8, 77u8, 88u8, + 0u8, 73u8, 68u8, 0u8, 78u8, 85u8, 0u8, 73u8, 78u8, 0u8, 78u8, 76u8, 0u8, + 67u8, 77u8, 0u8, 78u8, 79u8, 0u8, 67u8, 77u8, 0u8, 73u8, 78u8, 0u8, 78u8, + 79u8, 0u8, 84u8, 72u8, 0u8, 73u8, 78u8, 0u8, 83u8, 69u8, 0u8, 71u8, 78u8, + 0u8, 90u8, 65u8, 0u8, 67u8, 65u8, 0u8, 90u8, 65u8, 0u8, 73u8, 78u8, 0u8, + 83u8, 83u8, 0u8, 85u8, 83u8, 0u8, 67u8, 78u8, 0u8, 77u8, 87u8, 0u8, 84u8, + 90u8, 0u8, 85u8, 71u8, 0u8, 71u8, 72u8, 0u8, 70u8, 82u8, 0u8, 67u8, 65u8, + 0u8, 67u8, 65u8, 0u8, 67u8, 65u8, 0u8, 69u8, 84u8, 0u8, 73u8, 78u8, 0u8, + 71u8, 69u8, 0u8, 85u8, 83u8, 0u8, 77u8, 78u8, 0u8, 49u8, 52u8, 51u8, 73u8, + 78u8, 0u8, 80u8, 72u8, 0u8, 73u8, 82u8, 0u8, 80u8, 72u8, 0u8, 65u8, 87u8, + 0u8, 80u8, 87u8, 0u8, 70u8, 82u8, 0u8, 78u8, 71u8, 0u8, 85u8, 83u8, 0u8, + 67u8, 65u8, 0u8, 73u8, 82u8, 0u8, 68u8, 69u8, 0u8, 76u8, 66u8, 0u8, 83u8, + 66u8, 0u8, 73u8, 78u8, 0u8, 75u8, 69u8, 0u8, 80u8, 76u8, 0u8, 73u8, 84u8, + 0u8, 71u8, 82u8, 0u8, 70u8, 77u8, 0u8, 73u8, 78u8, 0u8, 67u8, 65u8, 0u8, + 80u8, 75u8, 0u8, 73u8, 82u8, 0u8, 48u8, 48u8, 49u8, 65u8, 70u8, 0u8, 66u8, + 82u8, 0u8, 71u8, 65u8, 0u8, 80u8, 69u8, 0u8, 71u8, 84u8, 0u8, 69u8, 67u8, + 0u8, 73u8, 78u8, 0u8, 82u8, 69u8, 0u8, 73u8, 68u8, 0u8, 73u8, 84u8, 0u8, + 77u8, 77u8, 0u8, 73u8, 78u8, 0u8, 77u8, 65u8, 0u8, 78u8, 80u8, 0u8, 66u8, + 68u8, 0u8, 67u8, 72u8, 0u8, 70u8, 73u8, 0u8, 67u8, 72u8, 0u8, 73u8, 82u8, + 0u8, 83u8, 69u8, 0u8, 66u8, 73u8, 0u8, 77u8, 90u8, 0u8, 82u8, 79u8, 0u8, + 73u8, 68u8, 0u8, 84u8, 90u8, 0u8, 70u8, 74u8, 0u8, 82u8, 85u8, 0u8, 85u8, + 65u8, 0u8, 83u8, 66u8, 0u8, 82u8, 87u8, 0u8, 84u8, 90u8, 0u8, 74u8, 80u8, + 0u8, 73u8, 78u8, 0u8, 71u8, 72u8, 0u8, 82u8, 85u8, 0u8, 75u8, 69u8, 0u8, + 73u8, 68u8, 0u8, 73u8, 78u8, 0u8, 83u8, 78u8, 
0u8, 73u8, 78u8, 0u8, 84u8, + 90u8, 0u8, 73u8, 84u8, 0u8, 73u8, 78u8, 0u8, 73u8, 84u8, 0u8, 71u8, 66u8, + 0u8, 80u8, 75u8, 0u8, 73u8, 84u8, 0u8, 73u8, 82u8, 0u8, 78u8, 79u8, 0u8, + 67u8, 73u8, 0u8, 77u8, 90u8, 0u8, 77u8, 88u8, 0u8, 77u8, 76u8, 0u8, 67u8, + 70u8, 0u8, 73u8, 69u8, 0u8, 76u8, 84u8, 0u8, 77u8, 65u8, 0u8, 77u8, 77u8, + 0u8, 76u8, 75u8, 0u8, 69u8, 84u8, 0u8, 83u8, 75u8, 0u8, 80u8, 75u8, 0u8, + 83u8, 73u8, 0u8, 80u8, 76u8, 0u8, 73u8, 68u8, 0u8, 87u8, 83u8, 0u8, 83u8, + 69u8, 0u8, 65u8, 79u8, 0u8, 83u8, 69u8, 0u8, 70u8, 73u8, 0u8, 73u8, 76u8, + 0u8, 70u8, 73u8, 0u8, 90u8, 87u8, 0u8, 77u8, 89u8, 0u8, 77u8, 76u8, 0u8, + 83u8, 79u8, 0u8, 85u8, 90u8, 0u8, 84u8, 72u8, 0u8, 65u8, 76u8, 0u8, 82u8, + 83u8, 0u8, 73u8, 78u8, 0u8, 83u8, 82u8, 0u8, 83u8, 78u8, 0u8, 73u8, 78u8, + 0u8, 90u8, 65u8, 0u8, 69u8, 82u8, 0u8, 90u8, 65u8, 0u8, 68u8, 69u8, 0u8, + 73u8, 68u8, 0u8, 84u8, 90u8, 0u8, 71u8, 78u8, 0u8, 83u8, 69u8, 0u8, 84u8, + 90u8, 0u8, 89u8, 84u8, 0u8, 67u8, 68u8, 0u8, 68u8, 69u8, 0u8, 73u8, 78u8, + 0u8, 73u8, 68u8, 0u8, 66u8, 68u8, 0u8, 73u8, 81u8, 0u8, 80u8, 76u8, 0u8, + 73u8, 78u8, 0u8, 78u8, 80u8, 0u8, 80u8, 72u8, 0u8, 73u8, 78u8, 0u8, 67u8, + 78u8, 0u8, 78u8, 80u8, 0u8, 78u8, 80u8, 0u8, 77u8, 89u8, 0u8, 73u8, 78u8, + 0u8, 83u8, 76u8, 0u8, 85u8, 71u8, 0u8, 84u8, 76u8, 0u8, 84u8, 74u8, 0u8, + 84u8, 72u8, 0u8, 78u8, 80u8, 0u8, 78u8, 80u8, 0u8, 78u8, 80u8, 0u8, 69u8, + 84u8, 0u8, 69u8, 82u8, 0u8, 78u8, 71u8, 0u8, 84u8, 77u8, 0u8, 84u8, 75u8, + 0u8, 65u8, 90u8, 0u8, 78u8, 80u8, 0u8, 80u8, 72u8, 0u8, 65u8, 90u8, 0u8, + 78u8, 69u8, 0u8, 90u8, 65u8, 0u8, 84u8, 79u8, 0u8, 77u8, 87u8, 0u8, 48u8, + 48u8, 49u8, 80u8, 71u8, 0u8, 84u8, 82u8, 0u8, 84u8, 82u8, 0u8, 84u8, 87u8, + 0u8, 80u8, 75u8, 0u8, 90u8, 65u8, 0u8, 71u8, 82u8, 0u8, 78u8, 80u8, 0u8, + 80u8, 72u8, 0u8, 66u8, 84u8, 0u8, 82u8, 85u8, 0u8, 85u8, 71u8, 0u8, 84u8, + 72u8, 0u8, 65u8, 90u8, 0u8, 77u8, 87u8, 0u8, 84u8, 86u8, 0u8, 78u8, 69u8, + 0u8, 67u8, 78u8, 0u8, 73u8, 78u8, 0u8, 80u8, 70u8, 0u8, 82u8, 85u8, 0u8, + 77u8, 65u8, 0u8, 82u8, 85u8, 0u8, 82u8, 85u8, 0u8, 67u8, 78u8, 0u8, 83u8, + 89u8, 0u8, 85u8, 65u8, 0u8, 70u8, 77u8, 0u8, 65u8, 79u8, 0u8, 73u8, 78u8, + 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 85u8, 90u8, 0u8, 76u8, 82u8, 0u8, + 90u8, 65u8, 0u8, 73u8, 84u8, 0u8, 82u8, 85u8, 0u8, 86u8, 78u8, 0u8, 83u8, + 88u8, 0u8, 66u8, 69u8, 0u8, 68u8, 69u8, 0u8, 77u8, 90u8, 0u8, 48u8, 48u8, + 49u8, 82u8, 85u8, 0u8, 69u8, 69u8, 0u8, 84u8, 90u8, 0u8, 66u8, 69u8, 0u8, + 67u8, 72u8, 0u8, 69u8, 84u8, 0u8, 80u8, 72u8, 0u8, 65u8, 85u8, 0u8, 73u8, + 78u8, 0u8, 73u8, 78u8, 0u8, 87u8, 70u8, 0u8, 75u8, 77u8, 0u8, 83u8, 78u8, + 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 67u8, 78u8, 0u8, 66u8, 82u8, 0u8, + 85u8, 90u8, 0u8, 84u8, 82u8, 0u8, 90u8, 65u8, 0u8, 84u8, 82u8, 0u8, 84u8, + 82u8, 0u8, 71u8, 69u8, 0u8, 67u8, 78u8, 0u8, 83u8, 68u8, 0u8, 83u8, 65u8, + 0u8, 73u8, 78u8, 0u8, 85u8, 71u8, 0u8, 73u8, 82u8, 0u8, 89u8, 69u8, 0u8, + 78u8, 80u8, 0u8, 77u8, 90u8, 0u8, 70u8, 77u8, 0u8, 67u8, 77u8, 0u8, 67u8, + 77u8, 0u8, 48u8, 48u8, 49u8, 78u8, 71u8, 0u8, 66u8, 82u8, 0u8, 77u8, 88u8, + 0u8, 72u8, 75u8, 0u8, 67u8, 78u8, 0u8, 83u8, 68u8, 0u8, 75u8, 77u8, 0u8, + 78u8, 76u8, 0u8, 77u8, 65u8, 0u8, 67u8, 78u8, 0u8, 67u8, 78u8, 0u8, 67u8, + 78u8, 0u8, 84u8, 71u8, 0u8, 77u8, 89u8, 0u8, 90u8, 65u8, 0u8, 84u8, 82u8, + 0u8, + ]) + }, + ) + }, + ls2r: unsafe { + #[allow(unused_unsafe)] + ::zerovec::ZeroMap2d::from_parts_unchecked( + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 97u8, 114u8, 99u8, 97u8, 122u8, 0u8, 99u8, 117u8, 0u8, 101u8, 110u8, 0u8, + 102u8, 102u8, 0u8, 
103u8, 114u8, 99u8, 107u8, 107u8, 0u8, 107u8, 117u8, + 0u8, 107u8, 121u8, 0u8, 108u8, 105u8, 102u8, 109u8, 97u8, 110u8, 109u8, + 110u8, 0u8, 112u8, 97u8, 0u8, 112u8, 97u8, 108u8, 115u8, 100u8, 0u8, 116u8, + 103u8, 0u8, 117u8, 103u8, 0u8, 117u8, 110u8, 114u8, 117u8, 122u8, 0u8, + 121u8, 117u8, 101u8, 122u8, 104u8, 0u8, + ]) + }, + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 2u8, 0u8, 0u8, 0u8, 3u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 5u8, 0u8, 0u8, + 0u8, 6u8, 0u8, 0u8, 0u8, 7u8, 0u8, 0u8, 0u8, 8u8, 0u8, 0u8, 0u8, 10u8, 0u8, + 0u8, 0u8, 12u8, 0u8, 0u8, 0u8, 13u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, + 15u8, 0u8, 0u8, 0u8, 16u8, 0u8, 0u8, 0u8, 17u8, 0u8, 0u8, 0u8, 20u8, 0u8, + 0u8, 0u8, 21u8, 0u8, 0u8, 0u8, 22u8, 0u8, 0u8, 0u8, 23u8, 0u8, 0u8, 0u8, + 24u8, 0u8, 0u8, 0u8, 25u8, 0u8, 0u8, 0u8, 28u8, 0u8, 0u8, 0u8, + ]) + }, + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 78u8, 98u8, 97u8, 116u8, 80u8, 97u8, 108u8, 109u8, 65u8, 114u8, 97u8, 98u8, + 71u8, 108u8, 97u8, 103u8, 83u8, 104u8, 97u8, 119u8, 65u8, 100u8, 108u8, + 109u8, 76u8, 105u8, 110u8, 98u8, 65u8, 114u8, 97u8, 98u8, 65u8, 114u8, + 97u8, 98u8, 89u8, 101u8, 122u8, 105u8, 65u8, 114u8, 97u8, 98u8, 76u8, 97u8, + 116u8, 110u8, 76u8, 105u8, 109u8, 98u8, 78u8, 107u8, 111u8, 111u8, 77u8, + 111u8, 110u8, 103u8, 65u8, 114u8, 97u8, 98u8, 80u8, 104u8, 108u8, 112u8, + 68u8, 101u8, 118u8, 97u8, 75u8, 104u8, 111u8, 106u8, 83u8, 105u8, 110u8, + 100u8, 65u8, 114u8, 97u8, 98u8, 67u8, 121u8, 114u8, 108u8, 68u8, 101u8, + 118u8, 97u8, 65u8, 114u8, 97u8, 98u8, 72u8, 97u8, 110u8, 115u8, 66u8, + 111u8, 112u8, 111u8, 72u8, 97u8, 110u8, 98u8, 72u8, 97u8, 110u8, 116u8, + ]) + }, + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 74u8, 79u8, 0u8, 83u8, 89u8, 0u8, 73u8, 82u8, 0u8, 66u8, 71u8, 0u8, 71u8, + 66u8, 0u8, 71u8, 78u8, 0u8, 71u8, 82u8, 0u8, 67u8, 78u8, 0u8, 73u8, 81u8, + 0u8, 71u8, 69u8, 0u8, 67u8, 78u8, 0u8, 84u8, 82u8, 0u8, 73u8, 78u8, 0u8, + 71u8, 78u8, 0u8, 67u8, 78u8, 0u8, 80u8, 75u8, 0u8, 67u8, 78u8, 0u8, 73u8, + 78u8, 0u8, 73u8, 78u8, 0u8, 73u8, 78u8, 0u8, 80u8, 75u8, 0u8, 75u8, 90u8, + 0u8, 78u8, 80u8, 0u8, 65u8, 70u8, 0u8, 67u8, 78u8, 0u8, 84u8, 87u8, 0u8, + 84u8, 87u8, 0u8, 84u8, 87u8, 0u8, + ]) + }, + ) + }, +} diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1.rs b/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1.rs deleted file mode 100644 index f07b4b80649ef..0000000000000 --- a/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1.rs +++ /dev/null @@ -1,207 +0,0 @@ -// @generated -type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: LocaleFallbackParentsV1Marker as :: icu_provider :: DataMarker > :: Yokeable ; -pub static DATA: litemap::LiteMap<&str, &DataStruct, &[(&str, &DataStruct)]> = - litemap::LiteMap::from_sorted_store_unchecked(&[("und", UND)]); -static UND: &DataStruct = &::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1 { - parents: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap::from_parts_unchecked( - unsafe { - ::zerovec::VarZeroVec::from_bytes_unchecked(&[ - 131u8, 0u8, 0u8, 0u8, 0u8, 0u8, 6u8, 0u8, 11u8, 0u8, 16u8, 0u8, 21u8, 0u8, - 26u8, 0u8, 31u8, 0u8, 36u8, 0u8, 41u8, 0u8, 46u8, 0u8, 51u8, 0u8, 56u8, 0u8, - 61u8, 0u8, 66u8, 0u8, 71u8, 0u8, 76u8, 0u8, 81u8, 0u8, 86u8, 0u8, 91u8, 0u8, - 96u8, 0u8, 101u8, 0u8, 106u8, 0u8, 111u8, 0u8, 116u8, 0u8, 121u8, 0u8, 126u8, - 0u8, 131u8, 0u8, 136u8, 0u8, 141u8, 0u8, 146u8, 0u8, 151u8, 0u8, 156u8, 0u8, - 161u8, 0u8, 166u8, 0u8, 171u8, 0u8, 176u8, 0u8, 181u8, 0u8, 186u8, 
0u8, 191u8, - 0u8, 196u8, 0u8, 201u8, 0u8, 206u8, 0u8, 211u8, 0u8, 216u8, 0u8, 221u8, 0u8, - 226u8, 0u8, 231u8, 0u8, 236u8, 0u8, 241u8, 0u8, 246u8, 0u8, 251u8, 0u8, 0u8, - 1u8, 5u8, 1u8, 10u8, 1u8, 15u8, 1u8, 20u8, 1u8, 25u8, 1u8, 30u8, 1u8, 35u8, - 1u8, 40u8, 1u8, 45u8, 1u8, 50u8, 1u8, 55u8, 1u8, 60u8, 1u8, 65u8, 1u8, 70u8, - 1u8, 75u8, 1u8, 80u8, 1u8, 85u8, 1u8, 90u8, 1u8, 95u8, 1u8, 100u8, 1u8, 105u8, - 1u8, 110u8, 1u8, 115u8, 1u8, 120u8, 1u8, 125u8, 1u8, 130u8, 1u8, 135u8, 1u8, - 140u8, 1u8, 145u8, 1u8, 150u8, 1u8, 155u8, 1u8, 160u8, 1u8, 165u8, 1u8, 170u8, - 1u8, 175u8, 1u8, 180u8, 1u8, 185u8, 1u8, 190u8, 1u8, 195u8, 1u8, 200u8, 1u8, - 205u8, 1u8, 210u8, 1u8, 215u8, 1u8, 220u8, 1u8, 225u8, 1u8, 230u8, 1u8, 235u8, - 1u8, 240u8, 1u8, 245u8, 1u8, 250u8, 1u8, 255u8, 1u8, 4u8, 2u8, 9u8, 2u8, 14u8, - 2u8, 19u8, 2u8, 24u8, 2u8, 29u8, 2u8, 34u8, 2u8, 39u8, 2u8, 44u8, 2u8, 49u8, - 2u8, 54u8, 2u8, 59u8, 2u8, 64u8, 2u8, 71u8, 2u8, 73u8, 2u8, 75u8, 2u8, 77u8, - 2u8, 82u8, 2u8, 87u8, 2u8, 92u8, 2u8, 97u8, 2u8, 102u8, 2u8, 107u8, 2u8, 112u8, - 2u8, 117u8, 2u8, 122u8, 2u8, 127u8, 2u8, 132u8, 2u8, 101u8, 110u8, 45u8, 49u8, - 53u8, 48u8, 101u8, 110u8, 45u8, 65u8, 71u8, 101u8, 110u8, 45u8, 65u8, 73u8, - 101u8, 110u8, 45u8, 65u8, 84u8, 101u8, 110u8, 45u8, 65u8, 85u8, 101u8, 110u8, - 45u8, 66u8, 66u8, 101u8, 110u8, 45u8, 66u8, 69u8, 101u8, 110u8, 45u8, 66u8, - 77u8, 101u8, 110u8, 45u8, 66u8, 83u8, 101u8, 110u8, 45u8, 66u8, 87u8, 101u8, - 110u8, 45u8, 66u8, 90u8, 101u8, 110u8, 45u8, 67u8, 67u8, 101u8, 110u8, 45u8, - 67u8, 72u8, 101u8, 110u8, 45u8, 67u8, 75u8, 101u8, 110u8, 45u8, 67u8, 77u8, - 101u8, 110u8, 45u8, 67u8, 88u8, 101u8, 110u8, 45u8, 67u8, 89u8, 101u8, 110u8, - 45u8, 68u8, 69u8, 101u8, 110u8, 45u8, 68u8, 71u8, 101u8, 110u8, 45u8, 68u8, - 75u8, 101u8, 110u8, 45u8, 68u8, 77u8, 101u8, 110u8, 45u8, 69u8, 82u8, 101u8, - 110u8, 45u8, 70u8, 73u8, 101u8, 110u8, 45u8, 70u8, 74u8, 101u8, 110u8, 45u8, - 70u8, 75u8, 101u8, 110u8, 45u8, 70u8, 77u8, 101u8, 110u8, 45u8, 71u8, 66u8, - 101u8, 110u8, 45u8, 71u8, 68u8, 101u8, 110u8, 45u8, 71u8, 71u8, 101u8, 110u8, - 45u8, 71u8, 72u8, 101u8, 110u8, 45u8, 71u8, 73u8, 101u8, 110u8, 45u8, 71u8, - 77u8, 101u8, 110u8, 45u8, 71u8, 89u8, 101u8, 110u8, 45u8, 72u8, 75u8, 101u8, - 110u8, 45u8, 73u8, 69u8, 101u8, 110u8, 45u8, 73u8, 76u8, 101u8, 110u8, 45u8, - 73u8, 77u8, 101u8, 110u8, 45u8, 73u8, 78u8, 101u8, 110u8, 45u8, 73u8, 79u8, - 101u8, 110u8, 45u8, 74u8, 69u8, 101u8, 110u8, 45u8, 74u8, 77u8, 101u8, 110u8, - 45u8, 75u8, 69u8, 101u8, 110u8, 45u8, 75u8, 73u8, 101u8, 110u8, 45u8, 75u8, - 78u8, 101u8, 110u8, 45u8, 75u8, 89u8, 101u8, 110u8, 45u8, 76u8, 67u8, 101u8, - 110u8, 45u8, 76u8, 82u8, 101u8, 110u8, 45u8, 76u8, 83u8, 101u8, 110u8, 45u8, - 77u8, 71u8, 101u8, 110u8, 45u8, 77u8, 79u8, 101u8, 110u8, 45u8, 77u8, 83u8, - 101u8, 110u8, 45u8, 77u8, 84u8, 101u8, 110u8, 45u8, 77u8, 85u8, 101u8, 110u8, - 45u8, 77u8, 86u8, 101u8, 110u8, 45u8, 77u8, 87u8, 101u8, 110u8, 45u8, 77u8, - 89u8, 101u8, 110u8, 45u8, 78u8, 65u8, 101u8, 110u8, 45u8, 78u8, 70u8, 101u8, - 110u8, 45u8, 78u8, 71u8, 101u8, 110u8, 45u8, 78u8, 76u8, 101u8, 110u8, 45u8, - 78u8, 82u8, 101u8, 110u8, 45u8, 78u8, 85u8, 101u8, 110u8, 45u8, 78u8, 90u8, - 101u8, 110u8, 45u8, 80u8, 71u8, 101u8, 110u8, 45u8, 80u8, 75u8, 101u8, 110u8, - 45u8, 80u8, 78u8, 101u8, 110u8, 45u8, 80u8, 87u8, 101u8, 110u8, 45u8, 82u8, - 87u8, 101u8, 110u8, 45u8, 83u8, 66u8, 101u8, 110u8, 45u8, 83u8, 67u8, 101u8, - 110u8, 45u8, 83u8, 68u8, 101u8, 110u8, 45u8, 83u8, 69u8, 101u8, 110u8, 45u8, - 83u8, 71u8, 101u8, 110u8, 45u8, 83u8, 72u8, 101u8, 110u8, 
45u8, 83u8, 73u8, - 101u8, 110u8, 45u8, 83u8, 76u8, 101u8, 110u8, 45u8, 83u8, 83u8, 101u8, 110u8, - 45u8, 83u8, 88u8, 101u8, 110u8, 45u8, 83u8, 90u8, 101u8, 110u8, 45u8, 84u8, - 67u8, 101u8, 110u8, 45u8, 84u8, 75u8, 101u8, 110u8, 45u8, 84u8, 79u8, 101u8, - 110u8, 45u8, 84u8, 84u8, 101u8, 110u8, 45u8, 84u8, 86u8, 101u8, 110u8, 45u8, - 84u8, 90u8, 101u8, 110u8, 45u8, 85u8, 71u8, 101u8, 110u8, 45u8, 86u8, 67u8, - 101u8, 110u8, 45u8, 86u8, 71u8, 101u8, 110u8, 45u8, 86u8, 85u8, 101u8, 110u8, - 45u8, 87u8, 83u8, 101u8, 110u8, 45u8, 90u8, 65u8, 101u8, 110u8, 45u8, 90u8, - 77u8, 101u8, 110u8, 45u8, 90u8, 87u8, 101u8, 115u8, 45u8, 65u8, 82u8, 101u8, - 115u8, 45u8, 66u8, 79u8, 101u8, 115u8, 45u8, 66u8, 82u8, 101u8, 115u8, 45u8, - 66u8, 90u8, 101u8, 115u8, 45u8, 67u8, 76u8, 101u8, 115u8, 45u8, 67u8, 79u8, - 101u8, 115u8, 45u8, 67u8, 82u8, 101u8, 115u8, 45u8, 67u8, 85u8, 101u8, 115u8, - 45u8, 68u8, 79u8, 101u8, 115u8, 45u8, 69u8, 67u8, 101u8, 115u8, 45u8, 71u8, - 84u8, 101u8, 115u8, 45u8, 72u8, 78u8, 101u8, 115u8, 45u8, 77u8, 88u8, 101u8, - 115u8, 45u8, 78u8, 73u8, 101u8, 115u8, 45u8, 80u8, 65u8, 101u8, 115u8, 45u8, - 80u8, 69u8, 101u8, 115u8, 45u8, 80u8, 82u8, 101u8, 115u8, 45u8, 80u8, 89u8, - 101u8, 115u8, 45u8, 83u8, 86u8, 101u8, 115u8, 45u8, 85u8, 83u8, 101u8, 115u8, - 45u8, 85u8, 89u8, 101u8, 115u8, 45u8, 86u8, 69u8, 104u8, 105u8, 45u8, 76u8, - 97u8, 116u8, 110u8, 104u8, 116u8, 110u8, 98u8, 110u8, 110u8, 112u8, 116u8, - 45u8, 65u8, 79u8, 112u8, 116u8, 45u8, 67u8, 72u8, 112u8, 116u8, 45u8, 67u8, - 86u8, 112u8, 116u8, 45u8, 70u8, 82u8, 112u8, 116u8, 45u8, 71u8, 81u8, 112u8, - 116u8, 45u8, 71u8, 87u8, 112u8, 116u8, 45u8, 76u8, 85u8, 112u8, 116u8, 45u8, - 77u8, 79u8, 112u8, 116u8, 45u8, 77u8, 90u8, 112u8, 116u8, 45u8, 83u8, 84u8, - 112u8, 116u8, 45u8, 84u8, 76u8, 122u8, 104u8, 45u8, 72u8, 97u8, 110u8, 116u8, - 45u8, 77u8, 79u8, - ]) - }, - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, - 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 
48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 
0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, - 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, - 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 115u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, - 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, - 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, - 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, - 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, - 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, - 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, - 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 110u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 73u8, 78u8, 0u8, 102u8, 114u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 1u8, 72u8, 84u8, 0u8, 110u8, 111u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 110u8, 111u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, - 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, - 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, - 0u8, 0u8, 0u8, 
0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 122u8, 104u8, 0u8, 1u8, 72u8, 97u8, 110u8, - 116u8, 1u8, 72u8, 75u8, 0u8, - ]) - }, - ) - }, -}; diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/mod.rs b/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/mod.rs new file mode 100644 index 0000000000000..ce04af868aa54 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/mod.rs @@ -0,0 +1,6 @@ +// @generated +type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: LocaleFallbackParentsV1Marker as :: icu_provider :: DataMarker > :: Yokeable ; +pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> { + locale.is_empty().then(|| &UND) +} +static UND: DataStruct = include!("und.rs.data"); diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/und.rs.data b/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/und.rs.data new file mode 100644 index 0000000000000..5ead959083cc8 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/fallback/parents_v1/und.rs.data @@ -0,0 +1,216 @@ +::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1 { + parents: unsafe { + #[allow(unused_unsafe)] + ::zerovec::ZeroMap::from_parts_unchecked( + unsafe { + ::zerovec::VarZeroVec::from_bytes_unchecked(&[ + 131u8, 0u8, 0u8, 0u8, 0u8, 0u8, 6u8, 0u8, 11u8, 0u8, 16u8, 0u8, 21u8, 0u8, + 26u8, 0u8, 31u8, 0u8, 36u8, 0u8, 41u8, 0u8, 46u8, 0u8, 51u8, 0u8, 56u8, + 0u8, 61u8, 0u8, 66u8, 0u8, 71u8, 0u8, 76u8, 0u8, 81u8, 0u8, 86u8, 0u8, + 91u8, 0u8, 96u8, 0u8, 101u8, 0u8, 106u8, 0u8, 111u8, 0u8, 116u8, 0u8, + 121u8, 0u8, 126u8, 0u8, 131u8, 0u8, 136u8, 0u8, 141u8, 0u8, 146u8, 0u8, + 151u8, 0u8, 156u8, 0u8, 161u8, 0u8, 166u8, 0u8, 171u8, 0u8, 176u8, 0u8, + 181u8, 0u8, 186u8, 0u8, 191u8, 0u8, 196u8, 0u8, 201u8, 0u8, 206u8, 0u8, + 211u8, 0u8, 216u8, 0u8, 221u8, 0u8, 226u8, 0u8, 231u8, 0u8, 236u8, 0u8, + 241u8, 0u8, 246u8, 0u8, 251u8, 0u8, 0u8, 1u8, 5u8, 1u8, 10u8, 1u8, 15u8, + 1u8, 20u8, 1u8, 25u8, 1u8, 30u8, 1u8, 35u8, 1u8, 40u8, 1u8, 45u8, 1u8, + 50u8, 1u8, 55u8, 1u8, 60u8, 1u8, 65u8, 1u8, 70u8, 1u8, 75u8, 1u8, 80u8, + 1u8, 85u8, 1u8, 90u8, 1u8, 95u8, 1u8, 100u8, 1u8, 105u8, 1u8, 110u8, 1u8, + 115u8, 1u8, 120u8, 1u8, 125u8, 1u8, 130u8, 1u8, 135u8, 1u8, 140u8, 1u8, + 145u8, 1u8, 150u8, 1u8, 155u8, 1u8, 160u8, 1u8, 165u8, 1u8, 170u8, 1u8, + 175u8, 1u8, 180u8, 1u8, 185u8, 1u8, 190u8, 1u8, 195u8, 1u8, 200u8, 1u8, + 205u8, 1u8, 210u8, 1u8, 215u8, 1u8, 220u8, 1u8, 225u8, 1u8, 230u8, 1u8, + 235u8, 1u8, 240u8, 1u8, 245u8, 1u8, 250u8, 1u8, 255u8, 1u8, 4u8, 2u8, 9u8, + 2u8, 14u8, 2u8, 19u8, 2u8, 24u8, 2u8, 29u8, 2u8, 34u8, 2u8, 39u8, 2u8, + 44u8, 2u8, 49u8, 2u8, 54u8, 2u8, 59u8, 2u8, 64u8, 2u8, 71u8, 2u8, 73u8, + 2u8, 75u8, 2u8, 77u8, 2u8, 82u8, 2u8, 87u8, 2u8, 92u8, 2u8, 97u8, 2u8, + 102u8, 2u8, 107u8, 2u8, 112u8, 2u8, 117u8, 2u8, 122u8, 2u8, 127u8, 2u8, + 132u8, 2u8, 101u8, 110u8, 45u8, 49u8, 53u8, 48u8, 101u8, 110u8, 45u8, 65u8, + 71u8, 101u8, 110u8, 45u8, 65u8, 73u8, 101u8, 110u8, 45u8, 65u8, 84u8, + 101u8, 110u8, 45u8, 65u8, 85u8, 101u8, 110u8, 45u8, 66u8, 66u8, 101u8, + 110u8, 45u8, 66u8, 69u8, 101u8, 110u8, 45u8, 66u8, 77u8, 101u8, 110u8, + 45u8, 66u8, 83u8, 101u8, 110u8, 45u8, 66u8, 87u8, 101u8, 110u8, 45u8, 66u8, + 90u8, 101u8, 110u8, 45u8, 67u8, 67u8, 101u8, 110u8, 45u8, 67u8, 72u8, + 101u8, 110u8, 45u8, 67u8, 75u8, 101u8, 110u8, 45u8, 67u8, 77u8, 101u8, + 110u8, 45u8, 67u8, 
88u8, 101u8, 110u8, 45u8, 67u8, 89u8, 101u8, 110u8, + 45u8, 68u8, 69u8, 101u8, 110u8, 45u8, 68u8, 71u8, 101u8, 110u8, 45u8, 68u8, + 75u8, 101u8, 110u8, 45u8, 68u8, 77u8, 101u8, 110u8, 45u8, 69u8, 82u8, + 101u8, 110u8, 45u8, 70u8, 73u8, 101u8, 110u8, 45u8, 70u8, 74u8, 101u8, + 110u8, 45u8, 70u8, 75u8, 101u8, 110u8, 45u8, 70u8, 77u8, 101u8, 110u8, + 45u8, 71u8, 66u8, 101u8, 110u8, 45u8, 71u8, 68u8, 101u8, 110u8, 45u8, 71u8, + 71u8, 101u8, 110u8, 45u8, 71u8, 72u8, 101u8, 110u8, 45u8, 71u8, 73u8, + 101u8, 110u8, 45u8, 71u8, 77u8, 101u8, 110u8, 45u8, 71u8, 89u8, 101u8, + 110u8, 45u8, 72u8, 75u8, 101u8, 110u8, 45u8, 73u8, 69u8, 101u8, 110u8, + 45u8, 73u8, 76u8, 101u8, 110u8, 45u8, 73u8, 77u8, 101u8, 110u8, 45u8, 73u8, + 78u8, 101u8, 110u8, 45u8, 73u8, 79u8, 101u8, 110u8, 45u8, 74u8, 69u8, + 101u8, 110u8, 45u8, 74u8, 77u8, 101u8, 110u8, 45u8, 75u8, 69u8, 101u8, + 110u8, 45u8, 75u8, 73u8, 101u8, 110u8, 45u8, 75u8, 78u8, 101u8, 110u8, + 45u8, 75u8, 89u8, 101u8, 110u8, 45u8, 76u8, 67u8, 101u8, 110u8, 45u8, 76u8, + 82u8, 101u8, 110u8, 45u8, 76u8, 83u8, 101u8, 110u8, 45u8, 77u8, 71u8, + 101u8, 110u8, 45u8, 77u8, 79u8, 101u8, 110u8, 45u8, 77u8, 83u8, 101u8, + 110u8, 45u8, 77u8, 84u8, 101u8, 110u8, 45u8, 77u8, 85u8, 101u8, 110u8, + 45u8, 77u8, 86u8, 101u8, 110u8, 45u8, 77u8, 87u8, 101u8, 110u8, 45u8, 77u8, + 89u8, 101u8, 110u8, 45u8, 78u8, 65u8, 101u8, 110u8, 45u8, 78u8, 70u8, + 101u8, 110u8, 45u8, 78u8, 71u8, 101u8, 110u8, 45u8, 78u8, 76u8, 101u8, + 110u8, 45u8, 78u8, 82u8, 101u8, 110u8, 45u8, 78u8, 85u8, 101u8, 110u8, + 45u8, 78u8, 90u8, 101u8, 110u8, 45u8, 80u8, 71u8, 101u8, 110u8, 45u8, 80u8, + 75u8, 101u8, 110u8, 45u8, 80u8, 78u8, 101u8, 110u8, 45u8, 80u8, 87u8, + 101u8, 110u8, 45u8, 82u8, 87u8, 101u8, 110u8, 45u8, 83u8, 66u8, 101u8, + 110u8, 45u8, 83u8, 67u8, 101u8, 110u8, 45u8, 83u8, 68u8, 101u8, 110u8, + 45u8, 83u8, 69u8, 101u8, 110u8, 45u8, 83u8, 71u8, 101u8, 110u8, 45u8, 83u8, + 72u8, 101u8, 110u8, 45u8, 83u8, 73u8, 101u8, 110u8, 45u8, 83u8, 76u8, + 101u8, 110u8, 45u8, 83u8, 83u8, 101u8, 110u8, 45u8, 83u8, 88u8, 101u8, + 110u8, 45u8, 83u8, 90u8, 101u8, 110u8, 45u8, 84u8, 67u8, 101u8, 110u8, + 45u8, 84u8, 75u8, 101u8, 110u8, 45u8, 84u8, 79u8, 101u8, 110u8, 45u8, 84u8, + 84u8, 101u8, 110u8, 45u8, 84u8, 86u8, 101u8, 110u8, 45u8, 84u8, 90u8, + 101u8, 110u8, 45u8, 85u8, 71u8, 101u8, 110u8, 45u8, 86u8, 67u8, 101u8, + 110u8, 45u8, 86u8, 71u8, 101u8, 110u8, 45u8, 86u8, 85u8, 101u8, 110u8, + 45u8, 87u8, 83u8, 101u8, 110u8, 45u8, 90u8, 65u8, 101u8, 110u8, 45u8, 90u8, + 77u8, 101u8, 110u8, 45u8, 90u8, 87u8, 101u8, 115u8, 45u8, 65u8, 82u8, + 101u8, 115u8, 45u8, 66u8, 79u8, 101u8, 115u8, 45u8, 66u8, 82u8, 101u8, + 115u8, 45u8, 66u8, 90u8, 101u8, 115u8, 45u8, 67u8, 76u8, 101u8, 115u8, + 45u8, 67u8, 79u8, 101u8, 115u8, 45u8, 67u8, 82u8, 101u8, 115u8, 45u8, 67u8, + 85u8, 101u8, 115u8, 45u8, 68u8, 79u8, 101u8, 115u8, 45u8, 69u8, 67u8, + 101u8, 115u8, 45u8, 71u8, 84u8, 101u8, 115u8, 45u8, 72u8, 78u8, 101u8, + 115u8, 45u8, 77u8, 88u8, 101u8, 115u8, 45u8, 78u8, 73u8, 101u8, 115u8, + 45u8, 80u8, 65u8, 101u8, 115u8, 45u8, 80u8, 69u8, 101u8, 115u8, 45u8, 80u8, + 82u8, 101u8, 115u8, 45u8, 80u8, 89u8, 101u8, 115u8, 45u8, 83u8, 86u8, + 101u8, 115u8, 45u8, 85u8, 83u8, 101u8, 115u8, 45u8, 85u8, 89u8, 101u8, + 115u8, 45u8, 86u8, 69u8, 104u8, 105u8, 45u8, 76u8, 97u8, 116u8, 110u8, + 104u8, 116u8, 110u8, 98u8, 110u8, 110u8, 112u8, 116u8, 45u8, 65u8, 79u8, + 112u8, 116u8, 45u8, 67u8, 72u8, 112u8, 116u8, 45u8, 67u8, 86u8, 112u8, + 116u8, 45u8, 70u8, 82u8, 112u8, 116u8, 45u8, 71u8, 81u8, 112u8, 116u8, + 45u8, 71u8, 87u8, 
112u8, 116u8, 45u8, 76u8, 85u8, 112u8, 116u8, 45u8, 77u8, + 79u8, 112u8, 116u8, 45u8, 77u8, 90u8, 112u8, 116u8, 45u8, 83u8, 84u8, + 112u8, 116u8, 45u8, 84u8, 76u8, 122u8, 104u8, 45u8, 72u8, 97u8, 110u8, + 116u8, 45u8, 77u8, 79u8, + ]) + }, + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, + 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, + 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, + 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, + 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, + 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, + 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, + 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, + 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, + 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, + 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, + 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, + 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, + 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, + 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, + 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, + 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 
0u8, 0u8, 0u8, 0u8, 0u8, + 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, + 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, + 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, + 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, + 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, + 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, + 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, + 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, + 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, + 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, + 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 1u8, 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, + 49u8, 53u8, 48u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, + 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, + 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, + 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, + 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 1u8, 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, + 48u8, 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, + 48u8, 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, + 49u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, + 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, + 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 48u8, 48u8, 49u8, 101u8, 115u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 52u8, 49u8, 
57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, + 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, + 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, + 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, + 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, + 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 1u8, 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, + 52u8, 49u8, 57u8, 101u8, 115u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 52u8, + 49u8, 57u8, 101u8, 110u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 73u8, 78u8, + 0u8, 102u8, 114u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 72u8, 84u8, 0u8, + 110u8, 111u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 110u8, + 111u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 112u8, 116u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, + 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, + 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, + 0u8, 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, + 112u8, 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, + 116u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 112u8, 116u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 1u8, 80u8, 84u8, 0u8, 122u8, 104u8, 0u8, 1u8, 72u8, + 97u8, 110u8, 116u8, 1u8, 72u8, 75u8, 0u8, + ]) + }, + ) + }, +} diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1.rs b/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1.rs deleted file mode 100644 index 7df33c12e3d5a..0000000000000 --- a/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1.rs +++ /dev/null @@ -1,41 +0,0 @@ -// @generated -type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: CollationFallbackSupplementV1Marker as :: icu_provider :: DataMarker > :: Yokeable ; -pub static DATA: litemap::LiteMap<&str, &DataStruct, &[(&str, &DataStruct)]> = - litemap::LiteMap::from_sorted_store_unchecked(&[("und", UND)]); -static UND: &DataStruct = - &::icu_provider_adapters::fallback::provider::LocaleFallbackSupplementV1 { - parents: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap::from_parts_unchecked( - unsafe { - ::zerovec::VarZeroVec::from_bytes_unchecked(&[ - 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 121u8, 117u8, 101u8, - ]) - }, - unsafe { - ::zerovec::ZeroVec::from_bytes_unchecked(&[ - 122u8, 104u8, 0u8, 1u8, 72u8, 97u8, 110u8, 116u8, 0u8, 0u8, 0u8, 0u8, - ]) - }, - ) - }, - unicode_extension_defaults: unsafe { - #[allow(unused_unsafe)] - ::zerovec::ZeroMap2d::from_parts_unchecked( - unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(&[99u8, 111u8]) }, - unsafe { 
::zerovec::ZeroVec::from_bytes_unchecked(&[2u8, 0u8, 0u8, 0u8]) }, - unsafe { - ::zerovec::VarZeroVec::from_bytes_unchecked(&[ - 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 122u8, 104u8, 122u8, 104u8, 45u8, - 72u8, 97u8, 110u8, 116u8, - ]) - }, - unsafe { - ::zerovec::VarZeroVec::from_bytes_unchecked(&[ - 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 6u8, 0u8, 112u8, 105u8, 110u8, 121u8, 105u8, - 110u8, 115u8, 116u8, 114u8, 111u8, 107u8, 101u8, - ]) - }, - ) - }, - }; diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/mod.rs b/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/mod.rs new file mode 100644 index 0000000000000..9023647138b1b --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/mod.rs @@ -0,0 +1,6 @@ +// @generated +type DataStruct = < :: icu_provider_adapters :: fallback :: provider :: CollationFallbackSupplementV1Marker as :: icu_provider :: DataMarker > :: Yokeable ; +pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> { + locale.is_empty().then(|| &UND) +} +static UND: DataStruct = include!("und.rs.data"); diff --git a/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/und.rs.data b/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/und.rs.data new file mode 100644 index 0000000000000..7d70e78c32750 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/fallback/supplement/co_v1/und.rs.data @@ -0,0 +1,36 @@ +::icu_provider_adapters::fallback::provider::LocaleFallbackSupplementV1 { + parents: unsafe { + #[allow(unused_unsafe)] + ::zerovec::ZeroMap::from_parts_unchecked( + unsafe { + ::zerovec::VarZeroVec::from_bytes_unchecked(&[ + 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 121u8, 117u8, 101u8, + ]) + }, + unsafe { + ::zerovec::ZeroVec::from_bytes_unchecked(&[ + 122u8, 104u8, 0u8, 1u8, 72u8, 97u8, 110u8, 116u8, 0u8, 0u8, 0u8, 0u8, + ]) + }, + ) + }, + unicode_extension_defaults: unsafe { + #[allow(unused_unsafe)] + ::zerovec::ZeroMap2d::from_parts_unchecked( + unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(&[99u8, 111u8]) }, + unsafe { ::zerovec::ZeroVec::from_bytes_unchecked(&[2u8, 0u8, 0u8, 0u8]) }, + unsafe { + ::zerovec::VarZeroVec::from_bytes_unchecked(&[ + 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 122u8, 104u8, 122u8, 104u8, 45u8, + 72u8, 97u8, 110u8, 116u8, + ]) + }, + unsafe { + ::zerovec::VarZeroVec::from_bytes_unchecked(&[ + 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 6u8, 0u8, 112u8, 105u8, 110u8, 121u8, 105u8, + 110u8, 115u8, 116u8, 114u8, 111u8, 107u8, 101u8, + ]) + }, + ) + }, +} diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1.rs b/compiler/rustc_baked_icu_data/src/data/list/and_v1.rs deleted file mode 100644 index 9cae549e118d5..0000000000000 --- a/compiler/rustc_baked_icu_data/src/data/list/and_v1.rs +++ /dev/null @@ -1,1161 +0,0 @@ -// @generated -type DataStruct = <::icu_list::provider::AndListV1Marker as ::icu_provider::DataMarker>::Yokeable; -pub static DATA: litemap::LiteMap<&str, &DataStruct, &[(&str, &DataStruct)]> = - litemap::LiteMap::from_sorted_store_unchecked(&[ - ("en", EN), - ("es", ES), - ("fr", FR), - ("it", IT), - ("ja", JA), - ("pt", PT), - ("ru", RU), - ("tr", TR), - ("und", UND), - ("zh", ZH_ZH_HANS), - ("zh-Hans", ZH_ZH_HANS), - ("zh-Hant", ZH_HANT), - ]); -static EN: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - 
::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", and ", 6u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" and ", 5u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", & ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" & ", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, -]); -static ES: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) - }, - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[ - 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8, - 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8, - 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, - 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 
7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, - 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, - 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, - 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8, - 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, - 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, - 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, - 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, - 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, - 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, - 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, - 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8, - 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, - 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, - 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, - 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, - 0u8, 35u8, 0u8, 0u8, 0u8, - ]) - }, - pattern: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - }), - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) - }, - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[ - 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8, - 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8, - 45u8, 115u8, 112u8, 97u8, 114u8, 
115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, - 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, - 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, - 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, - 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8, - 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, - 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, - 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, - 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, - 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, - 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, - 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, - 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8, - 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, - 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, - 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, - 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, - 0u8, 35u8, 0u8, 0u8, 0u8, - ]) - }, - pattern: unsafe 
{ - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - }), - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) - }, - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[ - 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8, - 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8, - 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, - 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, - 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, - 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, - 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8, - 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, - 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, - 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, - 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, - 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, - 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, - 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, - 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8, - 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 
10u8, 1u8, 0u8, 0u8, - 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, - 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, - 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, - 0u8, 35u8, 0u8, 0u8, 0u8, - ]) - }, - pattern: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - }), - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) - }, - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[ - 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8, - 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8, - 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, - 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, - 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, - 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, - 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8, - 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, - 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, - 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 2u8, 0u8, 
0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, - 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, - 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, - 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, - 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, - 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8, - 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, - 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, - 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, - 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, - 0u8, 35u8, 0u8, 0u8, 0u8, - ]) - }, - pattern: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - }), - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) - }, - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[ - 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8, - 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8, - 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, - 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, - 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, - 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, - 17u8, 17u8, 17u8, 
17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, - 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8, - 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, - 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, - 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, - 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, - 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, - 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, - 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, - 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8, - 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, - 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, - 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, - 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, - 0u8, 35u8, 0u8, 0u8, 0u8, - ]) - }, - pattern: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - }), - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) - }, - special_case: Some(::icu_list::provider::SpecialCasePattern { - condition: unsafe { - ::icu_list::provider::StringMatcher::from_dfa_bytes_unchecked(&[ - 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, 120u8, 45u8, - 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, 45u8, 100u8, 102u8, 97u8, - 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, - 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 
0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 1u8, 2u8, 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, - 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, 13u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, - 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, - 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, - 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, - 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, - 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, - 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 22u8, - 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, - 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, - 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, - 0u8, 5u8, 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, - 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, - 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, - 4u8, 7u8, 9u8, 9u8, 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, - 23u8, 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, - 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 236u8, - 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, - 25u8, 1u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 68u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 16u8, 0u8, 0u8, - 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, 0u8, - 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, 0u8, 0u8, 0u8, - 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, - 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, - 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, - 0u8, 35u8, 0u8, 0u8, 0u8, - ]) - }, - pattern: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - }), - }, -]); -static FR: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - 
::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, -]); -static IT: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - 
::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - special_case: None, - }, -]); -static JA: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, -]); -static PT: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - special_case: None, - }, - 
::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, -]); -static RU: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - 
::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, -]); -static TR: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, -]); -static UND: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - 
::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) - }, - special_case: None, - }, -]); -static ZH_HANT: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - 
::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) - }, - special_case: None, - }, -]); -static ZH_ZH_HANS: &DataStruct = &::icu_list::provider::ListFormatterPatternsV1([ - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, - ::icu_list::provider::ConditionalListJoinerPattern { - default: unsafe { - ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) - }, - special_case: None, - }, -]); diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/en.rs.data b/compiler/rustc_baked_icu_data/src/data/list/and_v1/en.rs.data new file mode 100644 index 0000000000000..cb5cbfa87c239 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/en.rs.data @@ -0,0 +1,74 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", and ", 6u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" and ", 5u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + 
::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", & ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" & ", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, +]) diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/es.rs.data b/compiler/rustc_baked_icu_data/src/data/list/and_v1/es.rs.data new file mode 100644 index 0000000000000..51f9109751818 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/es.rs.data @@ -0,0 +1,836 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) + }, + special_case: Some(::icu_list::provider::SpecialCasePattern { + condition: unsafe { + ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( + if cfg!(target_endian = "little") { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 
16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, + 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, + 0u8, + ] + } else { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 0u8, 0u8, 254u8, 255u8, 0u8, 0u8, 0u8, 2u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 
14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 0u8, 0u8, 1u8, + 40u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 4u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, + 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, + ] + }, + ) + }, + pattern: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + }), + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) + }, + special_case: Some(::icu_list::provider::SpecialCasePattern { + condition: unsafe { + ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( + if cfg!(target_endian = "little") { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, 0u8, 2u8, 
0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, + 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, + 0u8, + ] + } else { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 
101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 0u8, 0u8, 254u8, 255u8, 0u8, 0u8, 0u8, 2u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 0u8, 0u8, 1u8, + 40u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 4u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, + 
0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, + ] + }, + ) + }, + pattern: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + }), + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) + }, + special_case: Some(::icu_list::provider::SpecialCasePattern { + condition: unsafe { + ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( + if cfg!(target_endian = "little") { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, 
+ 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, + 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, + 0u8, + ] + } else { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 0u8, 0u8, 254u8, 255u8, 0u8, 0u8, 0u8, 2u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 0u8, 0u8, 1u8, + 40u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 
0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 4u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, + 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, + ] + }, + ) + }, + pattern: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + }), + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) + }, + special_case: Some(::icu_list::provider::SpecialCasePattern { + condition: unsafe { + ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( + if cfg!(target_endian = "little") { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 
24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, + 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, + 0u8, + ] + } else { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 0u8, 0u8, 254u8, 255u8, 0u8, 0u8, 0u8, 2u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 
19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 0u8, 0u8, 1u8, + 40u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 4u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, + 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, + ] + }, + ) + }, + pattern: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + }), + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) + }, + special_case: Some(::icu_list::provider::SpecialCasePattern { + condition: unsafe { + ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( + if cfg!(target_endian = "little") { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 
0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, + 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, + 0u8, + ] + } else { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 0u8, 0u8, 254u8, 255u8, 0u8, 0u8, 0u8, 2u8, 0u8, + 
0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 0u8, 0u8, 1u8, + 40u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 4u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, + 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, + ] + }, + ) + }, + pattern: unsafe { + 
::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + }), + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" y ", 3u8) + }, + special_case: Some(::icu_list::provider::SpecialCasePattern { + condition: unsafe { + ::icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked( + if cfg!(target_endian = "little") { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 255u8, 254u8, 0u8, 0u8, 2u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 40u8, 1u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 
0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 4u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 9u8, + 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, + 0u8, + ] + } else { + &[ + 114u8, 117u8, 115u8, 116u8, 45u8, 114u8, 101u8, 103u8, 101u8, + 120u8, 45u8, 97u8, 117u8, 116u8, 111u8, 109u8, 97u8, 116u8, 97u8, + 45u8, 100u8, 102u8, 97u8, 45u8, 115u8, 112u8, 97u8, 114u8, 115u8, + 101u8, 0u8, 0u8, 0u8, 0u8, 254u8, 255u8, 0u8, 0u8, 0u8, 2u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 2u8, + 2u8, 2u8, 3u8, 4u8, 4u8, 5u8, 6u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, 7u8, + 7u8, 7u8, 7u8, 7u8, 8u8, 9u8, 9u8, 9u8, 10u8, 11u8, 11u8, 12u8, + 13u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, 14u8, + 14u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, + 15u8, 15u8, 15u8, 15u8, 15u8, 15u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, 16u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, + 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 17u8, 18u8, + 18u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, + 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 19u8, 20u8, 21u8, + 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, 21u8, + 22u8, 23u8, 23u8, 24u8, 25u8, 25u8, 25u8, 26u8, 27u8, 27u8, 27u8, + 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 27u8, 0u8, 0u8, 1u8, + 40u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 1u8, 128u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 1u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 5u8, 0u8, 5u8, + 5u8, 6u8, 6u8, 12u8, 12u8, 13u8, 13u8, 0u8, 0u8, 83u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 83u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 0u8, 27u8, 0u8, 0u8, 18u8, 0u8, + 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 0u8, 3u8, 0u8, 6u8, 6u8, 13u8, 13u8, + 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 104u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 14u8, 0u8, 0u8, 0u8, 2u8, 2u8, 4u8, 7u8, 9u8, 9u8, + 11u8, 14u8, 19u8, 19u8, 20u8, 20u8, 21u8, 21u8, 22u8, 22u8, 23u8, + 23u8, 24u8, 24u8, 25u8, 25u8, 26u8, 26u8, 0u8, 0u8, 68u8, 0u8, 0u8, + 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, 68u8, 0u8, 0u8, 0u8, + 68u8, 0u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 206u8, 0u8, 0u8, 0u8, + 221u8, 0u8, 0u8, 0u8, 236u8, 0u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 251u8, 0u8, 0u8, 0u8, 10u8, 1u8, 0u8, 0u8, 25u8, 1u8, 0u8, 0u8, + 18u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 68u8, + 0u8, 
0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 17u8, 17u8, 0u8, + 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, + 17u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, + 0u8, 15u8, 16u8, 0u8, 0u8, 191u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 2u8, 0u8, 16u8, 17u8, 0u8, 0u8, 221u8, 0u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 17u8, 0u8, 0u8, 221u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 2u8, 0u8, 15u8, 15u8, 0u8, + 0u8, 221u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, + 4u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, + 0u8, 0u8, 9u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, 0u8, 18u8, 0u8, 0u8, + 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 0u8, 35u8, 0u8, 0u8, 0u8, + 35u8, + ] + }, + ) + }, + pattern: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + }), + }, +]) diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/fr.rs.data b/compiler/rustc_baked_icu_data/src/data/list/and_v1/fr.rs.data new file mode 100644 index 0000000000000..66ec8f600f48a --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/fr.rs.data @@ -0,0 +1,74 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" et ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, +]) diff --git 
a/compiler/rustc_baked_icu_data/src/data/list/and_v1/it.rs.data b/compiler/rustc_baked_icu_data/src/data/list/and_v1/it.rs.data new file mode 100644 index 0000000000000..cbccf1120d2ce --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/it.rs.data @@ -0,0 +1,74 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + special_case: None, + }, +]) diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/ja.rs.data b/compiler/rustc_baked_icu_data/src/data/list/and_v1/ja.rs.data new file mode 100644 index 0000000000000..9fd168375cbed --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/ja.rs.data @@ -0,0 +1,74 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + 
::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, +]) diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/mod.rs b/compiler/rustc_baked_icu_data/src/data/list/and_v1/mod.rs new file mode 100644 index 0000000000000..e20941f0c6767 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/mod.rs @@ -0,0 +1,22 @@ +// @generated +type DataStruct = <::icu_list::provider::AndListV1Marker as ::icu_provider::DataMarker>::Yokeable; +pub fn lookup(locale: &icu_provider::DataLocale) -> Option<&'static DataStruct> { + static KEYS: [&str; 12usize] = + ["en", "es", "fr", "it", "ja", "pt", "ru", "tr", "und", "zh", "zh-Hans", "zh-Hant"]; + static DATA: [&DataStruct; 12usize] = + [&EN, &ES, &FR, &IT, &JA, &PT, &RU, &TR, &UND, &ZH, &ZH, &ZH_HANT]; + KEYS.binary_search_by(|k| locale.strict_cmp(k.as_bytes()).reverse()) + .ok() + .map(|i| unsafe { *DATA.get_unchecked(i) }) +} +static EN: DataStruct = include!("en.rs.data"); +static ES: DataStruct = include!("es.rs.data"); +static FR: DataStruct = include!("fr.rs.data"); +static IT: DataStruct = include!("it.rs.data"); +static JA: DataStruct = include!("ja.rs.data"); +static PT: DataStruct = include!("pt.rs.data"); +static RU: DataStruct = include!("ru.rs.data"); +static TR: DataStruct = include!("tr.rs.data"); +static UND: DataStruct = include!("und.rs.data"); +static ZH_HANT: DataStruct = include!("zh-Hant.rs.data"); +static ZH: DataStruct = include!("zh.rs.data"); diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/pt.rs.data b/compiler/rustc_baked_icu_data/src/data/list/and_v1/pt.rs.data new file mode 100644 index 0000000000000..403975213efa3 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/pt.rs.data @@ -0,0 +1,74 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + 
::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" e ", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, +]) diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/ru.rs.data b/compiler/rustc_baked_icu_data/src/data/list/and_v1/ru.rs.data new file mode 100644 index 0000000000000..933cb85c8fe4a --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/ru.rs.data @@ -0,0 +1,74 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8) + }, + special_case: None, + }, + 
::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" и ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, +]) diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/tr.rs.data b/compiler/rustc_baked_icu_data/src/data/list/and_v1/tr.rs.data new file mode 100644 index 0000000000000..286eaf69f3478 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/tr.rs.data @@ -0,0 +1,74 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(" ve ", 4u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, +]) diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/und.rs.data 
b/compiler/rustc_baked_icu_data/src/data/list/and_v1/und.rs.data new file mode 100644 index 0000000000000..2d2c9bcecb1b6 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/und.rs.data @@ -0,0 +1,74 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked(", ", 2u8) + }, + special_case: None, + }, +]) diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh-Hant.rs.data b/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh-Hant.rs.data new file mode 100644 index 0000000000000..5d96cc85e8c21 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh-Hant.rs.data @@ -0,0 +1,74 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) + }, + special_case: None, + }, + 
::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) + }, + special_case: None, + }, +]) diff --git a/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh.rs.data b/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh.rs.data new file mode 100644 index 0000000000000..4a38374caf4c4 --- /dev/null +++ b/compiler/rustc_baked_icu_data/src/data/list/and_v1/zh.rs.data @@ -0,0 +1,74 @@ +::icu_list::provider::ListFormatterPatternsV1([ + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("和", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + 
::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, + ::icu_list::provider::ConditionalListJoinerPattern { + default: unsafe { + ::icu_list::provider::ListJoinerPattern::from_parts_unchecked("、", 3u8) + }, + special_case: None, + }, +]) diff --git a/compiler/rustc_baked_icu_data/src/data/mod.rs b/compiler/rustc_baked_icu_data/src/data/mod.rs index a6a71c79cd1ba..ce33339ad9983 100644 --- a/compiler/rustc_baked_icu_data/src/data/mod.rs +++ b/compiler/rustc_baked_icu_data/src/data/mod.rs @@ -1,90 +1,113 @@ // @generated mod fallback; mod list; -/// This data provider was programmatically generated by [`icu_datagen`]( -/// https://unicode-org.github.io/icu4x-docs/doc/icu_datagen/enum.Out.html#variant.Module). -#[non_exhaustive] -pub struct BakedDataProvider; use ::icu_provider::prelude::*; -impl DataProvider<::icu_list::provider::AndListV1Marker> for BakedDataProvider { - fn load( - &self, - req: DataRequest, - ) -> Result, DataError> { - Ok(DataResponse { - metadata: Default::default(), - payload: Some(DataPayload::from_owned(zerofrom::ZeroFrom::zero_from( - *list::and_v1::DATA - .get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()) +/// Implement [`DataProvider`] on the given struct using the data +/// hardcoded in this module. This allows the struct to be used with +/// `icu`'s `_unstable` constructors. +/// +/// This macro can only be called from its definition-site, i.e. right +/// after `include!`-ing the generated module. +/// +/// ```compile_fail +/// struct MyDataProvider; +/// include!("/path/to/generated/mod.rs"); +/// impl_data_provider(MyDataProvider); +/// ``` +#[allow(unused_macros)] +macro_rules! 
impl_data_provider { + ($ provider : path) => { + impl DataProvider<::icu_list::provider::AndListV1Marker> for $provider { + fn load(&self, req: DataRequest) -> Result, DataError> { + list::and_v1::lookup(&req.locale) + .map(zerofrom::ZeroFrom::zero_from) + .map(DataPayload::from_owned) + .map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) }) + .ok_or_else(|| DataErrorKind::MissingLocale.with_req(::icu_list::provider::AndListV1Marker::KEY, req)) + } + } + impl DataProvider<::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker> for $provider { + fn load( + &self, + req: DataRequest, + ) -> Result, DataError> { + fallback::supplement::co_v1::lookup(&req.locale) + .map(zerofrom::ZeroFrom::zero_from) + .map(DataPayload::from_owned) + .map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) }) .ok_or_else(|| { DataErrorKind::MissingLocale - .with_req(::icu_list::provider::AndListV1Marker::KEY, req) - })?, - ))), - }) - } -} -impl DataProvider<::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker> - for BakedDataProvider -{ - fn load( - &self, - req: DataRequest, - ) -> Result< - DataResponse< - ::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker, - >, - DataError, - > { - Ok(DataResponse { - metadata: Default::default(), - payload: Some(DataPayload::from_owned(zerofrom::ZeroFrom::zero_from( - *fallback::supplement::co_v1::DATA.get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).ok_or_else(|| { - DataErrorKind::MissingLocale.with_req(::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker::KEY, req) - })?, - ))), - }) - } -} -impl DataProvider<::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker> - for BakedDataProvider -{ - fn load( - &self, - req: DataRequest, - ) -> Result< - DataResponse< - ::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker, - >, - DataError, - > { - Ok(DataResponse { - metadata: Default::default(), - payload: Some(DataPayload::from_owned(zerofrom::ZeroFrom::zero_from( - *fallback::likelysubtags_v1::DATA.get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).ok_or_else(|| { - DataErrorKind::MissingLocale.with_req(::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker::KEY, req) - })?, - ))), - }) - } + .with_req(::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker::KEY, req) + }) + } + } + impl DataProvider<::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker> for $provider { + fn load( + &self, + req: DataRequest, + ) -> Result, DataError> { + fallback::likelysubtags_v1::lookup(&req.locale) + .map(zerofrom::ZeroFrom::zero_from) + .map(DataPayload::from_owned) + .map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) }) + .ok_or_else(|| { + DataErrorKind::MissingLocale + .with_req(::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker::KEY, req) + }) + } + } + impl DataProvider<::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker> for $provider { + fn load( + &self, + req: DataRequest, + ) -> Result, DataError> { + fallback::parents_v1::lookup(&req.locale) + .map(zerofrom::ZeroFrom::zero_from) + .map(DataPayload::from_owned) + .map(|payload| DataResponse { metadata: Default::default(), payload: Some(payload) }) + .ok_or_else(|| { + 
DataErrorKind::MissingLocale.with_req(::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker::KEY, req) + }) + } + } + }; } -impl DataProvider<::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker> - for BakedDataProvider -{ - fn load( - &self, - req: DataRequest, - ) -> Result< - DataResponse<::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker>, - DataError, - > { - Ok(DataResponse { - metadata: Default::default(), - payload: Some(DataPayload::from_owned(zerofrom::ZeroFrom::zero_from( - *fallback::parents_v1::DATA.get_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).ok_or_else(|| { - DataErrorKind::MissingLocale.with_req(::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker::KEY, req) - })?, - ))), - }) - } +/// Implement [`AnyProvider`] on the given struct using the data +/// hardcoded in this module. This allows the struct to be used with +/// `icu`'s `_any` constructors. +/// +/// This macro can only be called from its definition-site, i.e. right +/// after `include!`-ing the generated module. +/// +/// ```compile_fail +/// struct MyAnyProvider; +/// include!("/path/to/generated/mod.rs"); +/// impl_any_provider(MyAnyProvider); +/// ``` +#[allow(unused_macros)] +macro_rules! impl_any_provider { + ($ provider : path) => { + impl AnyProvider for $provider { + fn load_any(&self, key: DataKey, req: DataRequest) -> Result { + const ANDLISTV1MARKER: ::icu_provider::DataKeyHash = ::icu_list::provider::AndListV1Marker::KEY.hashed(); + const COLLATIONFALLBACKSUPPLEMENTV1MARKER: ::icu_provider::DataKeyHash = + ::icu_provider_adapters::fallback::provider::CollationFallbackSupplementV1Marker::KEY.hashed(); + const LOCALEFALLBACKLIKELYSUBTAGSV1MARKER: ::icu_provider::DataKeyHash = + ::icu_provider_adapters::fallback::provider::LocaleFallbackLikelySubtagsV1Marker::KEY.hashed(); + const LOCALEFALLBACKPARENTSV1MARKER: ::icu_provider::DataKeyHash = + ::icu_provider_adapters::fallback::provider::LocaleFallbackParentsV1Marker::KEY.hashed(); + match key.hashed() { + ANDLISTV1MARKER => list::and_v1::lookup(&req.locale).map(AnyPayload::from_static_ref), + COLLATIONFALLBACKSUPPLEMENTV1MARKER => fallback::supplement::co_v1::lookup(&req.locale).map(AnyPayload::from_static_ref), + LOCALEFALLBACKLIKELYSUBTAGSV1MARKER => fallback::likelysubtags_v1::lookup(&req.locale).map(AnyPayload::from_static_ref), + LOCALEFALLBACKPARENTSV1MARKER => fallback::parents_v1::lookup(&req.locale).map(AnyPayload::from_static_ref), + _ => return Err(DataErrorKind::MissingDataKey.with_req(key, req)), + } + .map(|payload| AnyResponse { payload: Some(payload), metadata: Default::default() }) + .ok_or_else(|| DataErrorKind::MissingLocale.with_req(key, req)) + } + } + }; } +pub struct BakedDataProvider; +impl_data_provider!(BakedDataProvider); diff --git a/compiler/rustc_error_messages/Cargo.toml b/compiler/rustc_error_messages/Cargo.toml index 0c705d2ecf5ff..27783d60be46d 100644 --- a/compiler/rustc_error_messages/Cargo.toml +++ b/compiler/rustc_error_messages/Cargo.toml @@ -16,10 +16,9 @@ rustc_span = { path = "../rustc_span" } rustc_macros = { path = "../rustc_macros" } tracing = "0.1" unic-langid = { version = "0.9.0", features = ["macros"] } -icu_list = "1.0.0" -writeable = "0.5.0" -icu_locid = "1.0.0" -icu_provider_adapters = "1.0.0" +icu_list = "1.1.0" +icu_locid = "1.1.0" +icu_provider_adapters = "1.1.0" [features] rustc_use_parallel_compiler = ['rustc_baked_icu_data/rustc_use_parallel_compiler'] From 5a7342c3dde43c96a71bc27995030896342761f6 
Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Thu, 2 Feb 2023 20:58:22 -0800 Subject: [PATCH 270/501] Stop using `into_iter` in `array::map` --- library/core/src/array/drain.rs | 51 +++++++++++++++++++++++++++++++++ library/core/src/array/mod.rs | 34 +++++++++++++++------- library/core/tests/array.rs | 25 ++++++++++++++++ tests/codegen/array-map.rs | 48 +++++++++++++++++++++++++++++++ 4 files changed, 147 insertions(+), 11 deletions(-) create mode 100644 library/core/src/array/drain.rs create mode 100644 tests/codegen/array-map.rs diff --git a/library/core/src/array/drain.rs b/library/core/src/array/drain.rs new file mode 100644 index 0000000000000..5ca93d54f876f --- /dev/null +++ b/library/core/src/array/drain.rs @@ -0,0 +1,51 @@ +use crate::iter::TrustedLen; +use crate::mem::ManuallyDrop; +use crate::ptr::drop_in_place; +use crate::slice; + +// INVARIANT: It's ok to drop the remainder of the inner iterator. +pub(crate) struct Drain<'a, T>(slice::IterMut<'a, T>); + +pub(crate) fn drain_array_with( + array: [T; N], + func: impl for<'a> FnOnce(Drain<'a, T>) -> R, +) -> R { + let mut array = ManuallyDrop::new(array); + // SAFETY: Now that the local won't drop it, it's ok to construct the `Drain` which will. + let drain = Drain(array.iter_mut()); + func(drain) +} + +impl Drop for Drain<'_, T> { + fn drop(&mut self) { + // SAFETY: By the type invariant, we're allowed to drop all these. + unsafe { drop_in_place(self.0.as_mut_slice()) } + } +} + +impl Iterator for Drain<'_, T> { + type Item = T; + + #[inline] + fn next(&mut self) -> Option { + let p: *const T = self.0.next()?; + // SAFETY: The iterator was already advanced, so we won't drop this later. + Some(unsafe { p.read() }) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let n = self.len(); + (n, Some(n)) + } +} + +impl ExactSizeIterator for Drain<'_, T> { + #[inline] + fn len(&self) -> usize { + self.0.len() + } +} + +// SAFETY: This is a 1:1 wrapper for a slice iterator, which is also `TrustedLen`. +unsafe impl TrustedLen for Drain<'_, T> {} diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs index 2825e0bbb4385..ee340f385431d 100644 --- a/library/core/src/array/mod.rs +++ b/library/core/src/array/mod.rs @@ -17,9 +17,12 @@ use crate::ops::{ }; use crate::slice::{Iter, IterMut}; +mod drain; mod equality; mod iter; +pub(crate) use drain::drain_array_with; + #[stable(feature = "array_value_iter", since = "1.51.0")] pub use iter::IntoIter; @@ -513,9 +516,12 @@ impl [T; N] { where F: FnMut(T) -> U, { - // SAFETY: we know for certain that this iterator will yield exactly `N` - // items. - unsafe { collect_into_array_unchecked(&mut IntoIterator::into_iter(self).map(f)) } + drain_array_with(self, |iter| { + let mut iter = iter.map(f); + // SAFETY: we know for certain that this iterator will yield exactly `N` + // items. + unsafe { collect_into_array_unchecked(&mut iter) } + }) } /// A fallible function `f` applied to each element on array `self` in order to @@ -552,9 +558,12 @@ impl [T; N] { R: Try, R::Residual: Residual<[R::Output; N]>, { - // SAFETY: we know for certain that this iterator will yield exactly `N` - // items. - unsafe { try_collect_into_array_unchecked(&mut IntoIterator::into_iter(self).map(f)) } + drain_array_with(self, |iter| { + let mut iter = iter.map(f); + // SAFETY: we know for certain that this iterator will yield exactly `N` + // items. + unsafe { try_collect_into_array_unchecked(&mut iter) } + }) } /// 'Zips up' two arrays into a single array of pairs. 
@@ -575,11 +584,14 @@ impl [T; N] { /// ``` #[unstable(feature = "array_zip", issue = "80094")] pub fn zip(self, rhs: [U; N]) -> [(T, U); N] { - let mut iter = IntoIterator::into_iter(self).zip(rhs); - - // SAFETY: we know for certain that this iterator will yield exactly `N` - // items. - unsafe { collect_into_array_unchecked(&mut iter) } + drain_array_with(self, |lhs| { + drain_array_with(rhs, |rhs| { + let mut iter = crate::iter::zip(lhs, rhs); + // SAFETY: we know for certain that this iterator will yield exactly `N` + // items. + unsafe { collect_into_array_unchecked(&mut iter) } + }) + }) } /// Returns a slice containing the entire array. Equivalent to `&s[..]`. diff --git a/library/core/tests/array.rs b/library/core/tests/array.rs index f268fe3ae7ba8..5327e4f813925 100644 --- a/library/core/tests/array.rs +++ b/library/core/tests/array.rs @@ -700,3 +700,28 @@ fn array_into_iter_rfold() { let s = it.rfold(10, |a, b| 10 * a + b); assert_eq!(s, 10432); } + +#[cfg(not(panic = "abort"))] +#[test] +fn array_map_drops_unmapped_elements_on_panic() { + struct DropCounter<'a>(usize, &'a AtomicUsize); + impl Drop for DropCounter<'_> { + fn drop(&mut self) { + self.1.fetch_add(1, Ordering::SeqCst); + } + } + + const MAX: usize = 11; + for panic_after in 0..MAX { + let counter = AtomicUsize::new(0); + let a = array::from_fn::<_, 11, _>(|i| DropCounter(i, &counter)); + let success = std::panic::catch_unwind(|| { + let _ = a.map(|x| { + assert!(x.0 < panic_after); + assert_eq!(counter.load(Ordering::SeqCst), x.0); + }); + }); + assert!(success.is_err()); + assert_eq!(counter.load(Ordering::SeqCst), MAX); + } +} diff --git a/tests/codegen/array-map.rs b/tests/codegen/array-map.rs new file mode 100644 index 0000000000000..37585371a3222 --- /dev/null +++ b/tests/codegen/array-map.rs @@ -0,0 +1,48 @@ +// compile-flags: -C opt-level=3 -C target-cpu=x86-64-v3 -C llvm-args=-x86-asm-syntax=intel --emit=llvm-ir,asm +// no-system-llvm +// only-x86_64 +// ignore-debug (the extra assertions get in the way) + +#![crate_type = "lib"] +#![feature(array_zip)] + +// CHECK-LABEL: @short_integer_map +#[no_mangle] +pub fn short_integer_map(x: [u32; 8]) -> [u32; 8] { + // CHECK: load <8 x i32> + // CHECK: shl <8 x i32> + // CHECK: or <8 x i32> + // CHECK: store <8 x i32> + x.map(|x| 2 * x + 1) +} + +// CHECK-LABEL: @short_integer_zip_map +#[no_mangle] +pub fn short_integer_zip_map(x: [u32; 8], y: [u32; 8]) -> [u32; 8] { + // CHECK: %[[A:.+]] = load <8 x i32> + // CHECK: %[[B:.+]] = load <8 x i32> + // CHECK: sub <8 x i32> %[[A]], %[[B]] + // CHECK: store <8 x i32> + x.zip(y).map(|(x, y)| x - y) +} + +// This test is checking that LLVM can SRoA away a bunch of the overhead, +// like fully moving the iterators to registers. Notably, previous implementations +// of `map` ended up `alloca`ing the whole `array::IntoIterator`, meaning both a +// hard-to-eliminate `memcpy` and that the iteration counts needed to be written +// out to stack every iteration, even for infallible operations on `Copy` types. +// +// This is still imperfect, as there's more copies than would be ideal, +// but hopefully work like #103830 will improve that in future, +// and update this test to be stricter. 
+// +// CHECK-LABEL: @long_integer_map +#[no_mangle] +pub fn long_integer_map(x: [u32; 64]) -> [u32; 64] { + // CHECK: start: + // CHECK-NEXT: alloca [{{64|65}} x i32] + // CHECK-NEXT: alloca [{{64|65}} x i32] + // CHECK-NEXT: alloca %"core::mem::manually_drop::ManuallyDrop<[u32; 64]>" + // CHECK-NOT: alloca + x.map(|x| 2 * x + 1) +} From 52df0558ea349fa65036e61f0a647ea8072ec3f5 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Thu, 2 Feb 2023 22:15:23 -0800 Subject: [PATCH 271/501] Stop forcing `array::map` through an unnecessary `Result` --- library/core/src/array/mod.rs | 126 +++++++++++++++++++--------------- tests/codegen/array-map.rs | 5 +- 2 files changed, 71 insertions(+), 60 deletions(-) diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs index ee340f385431d..45ec68e6e7aa3 100644 --- a/library/core/src/array/mod.rs +++ b/library/core/src/array/mod.rs @@ -825,14 +825,13 @@ impl [T; N] { /// Pulls `N` items from `iter` and returns them as an array. If the iterator /// yields fewer than `N` items, this function exhibits undefined behavior. /// -/// See [`try_collect_into_array`] for more information. -/// -/// /// # Safety /// /// It is up to the caller to guarantee that `iter` yields at least `N` items. /// Violating this condition causes undefined behavior. -unsafe fn try_collect_into_array_unchecked(iter: &mut I) -> R::TryType +unsafe fn try_collect_into_array_unchecked( + iter: &mut I, +) -> ChangeOutputType where // Note: `TrustedLen` here is somewhat of an experiment. This is just an // internal function, so feel free to remove if this bound turns out to be a @@ -845,11 +844,21 @@ where debug_assert!(N <= iter.size_hint().1.unwrap_or(usize::MAX)); debug_assert!(N <= iter.size_hint().0); - // SAFETY: covered by the function contract. - unsafe { try_collect_into_array(iter).unwrap_unchecked() } + let mut array = MaybeUninit::uninit_array::(); + let cf = try_collect_into_array_erased(iter, &mut array); + match cf { + ControlFlow::Break(r) => FromResidual::from_residual(r), + ControlFlow::Continue(initialized) => { + debug_assert_eq!(initialized, N); + // SAFETY: because of our function contract, all the elements + // must have been initialized. + let output = unsafe { MaybeUninit::array_assume_init(array) }; + Try::from_output(output) + } + } } -// Infallible version of `try_collect_into_array_unchecked`. +/// Infallible version of [`try_collect_into_array_unchecked`]. unsafe fn collect_into_array_unchecked(iter: &mut I) -> [I::Item; N] where I: Iterator + TrustedLen, @@ -864,63 +873,48 @@ where } } -/// Pulls `N` items from `iter` and returns them as an array. If the iterator -/// yields fewer than `N` items, `Err` is returned containing an iterator over -/// the already yielded items. +/// Rather than *returning* the array, this fills in a passed-in buffer. +/// If any of the iterator elements short-circuit, it drops everything in the +/// buffer and return the error. Otherwise it returns the number of items +/// which were initialized in the buffer. /// -/// Since the iterator is passed as a mutable reference and this function calls -/// `next` at most `N` times, the iterator can still be used afterwards to -/// retrieve the remaining items. +/// (The caller is responsible for dropping those items on success, but not +/// doing that is just a leak, not UB, so this function is itself safe.) /// -/// If `iter.next()` panicks, all items already yielded by the iterator are -/// dropped. 
+/// This means less monomorphization, but more importantly it means that the +/// returned array doesn't need to be copied into the `Result`, since returning +/// the result seemed (2023-01) to cause in an extra `N + 1`-length `alloca` +/// even if it's always `unwrap_unchecked` later. #[inline] -fn try_collect_into_array( +fn try_collect_into_array_erased( iter: &mut I, -) -> Result> + buffer: &mut [MaybeUninit], +) -> ControlFlow where I: Iterator, I::Item: Try, - R: Residual<[T; N]>, { - if N == 0 { - // SAFETY: An empty array is always inhabited and has no validity invariants. - return Ok(Try::from_output(unsafe { mem::zeroed() })); - } - - let mut array = MaybeUninit::uninit_array::(); - let mut guard = Guard { array_mut: &mut array, initialized: 0 }; + let n = buffer.len(); + let mut guard = Guard { array_mut: buffer, initialized: 0 }; - for _ in 0..N { + for _ in 0..n { match iter.next() { Some(item_rslt) => { - let item = match item_rslt.branch() { - ControlFlow::Break(r) => { - return Ok(FromResidual::from_residual(r)); - } - ControlFlow::Continue(elem) => elem, - }; + let item = item_rslt.branch()?; // SAFETY: `guard.initialized` starts at 0, which means push can be called - // at most N times, which this loop does. + // at most `n` times, which this loop does. unsafe { guard.push_unchecked(item); } } - None => { - let alive = 0..guard.initialized; - mem::forget(guard); - // SAFETY: `array` was initialized with exactly `initialized` - // number of elements. - return Err(unsafe { IntoIter::new_unchecked(array, alive) }); - } + None => break, } } + let initialized = guard.initialized; mem::forget(guard); - // SAFETY: All elements of the array were populated in the loop above. - let output = unsafe { array.transpose().assume_init() }; - Ok(Try::from_output(output)) + ControlFlow::Continue(initialized) } /// Panic guard for incremental initialization of arrays. @@ -934,14 +928,14 @@ where /// /// To minimize indirection fields are still pub but callers should at least use /// `push_unchecked` to signal that something unsafe is going on. -pub(crate) struct Guard<'a, T, const N: usize> { +pub(crate) struct Guard<'a, T> { /// The array to be initialized. - pub array_mut: &'a mut [MaybeUninit; N], + pub array_mut: &'a mut [MaybeUninit], /// The number of items that have been initialized so far. pub initialized: usize, } -impl Guard<'_, T, N> { +impl Guard<'_, T> { /// Adds an item to the array and updates the initialized item counter. /// /// # Safety @@ -959,9 +953,9 @@ impl Guard<'_, T, N> { } } -impl Drop for Guard<'_, T, N> { +impl Drop for Guard<'_, T> { fn drop(&mut self) { - debug_assert!(self.initialized <= N); + debug_assert!(self.initialized <= self.array_mut.len()); // SAFETY: this slice will contain only initialized objects. unsafe { @@ -972,15 +966,33 @@ impl Drop for Guard<'_, T, N> { } } -/// Returns the next chunk of `N` items from the iterator or errors with an -/// iterator over the remainder. Used for `Iterator::next_chunk`. +/// Pulls `N` items from `iter` and returns them as an array. If the iterator +/// yields fewer than `N` items, `Err` is returned containing an iterator over +/// the already yielded items. +/// +/// Since the iterator is passed as a mutable reference and this function calls +/// `next` at most `N` times, the iterator can still be used afterwards to +/// retrieve the remaining items. +/// +/// If `iter.next()` panicks, all items already yielded by the iterator are +/// dropped. +/// +/// Used for [`Iterator::next_chunk`]. 
#[inline] -pub(crate) fn iter_next_chunk( - iter: &mut I, -) -> Result<[I::Item; N], IntoIter> -where - I: Iterator, -{ +pub(crate) fn iter_next_chunk( + iter: &mut impl Iterator, +) -> Result<[T; N], IntoIter> { let mut map = iter.map(NeverShortCircuit); - try_collect_into_array(&mut map).map(|NeverShortCircuit(arr)| arr) + let mut array = MaybeUninit::uninit_array::(); + let ControlFlow::Continue(initialized) = try_collect_into_array_erased(&mut map, &mut array); + if initialized == N { + // SAFETY: All elements of the array were populated. + let output = unsafe { MaybeUninit::array_assume_init(array) }; + Ok(output) + } else { + let alive = 0..initialized; + // SAFETY: `array` was initialized with exactly `initialized` + // number of elements. + return Err(unsafe { IntoIter::new_unchecked(array, alive) }); + } } diff --git a/tests/codegen/array-map.rs b/tests/codegen/array-map.rs index 37585371a3222..1154659eea541 100644 --- a/tests/codegen/array-map.rs +++ b/tests/codegen/array-map.rs @@ -1,4 +1,4 @@ -// compile-flags: -C opt-level=3 -C target-cpu=x86-64-v3 -C llvm-args=-x86-asm-syntax=intel --emit=llvm-ir,asm +// compile-flags: -C opt-level=3 -C target-cpu=x86-64-v3 // no-system-llvm // only-x86_64 // ignore-debug (the extra assertions get in the way) @@ -40,8 +40,7 @@ pub fn short_integer_zip_map(x: [u32; 8], y: [u32; 8]) -> [u32; 8] { #[no_mangle] pub fn long_integer_map(x: [u32; 64]) -> [u32; 64] { // CHECK: start: - // CHECK-NEXT: alloca [{{64|65}} x i32] - // CHECK-NEXT: alloca [{{64|65}} x i32] + // CHECK-NEXT: alloca [64 x i32] // CHECK-NEXT: alloca %"core::mem::manually_drop::ManuallyDrop<[u32; 64]>" // CHECK-NOT: alloca x.map(|x| 2 * x + 1) From 5bc328fdeff50b742a8136d0633924514d4d76b8 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Fri, 3 Feb 2023 03:27:51 -0800 Subject: [PATCH 272/501] Allow canonicalizing the `array::map` loop in trusted cases --- library/core/src/array/drain.rs | 33 ++- library/core/src/array/mod.rs | 224 ++++++++---------- .../core/src/iter/adapters/array_chunks.rs | 14 +- library/core/src/iter/adapters/cloned.rs | 15 +- library/core/src/iter/adapters/map.rs | 15 +- library/core/src/iter/adapters/zip.rs | 9 +- library/core/src/iter/mod.rs | 1 + library/core/src/iter/traits/mod.rs | 3 + .../src/iter/traits/unchecked_iterator.rs | 36 +++ library/core/src/ops/try_trait.rs | 9 + library/core/src/slice/iter.rs | 4 +- library/core/src/slice/iter/macros.rs | 9 + library/core/tests/iter/traits/iterator.rs | 3 + tests/codegen/array-map.rs | 4 +- 14 files changed, 237 insertions(+), 142 deletions(-) create mode 100644 library/core/src/iter/traits/unchecked_iterator.rs diff --git a/library/core/src/array/drain.rs b/library/core/src/array/drain.rs index 5ca93d54f876f..5fadf907b6219 100644 --- a/library/core/src/array/drain.rs +++ b/library/core/src/array/drain.rs @@ -1,11 +1,21 @@ -use crate::iter::TrustedLen; +use crate::iter::{TrustedLen, UncheckedIterator}; use crate::mem::ManuallyDrop; use crate::ptr::drop_in_place; use crate::slice; -// INVARIANT: It's ok to drop the remainder of the inner iterator. -pub(crate) struct Drain<'a, T>(slice::IterMut<'a, T>); - +/// A situationally-optimized version of `array.into_iter().for_each(func)`. +/// +/// [`crate::array::IntoIter`]s are great when you need an owned iterator, but +/// storing the entire array *inside* the iterator like that can sometimes +/// pessimize code. 
Notable, it can be more bytes than you really want to move +/// around, and because the array accesses index into it SRoA has a harder time +/// optimizing away the type than it does iterators that just hold a couple pointers. +/// +/// Thus this function exists, which gives a way to get *moved* access to the +/// elements of an array using a small iterator -- no bigger than a slice iterator. +/// +/// The function-taking-a-closure structure makes it safe, as it keeps callers +/// from looking at already-dropped elements. pub(crate) fn drain_array_with( array: [T; N], func: impl for<'a> FnOnce(Drain<'a, T>) -> R, @@ -16,6 +26,11 @@ pub(crate) fn drain_array_with( func(drain) } +/// See [`drain_array_with`] -- this is `pub(crate)` only so it's allowed to be +/// mentioned in the signature of that method. (Otherwise it hits `E0446`.) +// INVARIANT: It's ok to drop the remainder of the inner iterator. +pub(crate) struct Drain<'a, T>(slice::IterMut<'a, T>); + impl Drop for Drain<'_, T> { fn drop(&mut self) { // SAFETY: By the type invariant, we're allowed to drop all these. @@ -49,3 +64,13 @@ impl ExactSizeIterator for Drain<'_, T> { // SAFETY: This is a 1:1 wrapper for a slice iterator, which is also `TrustedLen`. unsafe impl TrustedLen for Drain<'_, T> {} + +impl UncheckedIterator for Drain<'_, T> { + unsafe fn next_unchecked(&mut self) -> T { + // SAFETY: `Drain` is 1:1 with the inner iterator, so if the caller promised + // that there's an element left, the inner iterator has one too. + let p: *const T = unsafe { self.0.next_unchecked() }; + // SAFETY: The iterator was already advanced, so we won't drop this later. + unsafe { p.read() } + } +} diff --git a/library/core/src/array/mod.rs b/library/core/src/array/mod.rs index 45ec68e6e7aa3..ae9f6e70f43c6 100644 --- a/library/core/src/array/mod.rs +++ b/library/core/src/array/mod.rs @@ -10,7 +10,7 @@ use crate::convert::{Infallible, TryFrom}; use crate::error::Error; use crate::fmt; use crate::hash::{self, Hash}; -use crate::iter::TrustedLen; +use crate::iter::UncheckedIterator; use crate::mem::{self, MaybeUninit}; use crate::ops::{ ChangeOutputType, ControlFlow, FromResidual, Index, IndexMut, NeverShortCircuit, Residual, Try, @@ -55,16 +55,11 @@ pub use iter::IntoIter; /// ``` #[inline] #[stable(feature = "array_from_fn", since = "1.63.0")] -pub fn from_fn(mut cb: F) -> [T; N] +pub fn from_fn(cb: F) -> [T; N] where F: FnMut(usize) -> T, { - let mut idx = 0; - [(); N].map(|_| { - let res = cb(idx); - idx += 1; - res - }) + try_from_fn(NeverShortCircuit::wrap_mut_1(cb)).0 } /// Creates an array `[T; N]` where each fallible array element `T` is returned by the `cb` call. @@ -104,9 +99,14 @@ where R: Try, R::Residual: Residual<[R::Output; N]>, { - // SAFETY: we know for certain that this iterator will yield exactly `N` - // items. - unsafe { try_collect_into_array_unchecked(&mut (0..N).map(cb)) } + let mut array = MaybeUninit::uninit_array::(); + match try_from_fn_erased(&mut array, cb) { + ControlFlow::Break(r) => FromResidual::from_residual(r), + ControlFlow::Continue(()) => { + // SAFETY: All elements of the array were populated. + try { unsafe { MaybeUninit::array_assume_init(array) } } + } + } } /// Converts a reference to `T` into a reference to an array of length 1 (without copying). @@ -430,9 +430,7 @@ trait SpecArrayClone: Clone { impl SpecArrayClone for T { #[inline] default fn clone(array: &[T; N]) -> [T; N] { - // SAFETY: we know for certain that this iterator will yield exactly `N` - // items. 
- unsafe { collect_into_array_unchecked(&mut array.iter().cloned()) } + from_trusted_iterator(array.iter().cloned()) } } @@ -516,12 +514,7 @@ impl [T; N] { where F: FnMut(T) -> U, { - drain_array_with(self, |iter| { - let mut iter = iter.map(f); - // SAFETY: we know for certain that this iterator will yield exactly `N` - // items. - unsafe { collect_into_array_unchecked(&mut iter) } - }) + self.try_map(NeverShortCircuit::wrap_mut_1(f)).0 } /// A fallible function `f` applied to each element on array `self` in order to @@ -558,12 +551,7 @@ impl [T; N] { R: Try, R::Residual: Residual<[R::Output; N]>, { - drain_array_with(self, |iter| { - let mut iter = iter.map(f); - // SAFETY: we know for certain that this iterator will yield exactly `N` - // items. - unsafe { try_collect_into_array_unchecked(&mut iter) } - }) + drain_array_with(self, |iter| try_from_trusted_iterator(iter.map(f))) } /// 'Zips up' two arrays into a single array of pairs. @@ -585,12 +573,7 @@ impl [T; N] { #[unstable(feature = "array_zip", issue = "80094")] pub fn zip(self, rhs: [U; N]) -> [(T, U); N] { drain_array_with(self, |lhs| { - drain_array_with(rhs, |rhs| { - let mut iter = crate::iter::zip(lhs, rhs); - // SAFETY: we know for certain that this iterator will yield exactly `N` - // items. - unsafe { collect_into_array_unchecked(&mut iter) } - }) + drain_array_with(rhs, |rhs| from_trusted_iterator(crate::iter::zip(lhs, rhs))) }) } @@ -638,9 +621,7 @@ impl [T; N] { /// ``` #[unstable(feature = "array_methods", issue = "76118")] pub fn each_ref(&self) -> [&T; N] { - // SAFETY: we know for certain that this iterator will yield exactly `N` - // items. - unsafe { collect_into_array_unchecked(&mut self.iter()) } + from_trusted_iterator(self.iter()) } /// Borrows each element mutably and returns an array of mutable references @@ -660,9 +641,7 @@ impl [T; N] { /// ``` #[unstable(feature = "array_methods", issue = "76118")] pub fn each_mut(&mut self) -> [&mut T; N] { - // SAFETY: we know for certain that this iterator will yield exactly `N` - // items. - unsafe { collect_into_array_unchecked(&mut self.iter_mut()) } + from_trusted_iterator(self.iter_mut()) } /// Divides one array reference into two at an index. @@ -822,99 +801,71 @@ impl [T; N] { } } -/// Pulls `N` items from `iter` and returns them as an array. If the iterator -/// yields fewer than `N` items, this function exhibits undefined behavior. +/// Populate an array from the first `N` elements of `iter` /// -/// # Safety +/// # Panics /// -/// It is up to the caller to guarantee that `iter` yields at least `N` items. -/// Violating this condition causes undefined behavior. -unsafe fn try_collect_into_array_unchecked( - iter: &mut I, -) -> ChangeOutputType -where - // Note: `TrustedLen` here is somewhat of an experiment. This is just an - // internal function, so feel free to remove if this bound turns out to be a - // bad idea. In that case, remember to also remove the lower bound - // `debug_assert!` below! - I: Iterator + TrustedLen, - I::Item: Try, - R: Residual<[T; N]>, -{ - debug_assert!(N <= iter.size_hint().1.unwrap_or(usize::MAX)); - debug_assert!(N <= iter.size_hint().0); - - let mut array = MaybeUninit::uninit_array::(); - let cf = try_collect_into_array_erased(iter, &mut array); - match cf { - ControlFlow::Break(r) => FromResidual::from_residual(r), - ControlFlow::Continue(initialized) => { - debug_assert_eq!(initialized, N); - // SAFETY: because of our function contract, all the elements - // must have been initialized. 
- let output = unsafe { MaybeUninit::array_assume_init(array) }; - Try::from_output(output) - } - } +/// If the iterator doesn't actually have enough items. +/// +/// By depending on `TrustedLen`, however, we can do that check up-front (where +/// it easily optimizes away) so it doesn't impact the loop that fills the array. +#[inline] +fn from_trusted_iterator(iter: impl UncheckedIterator) -> [T; N] { + try_from_trusted_iterator(iter.map(NeverShortCircuit)).0 } -/// Infallible version of [`try_collect_into_array_unchecked`]. -unsafe fn collect_into_array_unchecked(iter: &mut I) -> [I::Item; N] +#[inline] +fn try_from_trusted_iterator( + iter: impl UncheckedIterator, +) -> ChangeOutputType where - I: Iterator + TrustedLen, + R: Try, + R::Residual: Residual<[T; N]>, { - let mut map = iter.map(NeverShortCircuit); - - // SAFETY: The same safety considerations w.r.t. the iterator length - // apply for `try_collect_into_array_unchecked` as for - // `collect_into_array_unchecked` - match unsafe { try_collect_into_array_unchecked(&mut map) } { - NeverShortCircuit(array) => array, + assert!(iter.size_hint().0 >= N); + fn next(mut iter: impl UncheckedIterator) -> impl FnMut(usize) -> T { + move |_| { + // SAFETY: We know that `from_fn` will call this at most N times, + // and we checked to ensure that we have at least that many items. + unsafe { iter.next_unchecked() } + } } + + try_from_fn(next(iter)) } -/// Rather than *returning* the array, this fills in a passed-in buffer. -/// If any of the iterator elements short-circuit, it drops everything in the -/// buffer and return the error. Otherwise it returns the number of items -/// which were initialized in the buffer. +/// Version of [`try_from_fn`] using a passed-in slice in order to avoid +/// needing to monomorphize for every array length. /// -/// (The caller is responsible for dropping those items on success, but not -/// doing that is just a leak, not UB, so this function is itself safe.) +/// This takes a generator rather than an iterator so that *at the type level* +/// it never needs to worry about running out of items. When combined with +/// an infallible `Try` type, that means the loop canonicalizes easily, allowing +/// it to optimize well. /// -/// This means less monomorphization, but more importantly it means that the -/// returned array doesn't need to be copied into the `Result`, since returning -/// the result seemed (2023-01) to cause in an extra `N + 1`-length `alloca` -/// even if it's always `unwrap_unchecked` later. +/// It would be *possible* to unify this and [`iter_next_chunk_erased`] into one +/// function that does the union of both things, but last time it was that way +/// it resulted in poor codegen from the "are there enough source items?" checks +/// not optimizing away. So if you give it a shot, make sure to watch what +/// happens in the codegen tests. #[inline] -fn try_collect_into_array_erased( - iter: &mut I, +fn try_from_fn_erased( buffer: &mut [MaybeUninit], -) -> ControlFlow + mut generator: impl FnMut(usize) -> R, +) -> ControlFlow where - I: Iterator, - I::Item: Try, + R: Try, { - let n = buffer.len(); let mut guard = Guard { array_mut: buffer, initialized: 0 }; - for _ in 0..n { - match iter.next() { - Some(item_rslt) => { - let item = item_rslt.branch()?; + while guard.initialized < guard.array_mut.len() { + let item = generator(guard.initialized).branch()?; - // SAFETY: `guard.initialized` starts at 0, which means push can be called - // at most `n` times, which this loop does. 
- unsafe { - guard.push_unchecked(item); - } - } - None => break, - } + // SAFETY: The loop condition ensures we have space to push the item + unsafe { guard.push_unchecked(item) }; } - let initialized = guard.initialized; mem::forget(guard); - ControlFlow::Continue(initialized) + ControlFlow::Continue(()) } /// Panic guard for incremental initialization of arrays. @@ -928,7 +879,7 @@ where /// /// To minimize indirection fields are still pub but callers should at least use /// `push_unchecked` to signal that something unsafe is going on. -pub(crate) struct Guard<'a, T> { +struct Guard<'a, T> { /// The array to be initialized. pub array_mut: &'a mut [MaybeUninit], /// The number of items that have been initialized so far. @@ -960,7 +911,7 @@ impl Drop for Guard<'_, T> { // SAFETY: this slice will contain only initialized objects. unsafe { crate::ptr::drop_in_place(MaybeUninit::slice_assume_init_mut( - &mut self.array_mut.get_unchecked_mut(..self.initialized), + self.array_mut.get_unchecked_mut(..self.initialized), )); } } @@ -982,17 +933,44 @@ impl Drop for Guard<'_, T> { pub(crate) fn iter_next_chunk( iter: &mut impl Iterator, ) -> Result<[T; N], IntoIter> { - let mut map = iter.map(NeverShortCircuit); let mut array = MaybeUninit::uninit_array::(); - let ControlFlow::Continue(initialized) = try_collect_into_array_erased(&mut map, &mut array); - if initialized == N { - // SAFETY: All elements of the array were populated. - let output = unsafe { MaybeUninit::array_assume_init(array) }; - Ok(output) - } else { - let alive = 0..initialized; - // SAFETY: `array` was initialized with exactly `initialized` - // number of elements. - return Err(unsafe { IntoIter::new_unchecked(array, alive) }); + let r = iter_next_chunk_erased(&mut array, iter); + match r { + Ok(()) => { + // SAFETY: All elements of `array` were populated. + Ok(unsafe { MaybeUninit::array_assume_init(array) }) + } + Err(initialized) => { + // SAFETY: Only the first `initialized` elements were populated + Err(unsafe { IntoIter::new_unchecked(array, 0..initialized) }) + } + } +} + +/// Version of [`iter_next_chunk`] using a passed-in slice in order to avoid +/// needing to monomorphize for every array length. +/// +/// Unfortunately this loop has two exit conditions, the buffer filling up +/// or the iterator running out of items, making it tend to optimize poorly. +#[inline] +fn iter_next_chunk_erased( + buffer: &mut [MaybeUninit], + iter: &mut impl Iterator, +) -> Result<(), usize> { + let mut guard = Guard { array_mut: buffer, initialized: 0 }; + while guard.initialized < guard.array_mut.len() { + let Some(item) = iter.next() else { + // Unlike `try_from_fn_erased`, we want to keep the partial results, + // so we need to defuse the guard instead of using `?`. + let initialized = guard.initialized; + mem::forget(guard); + return Err(initialized) + }; + + // SAFETY: The loop condition ensures we have space to push the item + unsafe { guard.push_unchecked(item) }; } + + mem::forget(guard); + Ok(()) } diff --git a/library/core/src/iter/adapters/array_chunks.rs b/library/core/src/iter/adapters/array_chunks.rs index af786609757b1..13719c727e93f 100644 --- a/library/core/src/iter/adapters/array_chunks.rs +++ b/library/core/src/iter/adapters/array_chunks.rs @@ -1,6 +1,5 @@ use crate::array; use crate::iter::{ByRefSized, FusedIterator, Iterator, TrustedRandomAccessNoCoerce}; -use crate::mem::{self, MaybeUninit}; use crate::ops::{ControlFlow, NeverShortCircuit, Try}; /// An iterator over `N` elements of the iterator at a time. 
@@ -212,19 +211,14 @@ where let mut i = 0; // Use a while loop because (0..len).step_by(N) doesn't optimize well. while inner_len - i >= N { - let mut chunk = MaybeUninit::uninit_array(); - let mut guard = array::Guard { array_mut: &mut chunk, initialized: 0 }; - while guard.initialized < N { + let chunk = crate::array::from_fn(|local| { // SAFETY: The method consumes the iterator and the loop condition ensures that // all accesses are in bounds and only happen once. unsafe { - let idx = i + guard.initialized; - guard.push_unchecked(self.iter.__iterator_get_unchecked(idx)); + let idx = i + local; + self.iter.__iterator_get_unchecked(idx) } - } - mem::forget(guard); - // SAFETY: The loop above initialized all elements - let chunk = unsafe { MaybeUninit::array_assume_init(chunk) }; + }); accum = f(accum, chunk); i += N; } diff --git a/library/core/src/iter/adapters/cloned.rs b/library/core/src/iter/adapters/cloned.rs index aba24a79dcf79..914ff86c1a959 100644 --- a/library/core/src/iter/adapters/cloned.rs +++ b/library/core/src/iter/adapters/cloned.rs @@ -1,7 +1,7 @@ use crate::iter::adapters::{ zip::try_get_unchecked, TrustedRandomAccess, TrustedRandomAccessNoCoerce, }; -use crate::iter::{FusedIterator, TrustedLen}; +use crate::iter::{FusedIterator, TrustedLen, UncheckedIterator}; use crate::ops::Try; /// An iterator that clones the elements of an underlying iterator. @@ -140,3 +140,16 @@ where T: Clone, { } + +impl<'a, I, T: 'a> UncheckedIterator for Cloned +where + I: UncheckedIterator, + T: Clone, +{ + unsafe fn next_unchecked(&mut self) -> T { + // SAFETY: `Cloned` is 1:1 with the inner iterator, so if the caller promised + // that there's an element left, the inner iterator has one too. + let item = unsafe { self.it.next_unchecked() }; + item.clone() + } +} diff --git a/library/core/src/iter/adapters/map.rs b/library/core/src/iter/adapters/map.rs index 9e25dbe462c91..31d02a4da6ea5 100644 --- a/library/core/src/iter/adapters/map.rs +++ b/library/core/src/iter/adapters/map.rs @@ -2,7 +2,7 @@ use crate::fmt; use crate::iter::adapters::{ zip::try_get_unchecked, SourceIter, TrustedRandomAccess, TrustedRandomAccessNoCoerce, }; -use crate::iter::{FusedIterator, InPlaceIterable, TrustedLen}; +use crate::iter::{FusedIterator, InPlaceIterable, TrustedLen, UncheckedIterator}; use crate::ops::Try; /// An iterator that maps the values of `iter` with `f`. @@ -187,6 +187,19 @@ where { } +impl UncheckedIterator for Map +where + I: UncheckedIterator, + F: FnMut(I::Item) -> B, +{ + unsafe fn next_unchecked(&mut self) -> B { + // SAFETY: `Map` is 1:1 with the inner iterator, so if the caller promised + // that there's an element left, the inner iterator has one too. + let item = unsafe { self.iter.next_unchecked() }; + (self.f)(item) + } +} + #[doc(hidden)] #[unstable(feature = "trusted_random_access", issue = "none")] unsafe impl TrustedRandomAccess for Map where I: TrustedRandomAccess {} diff --git a/library/core/src/iter/adapters/zip.rs b/library/core/src/iter/adapters/zip.rs index 8153c8cfef133..b6b0c90cb7d14 100644 --- a/library/core/src/iter/adapters/zip.rs +++ b/library/core/src/iter/adapters/zip.rs @@ -1,7 +1,7 @@ use crate::cmp; use crate::fmt::{self, Debug}; use crate::iter::{DoubleEndedIterator, ExactSizeIterator, FusedIterator, Iterator}; -use crate::iter::{InPlaceIterable, SourceIter, TrustedLen}; +use crate::iter::{InPlaceIterable, SourceIter, TrustedLen, UncheckedIterator}; /// An iterator that iterates two other iterators simultaneously. 
/// @@ -417,6 +417,13 @@ where { } +impl UncheckedIterator for Zip +where + A: UncheckedIterator, + B: UncheckedIterator, +{ +} + // Arbitrarily selects the left side of the zip iteration as extractable "source" // it would require negative trait bounds to be able to try both #[unstable(issue = "none", feature = "inplace_iteration")] diff --git a/library/core/src/iter/mod.rs b/library/core/src/iter/mod.rs index 00f57fbcc6162..156b925de773f 100644 --- a/library/core/src/iter/mod.rs +++ b/library/core/src/iter/mod.rs @@ -450,6 +450,7 @@ pub use self::adapters::{ pub use self::adapters::{Intersperse, IntersperseWith}; pub(crate) use self::adapters::try_process; +pub(crate) use self::traits::UncheckedIterator; mod adapters; mod range; diff --git a/library/core/src/iter/traits/mod.rs b/library/core/src/iter/traits/mod.rs index ed0fb634dbf05..41ea29e6a84d9 100644 --- a/library/core/src/iter/traits/mod.rs +++ b/library/core/src/iter/traits/mod.rs @@ -4,6 +4,7 @@ mod double_ended; mod exact_size; mod iterator; mod marker; +mod unchecked_iterator; #[stable(feature = "rust1", since = "1.0.0")] pub use self::{ @@ -19,3 +20,5 @@ pub use self::{ pub use self::marker::InPlaceIterable; #[unstable(feature = "trusted_step", issue = "85731")] pub use self::marker::TrustedStep; + +pub(crate) use self::unchecked_iterator::UncheckedIterator; diff --git a/library/core/src/iter/traits/unchecked_iterator.rs b/library/core/src/iter/traits/unchecked_iterator.rs new file mode 100644 index 0000000000000..ae4bfcad4e68f --- /dev/null +++ b/library/core/src/iter/traits/unchecked_iterator.rs @@ -0,0 +1,36 @@ +use crate::iter::TrustedLen; + +/// [`TrustedLen`] cannot have methods, so this allows augmenting it. +/// +/// It currently requires `TrustedLen` because it's unclear whether it's +/// reasonably possible to depend on the `size_hint` of anything else. +pub(crate) trait UncheckedIterator: TrustedLen { + /// Gets the next item from a non-empty iterator. + /// + /// Because there's always a value to return, that means it can return + /// the `Item` type directly, without wrapping it in an `Option`. + /// + /// # Safety + /// + /// This can only be called if `size_hint().0 != 0`, guaranteeing that + /// there's at least one item available. + /// + /// Otherwise (aka when `size_hint().1 == Some(0)`), this is UB. + /// + /// # Note to Implementers + /// + /// This has a default implementation using [`Option::unwrap_unchecked`]. + /// That's probably sufficient if your `next` *always* returns `Some`, + /// such as for infinite iterators. In more complicated situations, however, + /// sometimes there can still be `insertvalue`/`assume`/`extractvalue` + /// instructions remaining in the IR from the `Option` handling, at which + /// point you might want to implement this manually instead. + #[unstable(feature = "trusted_len_next_unchecked", issue = "37572")] + #[inline] + unsafe fn next_unchecked(&mut self) -> Self::Item { + let opt = self.next(); + // SAFETY: The caller promised that we're not empty, and + // `Self: TrustedLen` so we can actually trust the `size_hint`. 
+ unsafe { opt.unwrap_unchecked() } + } +} diff --git a/library/core/src/ops/try_trait.rs b/library/core/src/ops/try_trait.rs index 9108fc6304525..86aa1e4fd20ba 100644 --- a/library/core/src/ops/try_trait.rs +++ b/library/core/src/ops/try_trait.rs @@ -379,6 +379,15 @@ pub(crate) type ChangeOutputType = <::Residual as Residual>:: pub(crate) struct NeverShortCircuit(pub T); impl NeverShortCircuit { + /// Wraps a unary function to produce one that wraps the output into a `NeverShortCircuit`. + /// + /// This is useful for implementing infallible functions in terms of the `try_` ones, + /// without accidentally capturing extra generic parameters in a closure. + #[inline] + pub fn wrap_mut_1(mut f: impl FnMut(A) -> T) -> impl FnMut(A) -> NeverShortCircuit { + move |a| NeverShortCircuit(f(a)) + } + #[inline] pub fn wrap_mut_2( mut f: impl ~const FnMut(A, B) -> T, diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs index 90ab43d1289f0..c4317799bcc68 100644 --- a/library/core/src/slice/iter.rs +++ b/library/core/src/slice/iter.rs @@ -7,7 +7,9 @@ use crate::cmp; use crate::cmp::Ordering; use crate::fmt; use crate::intrinsics::assume; -use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccess, TrustedRandomAccessNoCoerce}; +use crate::iter::{ + FusedIterator, TrustedLen, TrustedRandomAccess, TrustedRandomAccessNoCoerce, UncheckedIterator, +}; use crate::marker::{PhantomData, Send, Sized, Sync}; use crate::mem::{self, SizedTypeProperties}; use crate::num::NonZeroUsize; diff --git a/library/core/src/slice/iter/macros.rs b/library/core/src/slice/iter/macros.rs index 0fd57b197aa97..89b92a7d5975f 100644 --- a/library/core/src/slice/iter/macros.rs +++ b/library/core/src/slice/iter/macros.rs @@ -384,6 +384,15 @@ macro_rules! iterator { #[unstable(feature = "trusted_len", issue = "37572")] unsafe impl TrustedLen for $name<'_, T> {} + + impl<'a, T> UncheckedIterator for $name<'a, T> { + unsafe fn next_unchecked(&mut self) -> $elem { + // SAFETY: The caller promised there's at least one more item. 
+ unsafe { + next_unchecked!(self) + } + } + } } } diff --git a/library/core/tests/iter/traits/iterator.rs b/library/core/tests/iter/traits/iterator.rs index 37345c1d38142..62566a9502d04 100644 --- a/library/core/tests/iter/traits/iterator.rs +++ b/library/core/tests/iter/traits/iterator.rs @@ -582,6 +582,9 @@ fn test_next_chunk() { assert_eq!(it.next_chunk().unwrap(), []); assert_eq!(it.next_chunk().unwrap(), [4, 5, 6, 7, 8, 9]); assert_eq!(it.next_chunk::<4>().unwrap_err().as_slice(), &[10, 11]); + + let mut it = std::iter::repeat_with(|| panic!()); + assert_eq!(it.next_chunk::<0>().unwrap(), []); } // just tests by whether or not this compiles diff --git a/tests/codegen/array-map.rs b/tests/codegen/array-map.rs index 1154659eea541..9298e89e397d0 100644 --- a/tests/codegen/array-map.rs +++ b/tests/codegen/array-map.rs @@ -43,5 +43,7 @@ pub fn long_integer_map(x: [u32; 64]) -> [u32; 64] { // CHECK-NEXT: alloca [64 x i32] // CHECK-NEXT: alloca %"core::mem::manually_drop::ManuallyDrop<[u32; 64]>" // CHECK-NOT: alloca - x.map(|x| 2 * x + 1) + // CHECK: mul <{{[0-9]+}} x i32> + // CHECK: add <{{[0-9]+}} x i32> + x.map(|x| 13 * x + 7) } From bb77860d9ccdc6a920edeedce313446545294c04 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Sat, 4 Feb 2023 16:33:37 -0800 Subject: [PATCH 273/501] Add another autovectorization codegen test using array zip-map --- tests/codegen/autovectorize-f32x4.rs | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/tests/codegen/autovectorize-f32x4.rs b/tests/codegen/autovectorize-f32x4.rs index 6b09c8fc99860..9ecea53f1c05c 100644 --- a/tests/codegen/autovectorize-f32x4.rs +++ b/tests/codegen/autovectorize-f32x4.rs @@ -1,6 +1,7 @@ -// compile-flags: -C opt-level=3 +// compile-flags: -C opt-level=3 -Z merge-functions=disabled // only-x86_64 #![crate_type = "lib"] +#![feature(array_zip)] // CHECK-LABEL: @auto_vectorize_direct #[no_mangle] @@ -30,3 +31,13 @@ pub fn auto_vectorize_loop(a: [f32; 4], b: [f32; 4]) -> [f32; 4] { } c } + +// CHECK-LABEL: @auto_vectorize_array_zip_map +#[no_mangle] +pub fn auto_vectorize_array_zip_map(a: [f32; 4], b: [f32; 4]) -> [f32; 4] { +// CHECK: load <4 x float> +// CHECK: load <4 x float> +// CHECK: fadd <4 x float> +// CHECK: store <4 x float> + a.zip(b).map(|(a, b)| a + b) +} From 3674502a98982ca3fdade848274e6ab2b7d55c4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maria=20Jos=C3=A9=20Solano?= Date: Sat, 4 Feb 2023 17:46:11 -0800 Subject: [PATCH 274/501] Unify language config markers with server --- editors/code/language-configuration.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/editors/code/language-configuration.json b/editors/code/language-configuration.json index b1ee0843e3e69..51f0e65f4fd39 100644 --- a/editors/code/language-configuration.json +++ b/editors/code/language-configuration.json @@ -35,8 +35,8 @@ }, "folding": { "markers": { - "start": "^\\s*//\\s*#?region\\b", - "end": "^\\s*//\\s*#?endregion\\b" + "start": "^\\s*// region:\\b", + "end": "^\\s*// endregion\\b" } } } From 0ec2911857eeba97358445582ee3484a6d81d19c Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Wed, 25 Jan 2023 23:47:29 +0900 Subject: [PATCH 275/501] fix: consider relative offset to fake ident token in expansion for completion --- crates/ide-completion/src/context/analysis.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index e34824e22eac1..f606d79ad2040 100644 --- 
a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -48,7 +48,9 @@ pub(super) fn expand_and_analyze( // make the offset point to the start of the original token, as that is what the // intermediate offsets calculated in expansion always points to let offset = offset - relative_offset; - let expansion = expand(sema, original_file, speculative_file, offset, fake_ident_token); + let expansion = + expand(sema, original_file, speculative_file, offset, fake_ident_token, relative_offset); + // add the relative offset back, so that left_biased finds the proper token let offset = expansion.offset + relative_offset; let token = expansion.original_file.token_at_offset(offset).left_biased()?; @@ -67,6 +69,7 @@ fn expand( mut speculative_file: SyntaxNode, mut offset: TextSize, mut fake_ident_token: SyntaxToken, + relative_offset: TextSize, ) -> ExpansionResult { let _p = profile::span("CompletionContext::expand"); let mut derive_ctx = None; @@ -97,7 +100,7 @@ fn expand( // successful expansions (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { let new_offset = fake_mapped_token.text_range().start(); - if new_offset > actual_expansion.text_range().end() { + if new_offset + relative_offset > actual_expansion.text_range().end() { // offset outside of bounds from the original expansion, // stop here to prevent problems from happening break 'expansion; @@ -176,7 +179,7 @@ fn expand( // successful expansions (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { let new_offset = fake_mapped_token.text_range().start(); - if new_offset > actual_expansion.text_range().end() { + if new_offset + relative_offset > actual_expansion.text_range().end() { // offset outside of bounds from the original expansion, // stop here to prevent problems from happening break 'expansion; From a4d0b5c522405fd2351c56f48c68544b3130a513 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Sun, 5 Feb 2023 20:02:16 +0900 Subject: [PATCH 276/501] Add regression tests --- crates/base-db/src/fixture.rs | 62 ++++++++++++++++++- .../ide-completion/src/tests/proc_macros.rs | 30 +++++++++ 2 files changed, 90 insertions(+), 2 deletions(-) diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 60d1e488d8a43..8a7e9dfadfed2 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -6,7 +6,7 @@ use rustc_hash::FxHashMap; use test_utils::{ extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER, ESCAPED_CURSOR_MARKER, }; -use tt::token_id::Subtree; +use tt::token_id::{Leaf, Subtree, TokenTree}; use vfs::{file_set::FileSet, VfsPath}; use crate::{ @@ -310,7 +310,7 @@ impl ChangeFixture { } } -fn default_test_proc_macros() -> [(String, ProcMacro); 4] { +fn default_test_proc_macros() -> [(String, ProcMacro); 5] { [ ( r#" @@ -368,6 +368,20 @@ pub fn mirror(input: TokenStream) -> TokenStream { expander: Arc::new(MirrorProcMacroExpander), }, ), + ( + r#" +#[proc_macro] +pub fn shorten(input: TokenStream) -> TokenStream { + loop {} +} +"# + .into(), + ProcMacro { + name: "shorten".into(), + kind: crate::ProcMacroKind::FuncLike, + expander: Arc::new(ShortenProcMacroExpander), + }, + ), ] } @@ -508,3 +522,47 @@ impl ProcMacroExpander for MirrorProcMacroExpander { Ok(traverse(input)) } } + +// Replaces every literal with an empty string literal and every identifier with its first letter, +// but retains all tokens' span. 
Useful for testing we don't assume token hasn't been modified by +// macros even if it retains its span. +#[derive(Debug)] +struct ShortenProcMacroExpander; +impl ProcMacroExpander for ShortenProcMacroExpander { + fn expand( + &self, + input: &Subtree, + _: Option<&Subtree>, + _: &Env, + ) -> Result { + return Ok(traverse(input)); + + fn traverse(input: &Subtree) -> Subtree { + let token_trees = input + .token_trees + .iter() + .map(|it| match it { + TokenTree::Leaf(leaf) => tt::TokenTree::Leaf(modify_leaf(leaf)), + TokenTree::Subtree(subtree) => tt::TokenTree::Subtree(traverse(subtree)), + }) + .collect(); + Subtree { delimiter: input.delimiter, token_trees } + } + + fn modify_leaf(leaf: &Leaf) -> Leaf { + let mut leaf = leaf.clone(); + match &mut leaf { + Leaf::Literal(it) => { + // XXX Currently replaces any literals with an empty string, but supporting + // "shortening" other literals would be nice. + it.text = "\"\"".into(); + } + Leaf::Punct(_) => {} + Leaf::Ident(it) => { + it.text = it.text.chars().take(1).collect(); + } + } + leaf + } + } +} diff --git a/crates/ide-completion/src/tests/proc_macros.rs b/crates/ide-completion/src/tests/proc_macros.rs index 9eae6f84954b1..fec149e56a9e2 100644 --- a/crates/ide-completion/src/tests/proc_macros.rs +++ b/crates/ide-completion/src/tests/proc_macros.rs @@ -131,3 +131,33 @@ fn main() {} "#]], ) } + +#[test] +fn issue_13836_str() { + check( + r#" +//- proc_macros: shorten +fn main() { + let s = proc_macros::shorten!("text.$0"); +} +"#, + expect![[r#""#]], + ) +} + +#[test] +fn issue_13836_ident() { + check( + r#" +//- proc_macros: shorten +struct S; +impl S { + fn foo(&self) {} +} +fn main() { + let s = proc_macros::shorten!(S.fo$0); +} +"#, + expect![[r#""#]], + ) +} From 044a3a65a00e568b66e4bcdf7ee7393105aff6dd Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Wed, 4 Jan 2023 17:19:52 +0000 Subject: [PATCH 277/501] Support const and sym operands in inline asm --- src/inline_asm.rs | 168 +++++++++++++++++++++++++++++++++------------- 1 file changed, 122 insertions(+), 46 deletions(-) diff --git a/src/inline_asm.rs b/src/inline_asm.rs index 3fcc84d39295f..aa0b51181f40a 100644 --- a/src/inline_asm.rs +++ b/src/inline_asm.rs @@ -6,12 +6,37 @@ use std::fmt::Write; use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_middle::mir::InlineAsmOperand; +use rustc_middle::ty::SymbolName; use rustc_span::sym; use rustc_target::asm::*; +enum CInlineAsmOperand<'tcx> { + In { + reg: InlineAsmRegOrRegClass, + value: CValue<'tcx>, + }, + Out { + reg: InlineAsmRegOrRegClass, + late: bool, + place: Option>, + }, + InOut { + reg: InlineAsmRegOrRegClass, + _late: bool, + in_value: CValue<'tcx>, + out_place: Option>, + }, + Const { + value: String, + }, + Symbol { + symbol: SymbolName<'tcx>, + }, +} + pub(crate) fn codegen_inline_asm<'tcx>( fx: &mut FunctionCx<'_, '_, 'tcx>, - _span: Span, + span: Span, template: &[InlineAsmTemplatePiece], operands: &[InlineAsmOperand<'tcx>], options: InlineAsmOptions, @@ -198,6 +223,59 @@ pub(crate) fn codegen_inline_asm<'tcx>( } } + let operands = operands + .into_iter() + .map(|operand| match *operand { + InlineAsmOperand::In { reg, ref value } => { + CInlineAsmOperand::In { reg, value: crate::base::codegen_operand(fx, value) } + } + InlineAsmOperand::Out { reg, late, ref place } => CInlineAsmOperand::Out { + reg, + late, + place: place.map(|place| crate::base::codegen_place(fx, place)), + }, + InlineAsmOperand::InOut { reg, late, ref in_value, ref out_place } => { + 
CInlineAsmOperand::InOut { + reg, + _late: late, + in_value: crate::base::codegen_operand(fx, in_value), + out_place: out_place.map(|place| crate::base::codegen_place(fx, place)), + } + } + InlineAsmOperand::Const { ref value } => { + let (const_value, ty) = crate::constant::eval_mir_constant(fx, &*value) + .unwrap_or_else(|| span_bug!(span, "asm const cannot be resolved")); + let value = rustc_codegen_ssa::common::asm_const_to_str( + fx.tcx, + span, + const_value, + fx.layout_of(ty), + ); + CInlineAsmOperand::Const { value } + } + InlineAsmOperand::SymFn { ref value } => { + let literal = fx.monomorphize(value.literal); + if let ty::FnDef(def_id, substs) = *literal.ty().kind() { + let instance = ty::Instance::resolve_for_fn_ptr( + fx.tcx, + ty::ParamEnv::reveal_all(), + def_id, + substs, + ) + .unwrap(); + CInlineAsmOperand::Symbol { symbol: fx.tcx.symbol_name(instance) } + } else { + span_bug!(span, "invalid type for asm sym (fn)"); + } + } + InlineAsmOperand::SymStatic { def_id } => { + assert!(fx.tcx.is_static(def_id)); + let instance = Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx); + CInlineAsmOperand::Symbol { symbol: fx.tcx.symbol_name(instance) } + } + }) + .collect::>(); + let mut inputs = Vec::new(); let mut outputs = Vec::new(); @@ -206,7 +284,7 @@ pub(crate) fn codegen_inline_asm<'tcx>( arch: fx.tcx.sess.asm_arch.unwrap(), enclosing_def_id: fx.instance.def_id(), template, - operands, + operands: &operands, options, registers: Vec::new(), stack_slots_clobber: Vec::new(), @@ -229,36 +307,22 @@ pub(crate) fn codegen_inline_asm<'tcx>( fx.cx.global_asm.push_str(&generated_asm); for (i, operand) in operands.iter().enumerate() { - match *operand { - InlineAsmOperand::In { reg: _, ref value } => { - inputs.push(( - asm_gen.stack_slots_input[i].unwrap(), - crate::base::codegen_operand(fx, value).load_scalar(fx), - )); - } - InlineAsmOperand::Out { reg: _, late: _, place } => { + match operand { + CInlineAsmOperand::In { reg: _, value } => { + inputs.push((asm_gen.stack_slots_input[i].unwrap(), value.load_scalar(fx))); + } + CInlineAsmOperand::Out { reg: _, late: _, place } => { if let Some(place) = place { - outputs.push(( - asm_gen.stack_slots_output[i].unwrap(), - crate::base::codegen_place(fx, place), - )); + outputs.push((asm_gen.stack_slots_output[i].unwrap(), place.clone())); } } - InlineAsmOperand::InOut { reg: _, late: _, ref in_value, out_place } => { - inputs.push(( - asm_gen.stack_slots_input[i].unwrap(), - crate::base::codegen_operand(fx, in_value).load_scalar(fx), - )); + CInlineAsmOperand::InOut { reg: _, _late: _, in_value, out_place } => { + inputs.push((asm_gen.stack_slots_input[i].unwrap(), in_value.load_scalar(fx))); if let Some(out_place) = out_place { - outputs.push(( - asm_gen.stack_slots_output[i].unwrap(), - crate::base::codegen_place(fx, out_place), - )); + outputs.push((asm_gen.stack_slots_output[i].unwrap(), out_place.clone())); } } - InlineAsmOperand::Const { value: _ } => todo!(), - InlineAsmOperand::SymFn { value: _ } => todo!(), - InlineAsmOperand::SymStatic { def_id: _ } => todo!(), + CInlineAsmOperand::Const { value: _ } | CInlineAsmOperand::Symbol { symbol: _ } => {} } } @@ -280,7 +344,7 @@ struct InlineAssemblyGenerator<'a, 'tcx> { arch: InlineAsmArch, enclosing_def_id: DefId, template: &'a [InlineAsmTemplatePiece], - operands: &'a [InlineAsmOperand<'tcx>], + operands: &'a [CInlineAsmOperand<'tcx>], options: InlineAsmOptions, registers: Vec>, stack_slots_clobber: Vec>, @@ -304,18 +368,20 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { // Add 
explicit registers to the allocated set. for (i, operand) in self.operands.iter().enumerate() { match *operand { - InlineAsmOperand::In { reg: InlineAsmRegOrRegClass::Reg(reg), .. } => { + CInlineAsmOperand::In { reg: InlineAsmRegOrRegClass::Reg(reg), .. } => { regs[i] = Some(reg); allocated.entry(reg).or_default().0 = true; } - InlineAsmOperand::Out { - reg: InlineAsmRegOrRegClass::Reg(reg), late: true, .. + CInlineAsmOperand::Out { + reg: InlineAsmRegOrRegClass::Reg(reg), + late: true, + .. } => { regs[i] = Some(reg); allocated.entry(reg).or_default().1 = true; } - InlineAsmOperand::Out { reg: InlineAsmRegOrRegClass::Reg(reg), .. } - | InlineAsmOperand::InOut { reg: InlineAsmRegOrRegClass::Reg(reg), .. } => { + CInlineAsmOperand::Out { reg: InlineAsmRegOrRegClass::Reg(reg), .. } + | CInlineAsmOperand::InOut { reg: InlineAsmRegOrRegClass::Reg(reg), .. } => { regs[i] = Some(reg); allocated.insert(reg, (true, true)); } @@ -326,12 +392,12 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { // Allocate out/inout/inlateout registers first because they are more constrained. for (i, operand) in self.operands.iter().enumerate() { match *operand { - InlineAsmOperand::Out { + CInlineAsmOperand::Out { reg: InlineAsmRegOrRegClass::RegClass(class), late: false, .. } - | InlineAsmOperand::InOut { + | CInlineAsmOperand::InOut { reg: InlineAsmRegOrRegClass::RegClass(class), .. } => { let mut alloc_reg = None; @@ -360,7 +426,7 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { // Allocate in/lateout. for (i, operand) in self.operands.iter().enumerate() { match *operand { - InlineAsmOperand::In { reg: InlineAsmRegOrRegClass::RegClass(class), .. } => { + CInlineAsmOperand::In { reg: InlineAsmRegOrRegClass::RegClass(class), .. } => { let mut alloc_reg = None; for ® in &map[&class] { let mut used = false; @@ -380,7 +446,7 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { regs[i] = Some(reg); allocated.entry(reg).or_default().0 = true; } - InlineAsmOperand::Out { + CInlineAsmOperand::Out { reg: InlineAsmRegOrRegClass::RegClass(class), late: true, .. @@ -455,7 +521,7 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { // Allocate stack slots for inout for (i, operand) in self.operands.iter().enumerate() { match *operand { - InlineAsmOperand::InOut { reg, out_place: Some(_), .. } => { + CInlineAsmOperand::InOut { reg, out_place: Some(_), .. } => { let slot = new_slot(reg.reg_class()); slots_input[i] = Some(slot); slots_output[i] = Some(slot); @@ -470,8 +536,8 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { // Allocate stack slots for input for (i, operand) in self.operands.iter().enumerate() { match *operand { - InlineAsmOperand::In { reg, .. } - | InlineAsmOperand::InOut { reg, out_place: None, .. } => { + CInlineAsmOperand::In { reg, .. } + | CInlineAsmOperand::InOut { reg, out_place: None, .. } => { slots_input[i] = Some(new_slot(reg.reg_class())); } _ => (), @@ -487,7 +553,7 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { // Allocate stack slots for output for (i, operand) in self.operands.iter().enumerate() { match *operand { - InlineAsmOperand::Out { reg, place: Some(_), .. } => { + CInlineAsmOperand::Out { reg, place: Some(_), .. 
} => { slots_output[i] = Some(new_slot(reg.reg_class())); } _ => (), @@ -549,13 +615,23 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { generated_asm.push_str(s); } InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span: _ } => { - if self.options.contains(InlineAsmOptions::ATT_SYNTAX) { - generated_asm.push('%'); + match self.operands[*operand_idx] { + CInlineAsmOperand::In { .. } + | CInlineAsmOperand::Out { .. } + | CInlineAsmOperand::InOut { .. } => { + if self.options.contains(InlineAsmOptions::ATT_SYNTAX) { + generated_asm.push('%'); + } + self.registers[*operand_idx] + .unwrap() + .emit(&mut generated_asm, self.arch, *modifier) + .unwrap(); + } + CInlineAsmOperand::Const { ref value } => { + generated_asm.push_str(value); + } + CInlineAsmOperand::Symbol { symbol } => generated_asm.push_str(symbol.name), } - self.registers[*operand_idx] - .unwrap() - .emit(&mut generated_asm, self.arch, *modifier) - .unwrap(); } } } From 2e93be3a4c8f59e3856c22152c3771054577b618 Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Sun, 5 Feb 2023 17:24:02 +0000 Subject: [PATCH 278/501] Add create_wrapper_function helper --- src/allocator.rs | 75 +++++++++--------------------------------------- src/common.rs | 38 ++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 61 deletions(-) diff --git a/src/allocator.rs b/src/allocator.rs index 8508227179ac6..1c73957ca571c 100644 --- a/src/allocator.rs +++ b/src/allocator.rs @@ -70,37 +70,13 @@ fn codegen_inner( params: arg_tys.iter().cloned().map(AbiParam::new).collect(), returns: output.into_iter().map(AbiParam::new).collect(), }; - - let caller_name = format!("__rust_{}", method.name); - let callee_name = kind.fn_name(method.name); - - let func_id = module.declare_function(&caller_name, Linkage::Export, &sig).unwrap(); - - let callee_func_id = module.declare_function(&callee_name, Linkage::Import, &sig).unwrap(); - - let mut ctx = Context::new(); - ctx.func.signature = sig.clone(); - { - let mut func_ctx = FunctionBuilderContext::new(); - let mut bcx = FunctionBuilder::new(&mut ctx.func, &mut func_ctx); - - let block = bcx.create_block(); - bcx.switch_to_block(block); - let args = arg_tys - .into_iter() - .map(|ty| bcx.append_block_param(block, ty)) - .collect::>(); - - let callee_func_ref = module.declare_func_in_func(callee_func_id, &mut bcx.func); - let call_inst = bcx.ins().call(callee_func_ref, &args); - let results = bcx.inst_results(call_inst).to_vec(); // Clone to prevent borrow error - - bcx.ins().return_(&results); - bcx.seal_all_blocks(); - bcx.finalize(); - } - module.define_function(func_id, &mut ctx).unwrap(); - unwind_context.add_function(func_id, &ctx, module.isa()); + crate::common::create_wrapper_function( + module, + unwind_context, + sig, + &format!("__rust_{}", method.name), + &kind.fn_name(method.name), + ); } let sig = Signature { @@ -108,36 +84,13 @@ fn codegen_inner( params: vec![AbiParam::new(usize_ty), AbiParam::new(usize_ty)], returns: vec![], }; - - let callee_name = alloc_error_handler_kind.fn_name(sym::oom); - - let func_id = - module.declare_function("__rust_alloc_error_handler", Linkage::Export, &sig).unwrap(); - - let callee_func_id = module.declare_function(&callee_name, Linkage::Import, &sig).unwrap(); - - let mut ctx = Context::new(); - ctx.func.signature = sig; - { - let mut func_ctx = FunctionBuilderContext::new(); - let mut bcx = FunctionBuilder::new(&mut ctx.func, &mut func_ctx); - - let block = bcx.create_block(); - bcx.switch_to_block(block); - let args = 
(&[usize_ty, usize_ty]) - .iter() - .map(|&ty| bcx.append_block_param(block, ty)) - .collect::>(); - - let callee_func_ref = module.declare_func_in_func(callee_func_id, &mut bcx.func); - bcx.ins().call(callee_func_ref, &args); - - bcx.ins().trap(TrapCode::UnreachableCodeReached); - bcx.seal_all_blocks(); - bcx.finalize(); - } - module.define_function(func_id, &mut ctx).unwrap(); - unwind_context.add_function(func_id, &ctx, module.isa()); + crate::common::create_wrapper_function( + module, + unwind_context, + sig, + "__rust_alloc_error_handler", + &alloc_error_handler_kind.fn_name(sym::oom), + ); let data_id = module.declare_data(OomStrategy::SYMBOL, Linkage::Export, false, false).unwrap(); let mut data_ctx = DataContext::new(); diff --git a/src/common.rs b/src/common.rs index f41af3a9e6366..16ae526090b7e 100644 --- a/src/common.rs +++ b/src/common.rs @@ -254,6 +254,44 @@ pub(crate) fn type_sign(ty: Ty<'_>) -> bool { } } +pub(crate) fn create_wrapper_function( + module: &mut impl Module, + unwind_context: &mut UnwindContext, + sig: Signature, + wrapper_name: &str, + callee_name: &str, +) { + let wrapper_func_id = module.declare_function(wrapper_name, Linkage::Export, &sig).unwrap(); + let callee_func_id = module.declare_function(callee_name, Linkage::Import, &sig).unwrap(); + + let mut ctx = Context::new(); + ctx.func.signature = sig; + { + let mut func_ctx = FunctionBuilderContext::new(); + let mut bcx = FunctionBuilder::new(&mut ctx.func, &mut func_ctx); + + let block = bcx.create_block(); + bcx.switch_to_block(block); + let func = &mut bcx.func.stencil; + let args = func + .signature + .params + .iter() + .map(|param| func.dfg.append_block_param(block, param.value_type)) + .collect::>(); + + let callee_func_ref = module.declare_func_in_func(callee_func_id, &mut bcx.func); + let call_inst = bcx.ins().call(callee_func_ref, &args); + let results = bcx.inst_results(call_inst).to_vec(); // Clone to prevent borrow error + + bcx.ins().return_(&results); + bcx.seal_all_blocks(); + bcx.finalize(); + } + module.define_function(wrapper_func_id, &mut ctx).unwrap(); + unwind_context.add_function(wrapper_func_id, &ctx, module.isa()); +} + pub(crate) struct FunctionCx<'m, 'clif, 'tcx: 'm> { pub(crate) cx: &'clif mut crate::CodegenCx, pub(crate) module: &'m mut dyn Module, From a2719a285c7baeddb5cb0fd9bdd801b303b74a3e Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Sun, 5 Feb 2023 17:39:00 +0000 Subject: [PATCH 279/501] Fix linker error when inline asm sym operand is not exported from local CGU --- src/common.rs | 2 +- src/inline_asm.rs | 31 ++++++++++++++++++++++++++----- 2 files changed, 27 insertions(+), 6 deletions(-) diff --git a/src/common.rs b/src/common.rs index 16ae526090b7e..a8be0d32cc8c7 100644 --- a/src/common.rs +++ b/src/common.rs @@ -255,7 +255,7 @@ pub(crate) fn type_sign(ty: Ty<'_>) -> bool { } pub(crate) fn create_wrapper_function( - module: &mut impl Module, + module: &mut dyn Module, unwind_context: &mut UnwindContext, sig: Signature, wrapper_name: &str, diff --git a/src/inline_asm.rs b/src/inline_asm.rs index aa0b51181f40a..6206fbf7dd571 100644 --- a/src/inline_asm.rs +++ b/src/inline_asm.rs @@ -6,7 +6,6 @@ use std::fmt::Write; use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_middle::mir::InlineAsmOperand; -use rustc_middle::ty::SymbolName; use rustc_span::sym; use rustc_target::asm::*; @@ -30,7 +29,7 @@ enum CInlineAsmOperand<'tcx> { value: String, }, Symbol { - symbol: SymbolName<'tcx>, + symbol: String, }, } @@ 
-263,7 +262,29 @@ pub(crate) fn codegen_inline_asm<'tcx>( substs, ) .unwrap(); - CInlineAsmOperand::Symbol { symbol: fx.tcx.symbol_name(instance) } + let symbol = fx.tcx.symbol_name(instance); + + // Pass a wrapper rather than the function itself as the function itself may not + // be exported from the main codegen unit and may thus be unreachable from the + // object file created by an external assembler. + let inline_asm_index = fx.cx.inline_asm_index.get(); + fx.cx.inline_asm_index.set(inline_asm_index + 1); + let wrapper_name = format!( + "__inline_asm_{}_wrapper_n{}", + fx.cx.cgu_name.as_str().replace('.', "__").replace('-', "_"), + inline_asm_index + ); + let sig = + get_function_sig(fx.tcx, fx.target_config.default_call_conv, instance); + create_wrapper_function( + fx.module, + &mut fx.cx.unwind_context, + sig, + &wrapper_name, + symbol.name, + ); + + CInlineAsmOperand::Symbol { symbol: wrapper_name } } else { span_bug!(span, "invalid type for asm sym (fn)"); } @@ -271,7 +292,7 @@ pub(crate) fn codegen_inline_asm<'tcx>( InlineAsmOperand::SymStatic { def_id } => { assert!(fx.tcx.is_static(def_id)); let instance = Instance::mono(fx.tcx, def_id).polymorphize(fx.tcx); - CInlineAsmOperand::Symbol { symbol: fx.tcx.symbol_name(instance) } + CInlineAsmOperand::Symbol { symbol: fx.tcx.symbol_name(instance).name.to_owned() } } }) .collect::>(); @@ -630,7 +651,7 @@ impl<'tcx> InlineAssemblyGenerator<'_, 'tcx> { CInlineAsmOperand::Const { ref value } => { generated_asm.push_str(value); } - CInlineAsmOperand::Symbol { symbol } => generated_asm.push_str(symbol.name), + CInlineAsmOperand::Symbol { ref symbol } => generated_asm.push_str(symbol), } } } From df6b06790057ca32584a6ed6bfdd02045594137d Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Sun, 5 Feb 2023 18:17:31 +0000 Subject: [PATCH 280/501] Implement const operands for global asm --- src/global_asm.rs | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/src/global_asm.rs b/src/global_asm.rs index dcbcaba30feed..9aee4908c5ca1 100644 --- a/src/global_asm.rs +++ b/src/global_asm.rs @@ -7,7 +7,7 @@ use std::process::{Command, Stdio}; use std::sync::Arc; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; -use rustc_hir::ItemId; +use rustc_hir::{InlineAsmOperand, ItemId}; use rustc_session::config::{OutputFilenames, OutputType}; use crate::prelude::*; @@ -23,7 +23,32 @@ pub(crate) fn codegen_global_asm_item(tcx: TyCtxt<'_>, global_asm: &mut String, for piece in asm.template { match *piece { InlineAsmTemplatePiece::String(ref s) => global_asm.push_str(s), - InlineAsmTemplatePiece::Placeholder { .. } => todo!(), + InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: op_sp } => { + match asm.operands[operand_idx].0 { + InlineAsmOperand::Const { ref anon_const } => { + let const_value = + tcx.const_eval_poly(anon_const.def_id.to_def_id()).unwrap_or_else( + |_| span_bug!(op_sp, "asm const cannot be resolved"), + ); + let ty = tcx.typeck_body(anon_const.body).node_type(anon_const.hir_id); + let string = rustc_codegen_ssa::common::asm_const_to_str( + tcx, + op_sp, + const_value, + RevealAllLayoutCx(tcx).layout_of(ty), + ); + global_asm.push_str(&string); + } + InlineAsmOperand::SymFn { anon_const: _ } => todo!(), + InlineAsmOperand::SymStatic { path: _, def_id: _ } => todo!(), + InlineAsmOperand::In { .. } + | InlineAsmOperand::Out { .. } + | InlineAsmOperand::InOut { .. } + | InlineAsmOperand::SplitInOut { .. 
} => { + span_bug!(op_sp, "invalid operand type for global_asm!") + } + } + } } } global_asm.push_str("\n.att_syntax\n\n"); From 178e267977c646e34a1440cb10e32e0716e2241e Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Sun, 5 Feb 2023 18:42:30 +0000 Subject: [PATCH 281/501] Implement sym operands for global asm --- src/global_asm.rs | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/src/global_asm.rs b/src/global_asm.rs index 9aee4908c5ca1..46c78ce6a1e34 100644 --- a/src/global_asm.rs +++ b/src/global_asm.rs @@ -39,8 +39,22 @@ pub(crate) fn codegen_global_asm_item(tcx: TyCtxt<'_>, global_asm: &mut String, ); global_asm.push_str(&string); } - InlineAsmOperand::SymFn { anon_const: _ } => todo!(), - InlineAsmOperand::SymStatic { path: _, def_id: _ } => todo!(), + InlineAsmOperand::SymFn { anon_const } => { + let ty = tcx.typeck_body(anon_const.body).node_type(anon_const.hir_id); + let instance = match ty.kind() { + &ty::FnDef(def_id, substs) => Instance::new(def_id, substs), + _ => span_bug!(op_sp, "asm sym is not a function"), + }; + let symbol = tcx.symbol_name(instance); + // FIXME handle the case where the function was made private to the + // current codegen unit + global_asm.push_str(symbol.name); + } + InlineAsmOperand::SymStatic { path: _, def_id } => { + let instance = Instance::mono(tcx, def_id).polymorphize(tcx); + let symbol = tcx.symbol_name(instance); + global_asm.push_str(symbol.name); + } InlineAsmOperand::In { .. } | InlineAsmOperand::Out { .. } | InlineAsmOperand::InOut { .. } From cb9a5b9549cb5bb7f8f946643555398ae180937b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 6 Feb 2023 12:07:33 +0100 Subject: [PATCH 282/501] Support sysroot library source being defined inside the workspace --- crates/project-model/src/cargo_workspace.rs | 1 + crates/project-model/src/sysroot.rs | 11 ++++++ crates/project-model/src/workspace.rs | 41 ++++++++++++++++----- crates/rust-analyzer/src/config.rs | 8 ++++ docs/user/generated_config.adoc | 8 ++++ editors/code/package.json | 8 ++++ 6 files changed, 68 insertions(+), 9 deletions(-) diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index 467cf0917875a..fdc7859eb90fb 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -96,6 +96,7 @@ pub struct CargoConfig { pub target: Option, /// Sysroot loading behavior pub sysroot: Option, + pub sysroot_src: Option, /// rustc private crate source pub rustc_source: Option, /// crates to disable `#[cfg(test)]` on diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index e1dde12bad821..328d2fbcf31f4 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -76,6 +76,7 @@ impl Sysroot { } } +// FIXME: Expose a builder api as loading the sysroot got way too modular and complicated. impl Sysroot { /// Attempts to discover the toolchain's sysroot from the given `dir`. 
pub fn discover(dir: &AbsPath, extra_env: &FxHashMap) -> Result { @@ -86,6 +87,16 @@ impl Sysroot { Ok(Sysroot::load(sysroot_dir, sysroot_src_dir)) } + pub fn discover_with_src_override( + dir: &AbsPath, + extra_env: &FxHashMap, + src: AbsPathBuf, + ) -> Result { + tracing::debug!("discovering sysroot for {}", dir.display()); + let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; + Ok(Sysroot::load(sysroot_dir, src)) + } + pub fn discover_rustc( cargo_toml: &ManifestPath, extra_env: &FxHashMap, diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index d784d3d0e9afb..2a11f1e8eb820 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -190,8 +190,8 @@ impl ProjectWorkspace { })?; let cargo = CargoWorkspace::new(meta); - let sysroot = match &config.sysroot { - Some(RustcSource::Path(path)) => { + let sysroot = match (&config.sysroot, &config.sysroot_src) { + (Some(RustcSource::Path(path)), None) => { match Sysroot::with_sysroot_dir(path.clone()) { Ok(it) => Some(it), Err(e) => { @@ -200,7 +200,7 @@ impl ProjectWorkspace { } } } - Some(RustcSource::Discover) => { + (Some(RustcSource::Discover), None) => { match Sysroot::discover(cargo_toml.parent(), &config.extra_env) { Ok(it) => Some(it), Err(e) => { @@ -213,8 +213,29 @@ impl ProjectWorkspace { } } } - None => None, + (Some(RustcSource::Path(sysroot)), Some(sysroot_src)) => { + Some(Sysroot::load(sysroot.clone(), sysroot_src.clone())) + } + (Some(RustcSource::Discover), Some(sysroot_src)) => { + match Sysroot::discover_with_src_override( + cargo_toml.parent(), + &config.extra_env, + sysroot_src.clone(), + ) { + Ok(it) => Some(it), + Err(e) => { + tracing::error!( + %e, + "Failed to find sysroot for Cargo.toml file {}. 
Is rust-src installed?", + cargo_toml.display() + ); + None + } + } + } + (None, _) => None, }; + if let Some(sysroot) = &sysroot { tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); } @@ -440,9 +461,11 @@ impl ProjectWorkspace { /// The return type contains the path and whether or not /// the root is a member of the current workspace pub fn to_roots(&self) -> Vec { - let mk_sysroot = |sysroot: Option<&Sysroot>| { + let mk_sysroot = |sysroot: Option<&Sysroot>, project_root: Option<&AbsPath>| { sysroot.map(|sysroot| PackageRoot { - is_local: false, + // mark the sysroot as mutable if it is located inside of the project + is_local: project_root + .map_or(false, |project_root| sysroot.src_root().starts_with(project_root)), include: vec![sysroot.src_root().to_path_buf()], exclude: Vec::new(), }) @@ -457,7 +480,7 @@ impl ProjectWorkspace { }) .collect::>() .into_iter() - .chain(mk_sysroot(sysroot.as_ref())) + .chain(mk_sysroot(sysroot.as_ref(), Some(project.path()))) .collect::>(), ProjectWorkspace::Cargo { cargo, @@ -507,7 +530,7 @@ impl ProjectWorkspace { } PackageRoot { is_local, include, exclude } }) - .chain(mk_sysroot(sysroot.as_ref())) + .chain(mk_sysroot(sysroot.as_ref(), Some(cargo.workspace_root()))) .chain(rustc.iter().flat_map(|rustc| { rustc.packages().map(move |krate| PackageRoot { is_local: false, @@ -524,7 +547,7 @@ impl ProjectWorkspace { include: vec![detached_file.clone()], exclude: Vec::new(), }) - .chain(mk_sysroot(sysroot.as_ref())) + .chain(mk_sysroot(sysroot.as_ref(), None)) .collect(), } } diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 8ea161dbdc4f3..c8075aefbbeca 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -117,6 +117,11 @@ config_data! { /// /// This option does not take effect until rust-analyzer is restarted. cargo_sysroot: Option = "\"discover\"", + /// Relative path to the sysroot library sources. If left unset, this will default to + /// `{cargo.sysroot}/lib/rustlib/src/rust/library`. + /// + /// This option does not take effect until rust-analyzer is restarted. + cargo_sysrootSrc: Option = "null", /// Compilation target override (target triple). // FIXME(@poliorcetics): move to multiple targets here too, but this will need more work // than `checkOnSave_target` @@ -1103,6 +1108,8 @@ impl Config { RustcSource::Path(self.root_path.join(sysroot)) } }); + let sysroot_src = + self.data.cargo_sysrootSrc.as_ref().map(|sysroot| self.root_path.join(sysroot)); CargoConfig { features: match &self.data.cargo_features { @@ -1114,6 +1121,7 @@ impl Config { }, target: self.data.cargo_target.clone(), sysroot, + sysroot_src, rustc_source, unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()), wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper, diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index 1bfb8a917a803..d5fdedfe3af45 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -97,6 +97,14 @@ Relative path to the sysroot, or "discover" to try to automatically find it via Unsetting this disables sysroot loading. +This option does not take effect until rust-analyzer is restarted. +-- +[[rust-analyzer.cargo.sysrootSrc]]rust-analyzer.cargo.sysrootSrc (default: `null`):: ++ +-- +Relative path to the sysroot library sources. If left unset, this will default to +`{cargo.sysroot}/lib/rustlib/src/rust/library`. 
+ This option does not take effect until rust-analyzer is restarted. -- [[rust-analyzer.cargo.target]]rust-analyzer.cargo.target (default: `null`):: diff --git a/editors/code/package.json b/editors/code/package.json index 599e9c5a7bf5b..7160781b6f3eb 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -539,6 +539,14 @@ "string" ] }, + "rust-analyzer.cargo.sysrootSrc": { + "markdownDescription": "Relative path to the sysroot library sources. If left unset, this will default to\n`{cargo.sysroot}/lib/rustlib/src/rust/library`.\n\nThis option does not take effect until rust-analyzer is restarted.", + "default": null, + "type": [ + "null", + "string" + ] + }, "rust-analyzer.cargo.target": { "markdownDescription": "Compilation target override (target triple).", "default": null, From c6305c56592b49ae817e550e3cd42ef704e2307d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 6 Feb 2023 12:38:57 +0100 Subject: [PATCH 283/501] fix: Don't panic on broken syntax trees in adjustment inlay hints --- crates/ide/src/inlay_hints/adjustment.rs | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/crates/ide/src/inlay_hints/adjustment.rs b/crates/ide/src/inlay_hints/adjustment.rs index 45e85a338a452..188eb7f977ba6 100644 --- a/crates/ide/src/inlay_hints/adjustment.rs +++ b/crates/ide/src/inlay_hints/adjustment.rs @@ -6,6 +6,7 @@ use hir::{Adjust, Adjustment, AutoBorrow, HirDisplay, Mutability, PointerCast, Safety, Semantics}; use ide_db::RootDatabase; +use stdx::never; use syntax::{ ast::{self, make, AstNode}, ted, @@ -210,16 +211,21 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, ted::replace(expr.syntax(), dummy_expr.syntax()); let parent = dummy_expr.syntax().parent(); - let expr = if postfix { - let ast::Expr::TryExpr(e) = &dummy_expr else { unreachable!() }; - let Some(ast::Expr::ParenExpr(e)) = e.expr() else { unreachable!() }; + let Some(expr) = (|| { + if postfix { + let ast::Expr::TryExpr(e) = &dummy_expr else { return None }; + let Some(ast::Expr::ParenExpr(e)) = e.expr() else { return None }; - e.expr().unwrap() - } else { - let ast::Expr::RefExpr(e) = &dummy_expr else { unreachable!() }; - let Some(ast::Expr::ParenExpr(e)) = e.expr() else { unreachable!() }; + e.expr() + } else { + let ast::Expr::RefExpr(e) = &dummy_expr else { return None }; + let Some(ast::Expr::ParenExpr(e)) = e.expr() else { return None }; - e.expr().unwrap() + e.expr() + } + })() else { + never!("broken syntax tree?\n{:?}\n{:?}", expr, dummy_expr); + return (true, true) }; // At this point From 521c5f36d69b9056c0147502d12426fbd9b6c9c4 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Sun, 29 Jan 2023 13:37:05 +0000 Subject: [PATCH 284/501] Migrate `rustc_parse` to derive diagnostics --- .../locales/en-US/parse.ftl | 115 +++++++ compiler/rustc_parse/src/errors.rs | 312 ++++++++++++++++++ compiler/rustc_parse/src/lexer/mod.rs | 159 +++------ .../src/lexer/unescape_error_reporting.rs | 248 +++++--------- .../rustc_parse/src/lexer/unicode_chars.rs | 60 ++-- compiler/rustc_parse/src/parser/expr.rs | 3 +- compiler/rustc_parse/src/parser/item.rs | 124 +++---- tests/ui/parser/raw/too-many-hash.rs | 6 + tests/ui/parser/raw/too-many-hash.stderr | 8 + 9 files changed, 655 insertions(+), 380 deletions(-) create mode 100644 tests/ui/parser/raw/too-many-hash.rs create mode 100644 tests/ui/parser/raw/too-many-hash.stderr diff --git a/compiler/rustc_error_messages/locales/en-US/parse.ftl b/compiler/rustc_error_messages/locales/en-US/parse.ftl index 
2ef3dba557ea5..45f84ac4c625b 100644 --- a/compiler/rustc_error_messages/locales/en-US/parse.ftl +++ b/compiler/rustc_error_messages/locales/en-US/parse.ftl @@ -582,3 +582,118 @@ parse_negative_bounds_not_supported = negative bounds are not supported parse_help_set_edition_cargo = set `edition = "{$edition}"` in `Cargo.toml` parse_help_set_edition_standalone = pass `--edition {$edition}` to `rustc` parse_note_edition_guide = for more on editions, read https://doc.rust-lang.org/edition-guide + +parse_unexpected_token_after_dot = unexpected token: `{$actual}` + +parse_cannot_be_raw_ident = `{$ident}` cannot be a raw identifier + +parse_cr_doc_comment = bare CR not allowed in {$block -> + [true] block doc-comment + *[false] doc-comment +} + +parse_no_digits_literal = no valid digits found for number + +parse_invalid_digit_literal = invalid digit for a base {$base} literal + +parse_empty_exponent_float = expected at least one digit in exponent + +parse_float_literal_unsupported_base = {$base} float literal is not supported + +parse_more_than_one_char = character literal may only contain one codepoint + .followed_by = this `{$chr}` is followed by the combining {$len -> + [one] mark + *[other] marks + } `{$escaped_marks}` + .non_printing = there are non-printing characters, the full sequence is `{$escaped}` + .consider_normalized = consider using the normalized form `{$ch}` of this character + .remove_non = consider removing the non-printing characters + .use_double_quotes = if you meant to write a {$is_byte -> + [true] byte string + *[false] `str` + } literal, use double quotes + +parse_no_brace_unicode_escape = incorrect unicode escape sequence + .label = {parse_no_brace_unicode_escape} + .use_braces = format of unicode escape sequences uses braces + .format_of_unicode = format of unicode escape sequences is `\u{"{...}"}` + +parse_invalid_unicode_escape = invalid unicode character escape + .label = invalid escape + .help = unicode escape must {$surrogate -> + [true] not be a surrogate + *[false] be at most 10FFFF + } + +parse_escape_only_char = {$byte -> + [true] byte + *[false] character + } constant must be escaped: `{$escaped_msg}` + .escape = escape the character + +parse_bare_cr = {$double_quotes -> + [true] bare CR not allowed in string, use `\r` instead + *[false] character constant must be escaped: `\r` + } + .escape = escape the character + +parse_bare_cr_in_raw_string = bare CR not allowed in raw string + +parse_too_short_hex_escape = numeric character escape is too short + +parse_invalid_char_in_escape = {parse_invalid_char_in_escape_msg}: `{$ch}` + .label = {parse_invalid_char_in_escape_msg} + +parse_invalid_char_in_escape_msg = invalid character in {$is_hex -> + [true] numeric character + *[false] unicode + } escape + +parse_out_of_range_hex_escape = out of range hex escape + .label = must be a character in the range [\x00-\x7f] + +parse_leading_underscore_unicode_escape = {parse_leading_underscore_unicode_escape_label}: `_` +parse_leading_underscore_unicode_escape_label = invalid start of unicode escape + +parse_overlong_unicode_escape = overlong unicode escape + .label = must have at most 6 hex digits + +parse_unclosed_unicode_escape = unterminated unicode escape + .label = missing a closing `{"}"}` + .terminate = terminate the unicode escape + +parse_unicode_escape_in_byte = unicode escape in byte string + .label = {parse_unicode_escape_in_byte} + .help = unicode escape sequences cannot be used as a byte or in a byte string + +parse_empty_unicode_escape = empty unicode escape + 
.label = this escape must have at least 1 hex digit + +parse_zero_chars = empty character literal + .label = {parse_zero_chars} + +parse_lone_slash = invalid trailing slash in literal + .label = {parse_lone_slash} + +parse_unskipped_whitespace = non-ASCII whitespace symbol '{$ch}' is not skipped + .label = {parse_unskipped_whitespace} + +parse_multiple_skipped_lines = multiple lines skipped by escaped newline + .label = skipping everything up to and including this point + +parse_unknown_prefix = prefix `{$prefix}` is unknown + .label = unknown prefix + .note = prefixed identifiers and literals are reserved since Rust 2021 + .suggestion_br = use `br` for a raw byte string + .suggestion_whitespace = consider inserting whitespace here + +parse_too_many_hashes = too many `#` symbols: raw strings may be delimited by up to 255 `#` symbols, but found {$num} + +parse_unknown_start_of_token = unknown start of token: {$escaped} + .sugg_quotes = Unicode characters '“' (Left Double Quotation Mark) and '”' (Right Double Quotation Mark) look like '{$ascii_str}' ({$ascii_name}), but are not + .sugg_other = Unicode character '{$ch}' ({$u_name}) looks like '{$ascii_str}' ({$ascii_name}), but it is not + .help_null = source files must contain UTF-8 encoded text, unexpected null bytes might occur when a different encoding is used + .note_repeats = character appears {$repeats -> + [one] once more + *[other] {$repeats} more times + } diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs index fc7c839f1c461..81a727182cd89 100644 --- a/compiler/rustc_parse/src/errors.rs +++ b/compiler/rustc_parse/src/errors.rs @@ -1368,6 +1368,14 @@ pub(crate) struct SelfArgumentPointer { pub span: Span, } +#[derive(Diagnostic)] +#[diag(parse_unexpected_token_after_dot)] +pub struct UnexpectedTokenAfterDot<'a> { + #[primary_span] + pub span: Span, + pub actual: Cow<'a, str>, +} + #[derive(Diagnostic)] #[diag(parse_visibility_not_followed_by_item)] #[help] @@ -1650,6 +1658,310 @@ pub(crate) enum TopLevelOrPatternNotAllowed { }, } +#[derive(Diagnostic)] +#[diag(parse_cannot_be_raw_ident)] +pub struct CannotBeRawIdent { + #[primary_span] + pub span: Span, + pub ident: Symbol, +} + +#[derive(Diagnostic)] +#[diag(parse_cr_doc_comment)] +pub struct CrDocComment { + #[primary_span] + pub span: Span, + pub block: bool, +} + +#[derive(Diagnostic)] +#[diag(parse_no_digits_literal, code = "E0768")] +pub struct NoDigitsLiteral { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(parse_invalid_digit_literal)] +pub struct InvalidDigitLiteral { + #[primary_span] + pub span: Span, + pub base: u32, +} + +#[derive(Diagnostic)] +#[diag(parse_empty_exponent_float)] +pub struct EmptyExponentFloat { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(parse_float_literal_unsupported_base)] +pub struct FloatLiteralUnsupportedBase { + #[primary_span] + pub span: Span, + pub base: &'static str, +} + +#[derive(Diagnostic)] +#[diag(parse_unknown_prefix)] +#[note] +pub struct UnknownPrefix<'a> { + #[primary_span] + #[label] + pub span: Span, + pub prefix: &'a str, + #[subdiagnostic] + pub sugg: Option, +} + +#[derive(Subdiagnostic)] +pub enum UnknownPrefixSugg { + #[suggestion(suggestion_br, code = "br", applicability = "maybe-incorrect", style = "verbose")] + UseBr(#[primary_span] Span), + #[suggestion( + suggestion_whitespace, + code = " ", + applicability = "maybe-incorrect", + style = "verbose" + )] + Whitespace(#[primary_span] Span), +} + +#[derive(Diagnostic)] 
+#[diag(parse_too_many_hashes)] +pub struct TooManyHashes { + #[primary_span] + pub span: Span, + pub num: u32, +} + +#[derive(Diagnostic)] +#[diag(parse_unknown_start_of_token)] +pub struct UnknownTokenStart { + #[primary_span] + pub span: Span, + pub escaped: String, + #[subdiagnostic] + pub sugg: Option, + #[subdiagnostic] + pub null: Option, + #[subdiagnostic] + pub repeat: Option, +} + +#[derive(Subdiagnostic)] +pub enum TokenSubstitution { + #[suggestion(sugg_quotes, code = "{suggestion}", applicability = "maybe-incorrect")] + DirectedQuotes { + #[primary_span] + span: Span, + suggestion: String, + ascii_str: &'static str, + ascii_name: &'static str, + }, + #[suggestion(sugg_other, code = "{suggestion}", applicability = "maybe-incorrect")] + Other { + #[primary_span] + span: Span, + suggestion: String, + ch: String, + u_name: &'static str, + ascii_str: &'static str, + ascii_name: &'static str, + }, +} + +#[derive(Subdiagnostic)] +#[note(note_repeats)] +pub struct UnknownTokenRepeat { + pub repeats: usize, +} + +#[derive(Subdiagnostic)] +#[help(help_null)] +pub struct UnknownTokenNull; + +#[derive(Diagnostic)] +pub enum UnescapeError { + #[diag(parse_invalid_unicode_escape)] + #[help] + InvalidUnicodeEscape { + #[primary_span] + #[label] + span: Span, + surrogate: bool, + }, + #[diag(parse_escape_only_char)] + EscapeOnlyChar { + #[primary_span] + span: Span, + #[suggestion(escape, applicability = "machine-applicable", code = "{escaped_sugg}")] + char_span: Span, + escaped_sugg: String, + escaped_msg: String, + byte: bool, + }, + #[diag(parse_bare_cr)] + BareCr { + #[primary_span] + #[suggestion(escape, applicability = "machine-applicable", code = "\\r")] + span: Span, + double_quotes: bool, + }, + #[diag(parse_bare_cr_in_raw_string)] + BareCrRawString(#[primary_span] Span), + #[diag(parse_too_short_hex_escape)] + TooShortHexEscape(#[primary_span] Span), + #[diag(parse_invalid_char_in_escape)] + InvalidCharInEscape { + #[primary_span] + #[label] + span: Span, + is_hex: bool, + ch: String, + }, + #[diag(parse_out_of_range_hex_escape)] + OutOfRangeHexEscape( + #[primary_span] + #[label] + Span, + ), + #[diag(parse_leading_underscore_unicode_escape)] + LeadingUnderscoreUnicodeEscape { + #[primary_span] + #[label(parse_leading_underscore_unicode_escape_label)] + span: Span, + ch: String, + }, + #[diag(parse_overlong_unicode_escape)] + OverlongUnicodeEscape( + #[primary_span] + #[label] + Span, + ), + #[diag(parse_unclosed_unicode_escape)] + UnclosedUnicodeEscape( + #[primary_span] + #[label] + Span, + #[suggestion(terminate, code = "}}", applicability = "maybe-incorrect", style = "verbose")] + Span, + ), + #[diag(parse_no_brace_unicode_escape)] + NoBraceInUnicodeEscape { + #[primary_span] + span: Span, + #[label] + label: Option, + #[subdiagnostic] + sub: NoBraceUnicodeSub, + }, + #[diag(parse_unicode_escape_in_byte)] + #[help] + UnicodeEscapeInByte( + #[primary_span] + #[label] + Span, + ), + #[diag(parse_empty_unicode_escape)] + EmptyUnicodeEscape( + #[primary_span] + #[label] + Span, + ), + #[diag(parse_zero_chars)] + ZeroChars( + #[primary_span] + #[label] + Span, + ), + #[diag(parse_lone_slash)] + LoneSlash( + #[primary_span] + #[label] + Span, + ), + #[diag(parse_unskipped_whitespace)] + UnskippedWhitespace { + #[primary_span] + span: Span, + #[label] + char_span: Span, + ch: String, + }, + #[diag(parse_multiple_skipped_lines)] + MultipleSkippedLinesWarning( + #[primary_span] + #[label] + Span, + ), + #[diag(parse_more_than_one_char)] + MoreThanOneChar { + #[primary_span] + span: 
Span, + #[subdiagnostic] + note: Option, + #[subdiagnostic] + suggestion: MoreThanOneCharSugg, + }, +} + +#[derive(Subdiagnostic)] +pub enum MoreThanOneCharSugg { + #[suggestion(consider_normalized, code = "{normalized}", applicability = "machine-applicable")] + NormalizedForm { + #[primary_span] + span: Span, + ch: String, + normalized: String, + }, + #[suggestion(remove_non, code = "{ch}", applicability = "maybe-incorrect")] + RemoveNonPrinting { + #[primary_span] + span: Span, + ch: String, + }, + #[suggestion(use_double_quotes, code = "{sugg}", applicability = "machine-applicable")] + Quotes { + #[primary_span] + span: Span, + is_byte: bool, + sugg: String, + }, +} + +#[derive(Subdiagnostic)] +pub enum MoreThanOneCharNote { + #[note(followed_by)] + AllCombining { + #[primary_span] + span: Span, + chr: String, + len: usize, + escaped_marks: String, + }, + #[note(non_printing)] + NonPrinting { + #[primary_span] + span: Span, + escaped: String, + }, +} + +#[derive(Subdiagnostic)] +pub enum NoBraceUnicodeSub { + #[suggestion(use_braces, code = "{suggestion}", applicability = "maybe-incorrect")] + Suggestion { + #[primary_span] + span: Span, + suggestion: String, + }, + #[help(format_of_unicode)] + Help, +} + #[derive(Subdiagnostic)] pub(crate) enum TopLevelOrPatternNotAllowedSugg { #[suggestion( diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index e957224a03377..bd998ed91d977 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -1,11 +1,10 @@ +use crate::errors; use crate::lexer::unicode_chars::UNICODE_ARRAY; use rustc_ast::ast::{self, AttrStyle}; use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::TokenStream; use rustc_ast::util::unicode::contains_text_flow_control_chars; -use rustc_errors::{ - error_code, Applicability, DiagnosticBuilder, ErrorGuaranteed, PResult, StashKey, -}; +use rustc_errors::{error_code, Applicability, DiagnosticBuilder, PResult, StashKey}; use rustc_lexer::unescape::{self, Mode}; use rustc_lexer::Cursor; use rustc_lexer::{Base, DocStyle, RawStrError}; @@ -151,7 +150,7 @@ impl<'a> StringReader<'a> { let span = self.mk_sp(start, self.pos); self.sess.symbol_gallery.insert(sym, span); if !sym.can_be_raw() { - self.err_span(span, &format!("`{}` cannot be a raw identifier", sym)); + self.sess.emit_err(errors::CannotBeRawIdent { span, ident: sym }); } self.sess.raw_identifier_spans.borrow_mut().push(span); token::Ident(sym, true) @@ -262,27 +261,24 @@ impl<'a> StringReader<'a> { self.nbsp_is_whitespace = true; } let repeats = it.take_while(|c1| *c1 == c).count(); - let mut err = - self.struct_err_span_char(start, self.pos + Pos::from_usize(repeats * c.len_utf8()), "unknown start of token", c); // FIXME: the lexer could be used to turn the ASCII version of unicode // homoglyphs, instead of keeping a table in `check_for_substitution`into the // token. Ideally, this should be inside `rustc_lexer`. However, we should // first remove compound tokens like `<<` from `rustc_lexer`, and then add // fancier error recovery to it, as there will be less overall work to do this // way. 
- let token = unicode_chars::check_for_substitution(self, start, c, &mut err, repeats+1); - if c == '\x00' { - err.help("source files must contain UTF-8 encoded text, unexpected null bytes might occur when a different encoding is used"); - } - if repeats > 0 { - if repeats == 1 { - err.note(format!("character appears once more")); - } else { - err.note(format!("character appears {repeats} more times")); - } - swallow_next_invalid = repeats; - } - err.emit(); + let (token, sugg) = unicode_chars::check_for_substitution(self, start, c, repeats+1); + self.sess.emit_err(errors::UnknownTokenStart { + span: self.mk_sp(start, self.pos + Pos::from_usize(repeats * c.len_utf8())), + escaped: escaped_char(c), + sugg, + null: if c == '\x00' {Some(errors::UnknownTokenNull)} else {None}, + repeat: if repeats > 0 { + swallow_next_invalid = repeats; + Some(errors::UnknownTokenRepeat { repeats }) + } else {None} + }); + if let Some(token) = token { token } else { @@ -297,26 +293,6 @@ impl<'a> StringReader<'a> { } } - /// Report a fatal lexical error with a given span. - fn fatal_span(&self, sp: Span, m: &str) -> ! { - self.sess.span_diagnostic.span_fatal(sp, m) - } - - /// Report a lexical error with a given span. - fn err_span(&self, sp: Span, m: &str) { - self.sess.span_diagnostic.struct_span_err(sp, m).emit(); - } - - /// Report a fatal error spanning [`from_pos`, `to_pos`). - fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> ! { - self.fatal_span(self.mk_sp(from_pos, to_pos), m) - } - - /// Report a lexical error spanning [`from_pos`, `to_pos`). - fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) { - self.err_span(self.mk_sp(from_pos, to_pos), m) - } - fn struct_fatal_span_char( &self, from_pos: BytePos, @@ -329,18 +305,6 @@ impl<'a> StringReader<'a> { .struct_span_fatal(self.mk_sp(from_pos, to_pos), &format!("{}: {}", m, escaped_char(c))) } - fn struct_err_span_char( - &self, - from_pos: BytePos, - to_pos: BytePos, - m: &str, - c: char, - ) -> DiagnosticBuilder<'a, ErrorGuaranteed> { - self.sess - .span_diagnostic - .struct_span_err(self.mk_sp(from_pos, to_pos), &format!("{}: {}", m, escaped_char(c))) - } - /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly /// complain about it. 
fn lint_unicode_text_flow(&self, start: BytePos) { @@ -368,14 +332,12 @@ impl<'a> StringReader<'a> { ) -> TokenKind { if content.contains('\r') { for (idx, _) in content.char_indices().filter(|&(_, c)| c == '\r') { - self.err_span_( + let span = self.mk_sp( content_start + BytePos(idx as u32), content_start + BytePos(idx as u32 + 1), - match comment_kind { - CommentKind::Line => "bare CR not allowed in doc-comment", - CommentKind::Block => "bare CR not allowed in block doc-comment", - }, ); + let block = matches!(comment_kind, CommentKind::Block); + self.sess.emit_err(errors::CrDocComment { span, block }); } } @@ -454,26 +416,20 @@ impl<'a> StringReader<'a> { } rustc_lexer::LiteralKind::Int { base, empty_int } => { if empty_int { - self.sess - .span_diagnostic - .struct_span_err_with_code( - self.mk_sp(start, end), - "no valid digits found for number", - error_code!(E0768), - ) - .emit(); + let span = self.mk_sp(start, end); + self.sess.emit_err(errors::NoDigitsLiteral { span }); (token::Integer, sym::integer(0)) } else { if matches!(base, Base::Binary | Base::Octal) { let base = base as u32; let s = self.str_from_to(start + BytePos(2), end); for (idx, c) in s.char_indices() { + let span = self.mk_sp( + start + BytePos::from_usize(2 + idx), + start + BytePos::from_usize(2 + idx + c.len_utf8()), + ); if c != '_' && c.to_digit(base).is_none() { - self.err_span_( - start + BytePos::from_usize(2 + idx), - start + BytePos::from_usize(2 + idx + c.len_utf8()), - &format!("invalid digit for a base {} literal", base), - ); + self.sess.emit_err(errors::InvalidDigitLiteral { span, base }); } } } @@ -482,19 +438,18 @@ impl<'a> StringReader<'a> { } rustc_lexer::LiteralKind::Float { base, empty_exponent } => { if empty_exponent { - self.err_span_(start, self.pos, "expected at least one digit in exponent"); + let span = self.mk_sp(start, self.pos); + self.sess.emit_err(errors::EmptyExponentFloat { span }); } - match base { - Base::Hexadecimal => { - self.err_span_(start, end, "hexadecimal float literal is not supported") - } - Base::Octal => { - self.err_span_(start, end, "octal float literal is not supported") - } - Base::Binary => { - self.err_span_(start, end, "binary float literal is not supported") - } - _ => {} + let base = match base { + Base::Hexadecimal => Some("hexadecimal"), + Base::Octal => Some("octal"), + Base::Binary => Some("binary"), + _ => None, + }; + if let Some(base) = base { + let span = self.mk_sp(start, end); + self.sess.emit_err(errors::FloatLiteralUnsupportedBase { span, base }); } (token::Float, self.symbol_from_to(start, end)) } @@ -644,54 +599,34 @@ impl<'a> StringReader<'a> { // identifier tokens. fn report_unknown_prefix(&self, start: BytePos) { let prefix_span = self.mk_sp(start, self.pos); - let prefix_str = self.str_from_to(start, self.pos); - let msg = format!("prefix `{}` is unknown", prefix_str); + let prefix = self.str_from_to(start, self.pos); let expn_data = prefix_span.ctxt().outer_expn_data(); if expn_data.edition >= Edition::Edition2021 { // In Rust 2021, this is a hard error. 
- let mut err = self.sess.span_diagnostic.struct_span_err(prefix_span, &msg); - err.span_label(prefix_span, "unknown prefix"); - if prefix_str == "rb" { - err.span_suggestion_verbose( - prefix_span, - "use `br` for a raw byte string", - "br", - Applicability::MaybeIncorrect, - ); + let sugg = if prefix == "rb" { + Some(errors::UnknownPrefixSugg::UseBr(prefix_span)) } else if expn_data.is_root() { - err.span_suggestion_verbose( - prefix_span.shrink_to_hi(), - "consider inserting whitespace here", - " ", - Applicability::MaybeIncorrect, - ); - } - err.note("prefixed identifiers and literals are reserved since Rust 2021"); - err.emit(); + Some(errors::UnknownPrefixSugg::Whitespace(prefix_span.shrink_to_hi())) + } else { + None + }; + self.sess.emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg }); } else { // Before Rust 2021, only emit a lint for migration. self.sess.buffer_lint_with_diagnostic( &RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, prefix_span, ast::CRATE_NODE_ID, - &msg, + &format!("prefix `{prefix}` is unknown"), BuiltinLintDiagnostics::ReservedPrefix(prefix_span), ); } } - fn report_too_many_hashes(&self, start: BytePos, found: u32) -> ! { - self.fatal_span_( - start, - self.pos, - &format!( - "too many `#` symbols: raw strings may be delimited \ - by up to 255 `#` symbols, but found {}", - found - ), - ) + fn report_too_many_hashes(&self, start: BytePos, num: u32) -> ! { + self.sess.emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num }); } fn cook_quoted( diff --git a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs index 6373f5b4fd6ff..0d12ec6081d83 100644 --- a/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs +++ b/compiler/rustc_parse/src/lexer/unescape_error_reporting.rs @@ -3,10 +3,12 @@ use std::iter::once; use std::ops::Range; -use rustc_errors::{pluralize, Applicability, Handler}; +use rustc_errors::{Applicability, Handler}; use rustc_lexer::unescape::{EscapeError, Mode}; use rustc_span::{BytePos, Span}; +use crate::errors::{MoreThanOneCharNote, MoreThanOneCharSugg, NoBraceUnicodeSub, UnescapeError}; + pub(crate) fn emit_unescape_error( handler: &Handler, // interior part of the literal, without quotes @@ -31,53 +33,32 @@ pub(crate) fn emit_unescape_error( }; match error { EscapeError::LoneSurrogateUnicodeEscape => { - handler - .struct_span_err(span, "invalid unicode character escape") - .span_label(span, "invalid escape") - .help("unicode escape must not be a surrogate") - .emit(); + handler.emit_err(UnescapeError::InvalidUnicodeEscape { span, surrogate: true }); } EscapeError::OutOfRangeUnicodeEscape => { - handler - .struct_span_err(span, "invalid unicode character escape") - .span_label(span, "invalid escape") - .help("unicode escape must be at most 10FFFF") - .emit(); + handler.emit_err(UnescapeError::InvalidUnicodeEscape { span, surrogate: false }); } EscapeError::MoreThanOneChar => { use unicode_normalization::{char::is_combining_mark, UnicodeNormalization}; + let mut sugg = None; + let mut note = None; - let mut has_help = false; - let mut handler = handler.struct_span_err( - span_with_quotes, - "character literal may only contain one codepoint", - ); - - if lit.chars().skip(1).all(|c| is_combining_mark(c)) { - let escaped_marks = - lit.chars().skip(1).map(|c| c.escape_default().to_string()).collect::>(); - handler.span_note( - span, - &format!( - "this `{}` is followed by the combining mark{} `{}`", - lit.chars().next().unwrap(), - 
pluralize!(escaped_marks.len()), - escaped_marks.join(""), - ), - ); + let lit_chars = lit.chars().collect::>(); + let (first, rest) = lit_chars.split_first().unwrap(); + if rest.iter().copied().all(is_combining_mark) { let normalized = lit.nfc().to_string(); if normalized.chars().count() == 1 { - has_help = true; - handler.span_suggestion( - span, - &format!( - "consider using the normalized form `{}` of this character", - normalized.chars().next().unwrap().escape_default() - ), - normalized, - Applicability::MachineApplicable, - ); + let ch = normalized.chars().next().unwrap().escape_default().to_string(); + sugg = Some(MoreThanOneCharSugg::NormalizedForm { span, ch, normalized }); } + let escaped_marks = + rest.iter().map(|c| c.escape_default().to_string()).collect::>(); + note = Some(MoreThanOneCharNote::AllCombining { + span, + chr: format!("{first}"), + len: escaped_marks.len(), + escaped_marks: escaped_marks.join(""), + }); } else { let printable: Vec = lit .chars() @@ -87,32 +68,18 @@ pub(crate) fn emit_unescape_error( }) .collect(); - if let [ch] = printable.as_slice() { - has_help = true; - - handler.span_note( + if let &[ch] = printable.as_slice() { + sugg = + Some(MoreThanOneCharSugg::RemoveNonPrinting { span, ch: ch.to_string() }); + note = Some(MoreThanOneCharNote::NonPrinting { span, - &format!( - "there are non-printing characters, the full sequence is `{}`", - lit.escape_default(), - ), - ); - - handler.span_suggestion( - span, - "consider removing the non-printing characters", - ch, - Applicability::MaybeIncorrect, - ); + escaped: lit.escape_default().to_string(), + }); } - } - - if !has_help { - let (prefix, msg) = if mode.is_byte() { - ("b", "if you meant to write a byte string literal, use double quotes") - } else { - ("", "if you meant to write a `str` literal, use double quotes") - }; + }; + let sugg = sugg.unwrap_or_else(|| { + let is_byte = mode.is_byte(); + let prefix = if is_byte { "b" } else { "" }; let mut escaped = String::with_capacity(lit.len()); let mut chrs = lit.chars().peekable(); while let Some(first) = chrs.next() { @@ -129,54 +96,32 @@ pub(crate) fn emit_unescape_error( (c, _) => escaped.push(c), }; } - handler.span_suggestion( - span_with_quotes, - msg, - format!("{prefix}\"{escaped}\""), - Applicability::MachineApplicable, - ); - } - - handler.emit(); + let sugg = format!("{prefix}\"{escaped}\""); + MoreThanOneCharSugg::Quotes { span: span_with_quotes, is_byte, sugg } + }); + handler.emit_err(UnescapeError::MoreThanOneChar { + span: span_with_quotes, + note, + suggestion: sugg, + }); } EscapeError::EscapeOnlyChar => { let (c, char_span) = last_char(); - - let msg = if mode.is_byte() { - "byte constant must be escaped" - } else { - "character constant must be escaped" - }; - handler - .struct_span_err(span, &format!("{}: `{}`", msg, escaped_char(c))) - .span_suggestion( - char_span, - "escape the character", - c.escape_default(), - Applicability::MachineApplicable, - ) - .emit(); + handler.emit_err(UnescapeError::EscapeOnlyChar { + span, + char_span, + escaped_sugg: c.escape_default().to_string(), + escaped_msg: escaped_char(c), + byte: mode.is_byte(), + }); } EscapeError::BareCarriageReturn => { - let msg = if mode.in_double_quotes() { - "bare CR not allowed in string, use `\\r` instead" - } else { - "character constant must be escaped: `\\r`" - }; - handler - .struct_span_err(span, msg) - .span_suggestion( - span, - "escape the character", - "\\r", - Applicability::MachineApplicable, - ) - .emit(); + let double_quotes = mode.in_double_quotes(); + 
handler.emit_err(UnescapeError::BareCr { span, double_quotes }); } EscapeError::BareCarriageReturnInRawString => { assert!(mode.in_double_quotes()); - let msg = "bare CR not allowed in raw string"; - handler.span_err(span, msg); + handler.emit_err(UnescapeError::BareCrRawString(span)); } EscapeError::InvalidEscape => { let (c, span) = last_char(); @@ -213,22 +158,13 @@ pub(crate) fn emit_unescape_error( diag.emit(); } EscapeError::TooShortHexEscape => { - handler.span_err(span, "numeric character escape is too short"); + handler.emit_err(UnescapeError::TooShortHexEscape(span)); } EscapeError::InvalidCharInHexEscape | EscapeError::InvalidCharInUnicodeEscape => { let (c, span) = last_char(); - - let msg = if error == EscapeError::InvalidCharInHexEscape { - "invalid character in numeric character escape" - } else { - "invalid character in unicode escape" - }; - let c = escaped_char(c); - - handler - .struct_span_err(span, &format!("{}: `{}`", msg, c)) - .span_label(span, msg) - .emit(); + let is_hex = error == EscapeError::InvalidCharInHexEscape; + let ch = escaped_char(c); + handler.emit_err(UnescapeError::InvalidCharInEscape { span, is_hex, ch }); } EscapeError::NonAsciiCharInByte => { let (c, span) = last_char(); @@ -278,41 +214,22 @@ pub(crate) fn emit_unescape_error( err.emit(); } EscapeError::OutOfRangeHexEscape => { - handler - .struct_span_err(span, "out of range hex escape") - .span_label(span, "must be a character in the range [\\x00-\\x7f]") - .emit(); + handler.emit_err(UnescapeError::OutOfRangeHexEscape(span)); } EscapeError::LeadingUnderscoreUnicodeEscape => { let (c, span) = last_char(); - let msg = "invalid start of unicode escape"; - handler - .struct_span_err(span, &format!("{}: `{}`", msg, c)) - .span_label(span, msg) - .emit(); + handler.emit_err(UnescapeError::LeadingUnderscoreUnicodeEscape { + span, + ch: escaped_char(c), + }); } EscapeError::OverlongUnicodeEscape => { - handler - .struct_span_err(span, "overlong unicode escape") - .span_label(span, "must have at most 6 hex digits") - .emit(); + handler.emit_err(UnescapeError::OverlongUnicodeEscape(span)); } EscapeError::UnclosedUnicodeEscape => { - handler - .struct_span_err(span, "unterminated unicode escape") - .span_label(span, "missing a closing `}`") - .span_suggestion_verbose( - span.shrink_to_hi(), - "terminate the unicode escape", - "}", - Applicability::MaybeIncorrect, - ) - .emit(); + handler.emit_err(UnescapeError::UnclosedUnicodeEscape(span, span.shrink_to_hi())); } EscapeError::NoBraceInUnicodeEscape => { - let msg = "incorrect unicode escape sequence"; - let mut diag = handler.struct_span_err(span, msg); - let mut suggestion = "\\u{".to_owned(); let mut suggestion_len = 0; let (c, char_span) = last_char(); @@ -322,54 +239,37 @@ pub(crate) fn emit_unescape_error( suggestion_len += c.len_utf8(); } - if suggestion_len > 0 { + let (label, sub) = if suggestion_len > 0 { suggestion.push('}'); let hi = char_span.lo() + BytePos(suggestion_len as u32); - diag.span_suggestion( - span.with_hi(hi), - "format of unicode escape sequences uses braces", - suggestion, - Applicability::MaybeIncorrect, - ); + (None, NoBraceUnicodeSub::Suggestion { span: span.with_hi(hi), suggestion }) } else { - diag.span_label(span, msg); - diag.help("format of unicode escape sequences is `\\u{...}`"); - } - - diag.emit(); + (Some(span), NoBraceUnicodeSub::Help) + }; + handler.emit_err(UnescapeError::NoBraceInUnicodeEscape { span, label, sub }); } EscapeError::UnicodeEscapeInByte => { - let msg = "unicode escape in byte string"; - handler - 
.struct_span_err(span, msg) - .span_label(span, msg) - .help("unicode escape sequences cannot be used as a byte or in a byte string") - .emit(); + handler.emit_err(UnescapeError::UnicodeEscapeInByte(span)); } EscapeError::EmptyUnicodeEscape => { - handler - .struct_span_err(span, "empty unicode escape") - .span_label(span, "this escape must have at least 1 hex digit") - .emit(); + handler.emit_err(UnescapeError::EmptyUnicodeEscape(span)); } EscapeError::ZeroChars => { - let msg = "empty character literal"; - handler.struct_span_err(span, msg).span_label(span, msg).emit(); + handler.emit_err(UnescapeError::ZeroChars(span)); } EscapeError::LoneSlash => { - let msg = "invalid trailing slash in literal"; - handler.struct_span_err(span, msg).span_label(span, msg).emit(); + handler.emit_err(UnescapeError::LoneSlash(span)); } EscapeError::UnskippedWhitespaceWarning => { let (c, char_span) = last_char(); - let msg = - format!("non-ASCII whitespace symbol '{}' is not skipped", c.escape_unicode()); - handler.struct_span_warn(span, &msg).span_label(char_span, &msg).emit(); + handler.emit_warning(UnescapeError::UnskippedWhitespace { + span, + ch: escaped_char(c), + char_span, + }); } EscapeError::MultipleSkippedLinesWarning => { - let msg = "multiple lines skipped by escaped newline"; - let bottom_msg = "skipping everything up to and including this point"; - handler.struct_span_warn(span, msg).span_label(span, bottom_msg).emit(); + handler.emit_warning(UnescapeError::MultipleSkippedLinesWarning(span)); } } } diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index 34d003ccfa7b4..d4f971d5bc84f 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -2,8 +2,10 @@ //! 
use super::StringReader; -use crate::token::{self, Delimiter}; -use rustc_errors::{Applicability, Diagnostic}; +use crate::{ + errors::TokenSubstitution, + token::{self, Delimiter}, +}; use rustc_span::{symbol::kw, BytePos, Pos, Span}; #[rustfmt::skip] // for line breaks @@ -338,48 +340,44 @@ pub(super) fn check_for_substitution<'a>( reader: &StringReader<'a>, pos: BytePos, ch: char, - err: &mut Diagnostic, count: usize, -) -> Option { - let &(_, u_name, ascii_str) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch)?; +) -> (Option, Option) { + let Some(&(_, u_name, ascii_str)) = UNICODE_ARRAY.iter().find(|&&(c, _, _)| c == ch) else { + return (None, None); + }; let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8() * count)); let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else { let msg = format!("substitution character not found for '{}'", ch); reader.sess.span_diagnostic.span_bug_no_panic(span, &msg); - return None; + return (None, None); }; // special help suggestion for "directed" double quotes - if let Some(s) = peek_delimited(&reader.src[reader.src_index(pos)..], '“', '”') { - let msg = format!( - "Unicode characters '“' (Left Double Quotation Mark) and \ - '”' (Right Double Quotation Mark) look like '{}' ({}), but are not", - ascii_str, ascii_name - ); - err.span_suggestion( - Span::with_root_ctxt( - pos, - pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()), - ), - &msg, - format!("\"{}\"", s), - Applicability::MaybeIncorrect, + let sugg = if let Some(s) = peek_delimited(&reader.src[reader.src_index(pos)..], '“', '”') { + let span = Span::with_root_ctxt( + pos, + pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()), ); + Some(TokenSubstitution::DirectedQuotes { + span, + suggestion: format!("\"{s}\""), + ascii_str, + ascii_name, + }) } else { - let msg = format!( - "Unicode character '{}' ({}) looks like '{}' ({}), but it is not", - ch, u_name, ascii_str, ascii_name - ); - err.span_suggestion( + let suggestion = ascii_str.to_string().repeat(count); + Some(TokenSubstitution::Other { span, - &msg, - ascii_str.to_string().repeat(count), - Applicability::MaybeIncorrect, - ); - } - token.clone() + suggestion, + ch: ch.to_string(), + u_name, + ascii_str, + ascii_name, + }) + }; + (token.clone(), sugg) } /// Extract string if found at current position with given delimiters diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index c37808f8c3d19..2fc8ce98af04d 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -5,6 +5,7 @@ use super::{ AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken, }; + use crate::errors; use crate::maybe_recover_from_interpolated_ty_qpath; use core::mem; @@ -1017,7 +1018,7 @@ impl<'a> Parser<'a> { fn error_unexpected_after_dot(&self) { // FIXME Could factor this out into non_fatal_unexpected or something. let actual = pprust::token_to_string(&self.token); - self.struct_span_err(self.token.span, &format!("unexpected token: `{actual}`")).emit(); + self.sess.emit_err(errors::UnexpectedTokenAfterDot { span: self.token.span, actual }); } // We need an identifier or integer, but the next token is a float. 
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 628e9d88cf1df..fd46a1292a823 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -1,18 +1,8 @@ -use crate::errors::{ - AmbiguousMissingKwForItemSub, AssociatedStaticItemNotAllowed, AsyncFnIn2015, - BoundsNotAllowedOnTraitAliases, ConstGlobalCannotBeMutable, ConstLetMutuallyExclusive, - DefaultNotFollowedByItem, DocCommentDoesNotDocumentAnything, EnumStructMutuallyExclusive, - ExpectedTraitInTraitImplFoundType, ExternCrateNameWithDashes, ExternCrateNameWithDashesSugg, - ExternItemCannotBeConst, HelpUseLatestEdition, MissingConstType, MissingForInTraitImpl, - MissingKeywordForItemDefinition, MissingTraitInTraitImpl, SelfArgumentPointer, - TraitAliasCannotBeAuto, TraitAliasCannotBeUnsafe, UnexpectedTokenAfterStructName, - UseEmptyBlockNotSemi, VisibilityNotFollowedByItem, -}; +use crate::errors; use super::diagnostics::{dummy_arg, ConsumeClosingDelim}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken}; -use crate::errors::FnTypoWithImpl; use rustc_ast::ast::*; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, TokenKind}; @@ -177,11 +167,11 @@ impl<'a> Parser<'a> { // At this point, we have failed to parse an item. if !matches!(vis.kind, VisibilityKind::Inherited) { - self.sess.emit_err(VisibilityNotFollowedByItem { span: vis.span, vis }); + self.sess.emit_err(errors::VisibilityNotFollowedByItem { span: vis.span, vis }); } if let Defaultness::Default(span) = def { - self.sess.emit_err(DefaultNotFollowedByItem { span }); + self.sess.emit_err(errors::DefaultNotFollowedByItem { span }); } if !attrs_allowed { @@ -403,7 +393,7 @@ impl<'a> Parser<'a> { let err = if self.check(&token::OpenDelim(Delimiter::Brace)) { // possible public struct definition where `struct` was forgotten - Some(MissingKeywordForItemDefinition::Struct { span: sp, ident }) + Some(errors::MissingKeywordForItemDefinition::Struct { span: sp, ident }) } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) { // possible public function or tuple struct definition where `fn`/`struct` was // forgotten @@ -412,34 +402,36 @@ impl<'a> Parser<'a> { self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes); - let err = if self.check(&token::RArrow) - || self.check(&token::OpenDelim(Delimiter::Brace)) - { - self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]); - self.bump(); // `{` - self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes); - if is_method { - MissingKeywordForItemDefinition::Method { span: sp, ident } - } else { - MissingKeywordForItemDefinition::Function { span: sp, ident } - } - } else if self.check(&token::Semi) { - MissingKeywordForItemDefinition::Struct { span: sp, ident } - } else { - MissingKeywordForItemDefinition::Ambiguous { - span: sp, - subdiag: if found_generics { - None - } else if let Ok(snippet) = self.span_to_snippet(ident_sp) { - Some(AmbiguousMissingKwForItemSub::SuggestMacro { span: full_sp, snippet }) + let err = + if self.check(&token::RArrow) || self.check(&token::OpenDelim(Delimiter::Brace)) { + self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]); + self.bump(); // `{` + self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes); + if is_method { + errors::MissingKeywordForItemDefinition::Method { span: sp, ident } } else { - Some(AmbiguousMissingKwForItemSub::HelpMacro) - }, - } - }; + 
errors::MissingKeywordForItemDefinition::Function { span: sp, ident } + } + } else if self.check(&token::Semi) { + errors::MissingKeywordForItemDefinition::Struct { span: sp, ident } + } else { + errors::MissingKeywordForItemDefinition::Ambiguous { + span: sp, + subdiag: if found_generics { + None + } else if let Ok(snippet) = self.span_to_snippet(ident_sp) { + Some(errors::AmbiguousMissingKwForItemSub::SuggestMacro { + span: full_sp, + snippet, + }) + } else { + Some(errors::AmbiguousMissingKwForItemSub::HelpMacro) + }, + } + }; Some(err) } else if found_generics { - Some(MissingKeywordForItemDefinition::Ambiguous { span: sp, subdiag: None }) + Some(errors::MissingKeywordForItemDefinition::Ambiguous { span: sp, subdiag: None }) } else { None }; @@ -567,8 +559,10 @@ impl<'a> Parser<'a> { let ty_first = if self.token.is_keyword(kw::For) && self.look_ahead(1, |t| t != &token::Lt) { let span = self.prev_token.span.between(self.token.span); - self.sess - .emit_err(MissingTraitInTraitImpl { span, for_span: span.to(self.token.span) }); + self.sess.emit_err(errors::MissingTraitInTraitImpl { + span, + for_span: span.to(self.token.span), + }); P(Ty { kind: TyKind::Path(None, err_path(span)), @@ -602,7 +596,7 @@ impl<'a> Parser<'a> { Some(ty_second) => { // impl Trait for Type if !has_for { - self.sess.emit_err(MissingForInTraitImpl { span: missing_for_span }); + self.sess.emit_err(errors::MissingForInTraitImpl { span: missing_for_span }); } let ty_first = ty_first.into_inner(); @@ -610,8 +604,9 @@ impl<'a> Parser<'a> { // This notably includes paths passed through `ty` macro fragments (#46438). TyKind::Path(None, path) => path, _ => { - self.sess - .emit_err(ExpectedTraitInTraitImplFoundType { span: ty_first.span }); + self.sess.emit_err(errors::ExpectedTraitInTraitImplFoundType { + span: ty_first.span, + }); err_path(ty_first.span) } }; @@ -655,7 +650,7 @@ impl<'a> Parser<'a> { // Recover `impl Ty;` instead of `impl Ty {}` if self.token == TokenKind::Semi { - self.sess.emit_err(UseEmptyBlockNotSemi { span: self.token.span }); + self.sess.emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span }); self.bump(); return Ok(vec![]); } @@ -812,7 +807,7 @@ impl<'a> Parser<'a> { // It's a trait alias. 
if had_colon { let span = span_at_colon.to(span_before_eq); - self.sess.emit_err(BoundsNotAllowedOnTraitAliases { span }); + self.sess.emit_err(errors::BoundsNotAllowedOnTraitAliases { span }); } let bounds = self.parse_generic_bounds(None)?; @@ -821,10 +816,10 @@ impl<'a> Parser<'a> { let whole_span = lo.to(self.prev_token.span); if is_auto == IsAuto::Yes { - self.sess.emit_err(TraitAliasCannotBeAuto { span: whole_span }); + self.sess.emit_err(errors::TraitAliasCannotBeAuto { span: whole_span }); } if let Unsafe::Yes(_) = unsafety { - self.sess.emit_err(TraitAliasCannotBeUnsafe { span: whole_span }); + self.sess.emit_err(errors::TraitAliasCannotBeUnsafe { span: whole_span }); } self.sess.gated_spans.gate(sym::trait_alias, whole_span); @@ -870,7 +865,7 @@ impl<'a> Parser<'a> { Ok(kind) => kind, Err(kind) => match kind { ItemKind::Static(a, _, b) => { - self.sess.emit_err(AssociatedStaticItemNotAllowed { span }); + self.sess.emit_err(errors::AssociatedStaticItemNotAllowed { span }); AssocItemKind::Const(Defaultness::Final, a, b) } _ => return self.error_bad_item_kind(span, &kind, "`trait`s or `impl`s"), @@ -1069,9 +1064,9 @@ impl<'a> Parser<'a> { write!(fixed_name, "_{}", part.name).unwrap(); } - self.sess.emit_err(ExternCrateNameWithDashes { + self.sess.emit_err(errors::ExternCrateNameWithDashes { span: fixed_name_sp, - sugg: ExternCrateNameWithDashesSugg { dashes }, + sugg: errors::ExternCrateNameWithDashesSugg { dashes }, }); Ok(Ident::from_str_and_span(&fixed_name, fixed_name_sp)) @@ -1122,7 +1117,7 @@ impl<'a> Parser<'a> { Ok(kind) => kind, Err(kind) => match kind { ItemKind::Const(_, a, b) => { - self.sess.emit_err(ExternItemCannotBeConst { + self.sess.emit_err(errors::ExternItemCannotBeConst { ident_span: ident.span, const_span: span.with_hi(ident.span.lo()), }); @@ -1173,10 +1168,10 @@ impl<'a> Parser<'a> { fn recover_const_mut(&mut self, const_span: Span) { if self.eat_keyword(kw::Mut) { let span = self.prev_token.span; - self.sess.emit_err(ConstGlobalCannotBeMutable { ident_span: span, const_span }); + self.sess.emit_err(errors::ConstGlobalCannotBeMutable { ident_span: span, const_span }); } else if self.eat_keyword(kw::Let) { let span = self.prev_token.span; - self.sess.emit_err(ConstLetMutuallyExclusive { span: const_span.to(span) }); + self.sess.emit_err(errors::ConstLetMutuallyExclusive { span: const_span.to(span) }); } } @@ -1262,7 +1257,8 @@ impl<'a> Parser<'a> { let span = self.prev_token.span.shrink_to_hi(); let err: DiagnosticBuilder<'_, ErrorGuaranteed> = - MissingConstType { span, colon, kind }.into_diagnostic(&self.sess.span_diagnostic); + errors::MissingConstType { span, colon, kind } + .into_diagnostic(&self.sess.span_diagnostic); err.stash(span, StashKey::ItemNoType); // The user intended that the type be inferred, @@ -1274,7 +1270,7 @@ impl<'a> Parser<'a> { fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> { if self.token.is_keyword(kw::Struct) { let span = self.prev_token.span.to(self.token.span); - let err = EnumStructMutuallyExclusive { span }; + let err = errors::EnumStructMutuallyExclusive { span }; if self.look_ahead(1, |t| t.is_ident()) { self.bump(); self.sess.emit_err(err); @@ -1289,7 +1285,7 @@ impl<'a> Parser<'a> { // Possibly recover `enum Foo;` instead of `enum Foo {}` let (variants, _) = if self.token == TokenKind::Semi { - self.sess.emit_err(UseEmptyBlockNotSemi { span: self.token.span }); + self.sess.emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span }); self.bump(); (vec![], false) } else { @@ -1415,7 +1411,8 @@ impl<'a> Parser<'a> 
{ self.expect_semi()?; body } else { - let err = UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone()); + let err = + errors::UnexpectedTokenAfterStructName::new(self.token.span, self.token.clone()); return Err(err.into_diagnostic(&self.sess.span_diagnostic)); }; @@ -1593,7 +1590,7 @@ impl<'a> Parser<'a> { token::CloseDelim(Delimiter::Brace) => {} token::DocComment(..) => { let previous_span = self.prev_token.span; - let mut err = DocCommentDoesNotDocumentAnything { + let mut err = errors::DocCommentDoesNotDocumentAnything { span: self.token.span, missing_comma: None, }; @@ -2103,7 +2100,7 @@ impl<'a> Parser<'a> { // If we see `for Ty ...` then user probably meant `impl` item. if self.token.is_keyword(kw::For) { old_err.cancel(); - return Err(self.sess.create_err(FnTypoWithImpl { fn_span })); + return Err(self.sess.create_err(errors::FnTypoWithImpl { fn_span })); } else { return Err(old_err); } @@ -2248,7 +2245,10 @@ impl<'a> Parser<'a> { if let Async::Yes { span, .. } = asyncness { if span.is_rust_2015() { - self.sess.emit_err(AsyncFnIn2015 { span, help: HelpUseLatestEdition::new() }); + self.sess.emit_err(errors::AsyncFnIn2015 { + span, + help: errors::HelpUseLatestEdition::new(), + }); } } @@ -2501,7 +2501,7 @@ impl<'a> Parser<'a> { }; // Recover for the grammar `*self`, `*const self`, and `*mut self`. let recover_self_ptr = |this: &mut Self| { - self.sess.emit_err(SelfArgumentPointer { span: this.token.span }); + self.sess.emit_err(errors::SelfArgumentPointer { span: this.token.span }); Ok((SelfKind::Value(Mutability::Not), expect_self_ident(this), this.prev_token.span)) }; diff --git a/tests/ui/parser/raw/too-many-hash.rs b/tests/ui/parser/raw/too-many-hash.rs new file mode 100644 index 0000000000000..f3d3b207fad6b --- /dev/null +++ b/tests/ui/parser/raw/too-many-hash.rs @@ -0,0 +1,6 @@ +// ignore-tidy-linelength + +fn main() { + let s: &str = r################################################################################################################################################################################################################################################################"very raw"################################################################################################################################################################################################################################################################; + //~^ ERROR too many `#` symbols: raw strings may be delimited by up to 255 `#` symbols, but found 256 +} diff --git a/tests/ui/parser/raw/too-many-hash.stderr b/tests/ui/parser/raw/too-many-hash.stderr new file mode 100644 index 0000000000000..29ec17842aacc --- /dev/null +++ b/tests/ui/parser/raw/too-many-hash.stderr @@ -0,0 +1,8 @@ +error: too many `#` symbols: raw strings may be delimited by up to 255 `#` symbols, but found 256 + --> $DIR/too-many-hash.rs:4:19 + | +LL | ... = r################################################################################################################################################################################################################################################################"very raw"##############################################################################################################################################################################################################################################################... 
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to previous error + From 2fc1693cd577eb8af48b75518945a71f1652711c Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Mon, 6 Feb 2023 21:34:35 +0530 Subject: [PATCH 285/501] split suggestions into two separate suggestions --- clippy_lints/src/methods/suspicious_to_owned.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/clippy_lints/src/methods/suspicious_to_owned.rs b/clippy_lints/src/methods/suspicious_to_owned.rs index 62eeb3f7a4ec8..e818f1892e510 100644 --- a/clippy_lints/src/methods/suspicious_to_owned.rs +++ b/clippy_lints/src/methods/suspicious_to_owned.rs @@ -29,10 +29,16 @@ pub fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr<'_>) - "this `to_owned` call clones the {input_type} itself and does not cause the {input_type} contents to become owned" )), |diag| { - diag.span_suggestions( + diag.span_suggestion( expr.span, - "depending on intent, either make the Cow an Owned variant or clone the Cow itself", - [format!("{recv_snip}.into_owned()"), format!("{recv_snip}.clone()")], + "depending on intent, either make the Cow an Owned variant", + format!("{recv_snip}.into_owned()"), + app + ); + diag.span_suggestion( + expr.span, + "or clone the Cow itself", + format!("{recv_snip}.clone()"), app ); } From 443bc7f19366d0d5dff1396275f25ce946d9d80c Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Mon, 6 Feb 2023 20:50:25 +0330 Subject: [PATCH 286/501] Support layout of RPIT --- crates/hir-ty/src/chalk_db.rs | 2 +- crates/hir-ty/src/chalk_ext.rs | 10 ++- crates/hir-ty/src/display.rs | 15 ++--- crates/hir-ty/src/infer.rs | 9 ++- crates/hir-ty/src/layout.rs | 19 ++++-- crates/hir-ty/src/layout/tests.rs | 107 ++++++++++++++++++++++++++++- crates/hir-ty/src/lib.rs | 9 ++- crates/hir-ty/src/lower.rs | 108 +++++++++++++++++++----------- 8 files changed, 214 insertions(+), 65 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index bbb6625855d32..6989e9fb9be5c 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -205,7 +205,7 @@ impl<'a> chalk_solve::RustIrDatabase for ChalkContext<'a> { .return_type_impl_traits(func) .expect("impl trait id without impl traits"); let (datas, binders) = (*datas).as_ref().into_value_and_skipped_binders(); - let data = &datas.impl_traits[idx as usize]; + let data = &datas.impl_traits[idx]; let bound = OpaqueTyDatumBound { bounds: make_single_type_binders(data.bounds.skip_binders().to_vec()), where_clauses: chalk_ir::Binders::empty(Interner, vec![]), diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs index 329c87c74e97e..45c975dfcdc32 100644 --- a/crates/hir-ty/src/chalk_ext.rs +++ b/crates/hir-ty/src/chalk_ext.rs @@ -234,9 +234,8 @@ impl TyExt for Ty { } ImplTraitId::ReturnTypeImplTrait(func, idx) => { db.return_type_impl_traits(func).map(|it| { - let data = (*it) - .as_ref() - .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); + let data = + (*it).as_ref().map(|rpit| 
rpit.impl_traits[idx].bounds.clone()); data.substitute(Interner, &subst).into_value_and_skipped_binders().0 }) } @@ -247,9 +246,8 @@ impl TyExt for Ty { { ImplTraitId::ReturnTypeImplTrait(func, idx) => { db.return_type_impl_traits(func).map(|it| { - let data = (*it) - .as_ref() - .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); + let data = + (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); data.substitute(Interner, &opaque_ty.substitution) }) } diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index ae2162dd7cd24..464860dea3c38 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -458,9 +458,8 @@ impl HirDisplay for Ty { let datas = db .return_type_impl_traits(func) .expect("impl trait id without data"); - let data = (*datas) - .as_ref() - .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); + let data = + (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, parameters); let mut len = bounds.skip_binders().len(); @@ -718,9 +717,8 @@ impl HirDisplay for Ty { ImplTraitId::ReturnTypeImplTrait(func, idx) => { let datas = db.return_type_impl_traits(func).expect("impl trait id without data"); - let data = (*datas) - .as_ref() - .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); + let data = + (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, ¶meters); let krate = func.lookup(db.upcast()).module(db.upcast()).krate(); write_bounds_like_dyn_trait_with_prefix( @@ -828,9 +826,8 @@ impl HirDisplay for Ty { ImplTraitId::ReturnTypeImplTrait(func, idx) => { let datas = db.return_type_impl_traits(func).expect("impl trait id without data"); - let data = (*datas) - .as_ref() - .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); + let data = + (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone()); let bounds = data.substitute(Interner, &opaque_ty.substitution); let krate = func.lookup(db.upcast()).module(db.upcast()).krate(); write_bounds_like_dyn_trait_with_prefix( diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 571b3e9686317..a76d33c0cded3 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -39,7 +39,7 @@ use stdx::always; use crate::{ db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany, lower::ImplTraitLoweringMode, to_assoc_type_id, AliasEq, AliasTy, Const, DomainGoal, - GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, Substitution, + GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, RpitId, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, }; @@ -352,6 +352,7 @@ pub struct InferenceResult { /// **Note**: When a pattern type is resolved it may still contain /// unresolved or missing subpatterns or subpatterns of mismatched types. pub type_of_pat: ArenaMap, + pub type_of_rpit: ArenaMap, type_mismatches: FxHashMap, /// Interned common types to return references to. 
standard_types: InternedStandardTypes, @@ -525,6 +526,9 @@ impl<'a> InferenceContext<'a> { for ty in result.type_of_pat.values_mut() { *ty = table.resolve_completely(ty.clone()); } + for ty in result.type_of_rpit.iter_mut().map(|x| x.1) { + *ty = table.resolve_completely(ty.clone()); + } for mismatch in result.type_mismatches.values_mut() { mismatch.expected = table.resolve_completely(mismatch.expected.clone()); mismatch.actual = table.resolve_completely(mismatch.actual.clone()); @@ -603,7 +607,7 @@ impl<'a> InferenceContext<'a> { _ => unreachable!(), }; let bounds = (*rpits).map_ref(|rpits| { - rpits.impl_traits[idx as usize].bounds.map_ref(|it| it.into_iter()) + rpits.impl_traits[idx].bounds.map_ref(|it| it.into_iter()) }); let var = self.table.new_type_var(); let var_subst = Substitution::from1(Interner, var.clone()); @@ -616,6 +620,7 @@ impl<'a> InferenceContext<'a> { always!(binders.is_empty(Interner)); // quantified where clauses not yet handled self.push_obligation(var_predicate.cast(Interner)); } + self.result.type_of_rpit.insert(idx, var.clone()); var }, DebruijnIndex::INNERMOST, diff --git a/crates/hir-ty/src/layout.rs b/crates/hir-ty/src/layout.rs index cd7f1b805c5c5..c82c274524acd 100644 --- a/crates/hir-ty/src/layout.rs +++ b/crates/hir-ty/src/layout.rs @@ -225,10 +225,21 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result return Err(LayoutError::NotImplemented), + TyKind::OpaqueType(opaque_ty_id, _) => { + let impl_trait_id = db.lookup_intern_impl_trait_id((*opaque_ty_id).into()); + match impl_trait_id { + crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => { + let infer = db.infer(func.into()); + layout_of_ty(db, &infer.type_of_rpit[idx], krate)? + } + crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => { + return Err(LayoutError::NotImplemented) + } + } + } + TyKind::Closure(_, _) | TyKind::Generator(_, _) | TyKind::GeneratorWitness(_, _) => { + return Err(LayoutError::NotImplemented) + } TyKind::AssociatedType(_, _) | TyKind::Error | TyKind::Alias(_) diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index 53838cf41d274..ba7db9b0e2007 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -5,7 +5,7 @@ use hir_def::{ layout::{Layout, LayoutError}, }; -use crate::{test_db::TestDB, Interner, Substitution}; +use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution}; use super::layout_of_ty; @@ -45,6 +45,50 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result { layout_of_ty(&db, &goal_ty, module_id.krate()) } +/// A version of `eval_goal` for types that can not be expressed in ADTs, like closures and `impl Trait` +fn eval_expr(ra_fixture: &str, minicore: &str) -> Result { + // using unstable cargo features failed, fall back to using plain rustc + let mut cmd = std::process::Command::new("rustc"); + cmd.args(["-Z", "unstable-options", "--print", "target-spec-json"]).env("RUSTC_BOOTSTRAP", "1"); + let output = cmd.output().unwrap(); + assert!(output.status.success(), "{}", output.status); + let stdout = String::from_utf8(output.stdout).unwrap(); + let target_data_layout = + stdout.split_once(r#""data-layout": ""#).unwrap().1.split_once('"').unwrap().0.to_owned(); + + let ra_fixture = format!( + "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}", + ); + + let (db, file_id) = TestDB::with_single_file(&ra_fixture); + let module_id = db.module_for_file(file_id); + let def_map = module_id.def_map(&db); 
+ let scope = &def_map[module_id.local_id].scope; + let adt_id = scope + .declarations() + .find_map(|x| match x { + hir_def::ModuleDefId::FunctionId(x) => { + let name = db.function_data(x).name.to_smol_str(); + (name == "main").then_some(x) + } + _ => None, + }) + .unwrap(); + let hir_body = db.body(adt_id.into()); + let pat = hir_body + .pats + .iter() + .find(|x| match x.1 { + hir_def::expr::Pat::Bind { name, .. } => name.to_smol_str() == "goal", + _ => false, + }) + .unwrap() + .0; + let infer = db.infer(adt_id.into()); + let goal_ty = infer.type_of_pat[pat].clone(); + layout_of_ty(&db, &goal_ty, module_id.krate()) +} + #[track_caller] fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64) { let l = eval_goal(ra_fixture, minicore).unwrap(); @@ -52,6 +96,13 @@ fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64) assert_eq!(l.align.abi.bytes(), align); } +#[track_caller] +fn check_size_and_align_expr(ra_fixture: &str, minicore: &str, size: u64, align: u64) { + let l = eval_expr(ra_fixture, minicore).unwrap(); + assert_eq!(l.size.bytes(), size); + assert_eq!(l.align.abi.bytes(), align); +} + #[track_caller] fn check_fail(ra_fixture: &str, e: LayoutError) { let r = eval_goal(ra_fixture, ""); @@ -85,11 +136,31 @@ macro_rules! size_and_align { }; } +macro_rules! size_and_align_expr { + ($($t:tt)*) => { + { + #[allow(dead_code)] + { + let val = { $($t)* }; + check_size_and_align_expr( + stringify!($($t)*), + "", + ::std::mem::size_of_val(&val) as u64, + ::std::mem::align_of_val(&val) as u64, + ); + } + } + }; +} + #[test] fn hello_world() { size_and_align! { struct Goal(i32); } + size_and_align_expr! { + 2i32 + } } #[test] @@ -143,6 +214,40 @@ fn generic() { } } +#[test] +fn return_position_impl_trait() { + size_and_align_expr! { + trait T {} + impl T for i32 {} + impl T for i64 {} + fn foo() -> impl T { 2i64 } + foo() + } + size_and_align_expr! { + trait T {} + impl T for i32 {} + impl T for i64 {} + fn foo() -> (impl T, impl T, impl T) { (2i64, 5i32, 7i32) } + foo() + } + size_and_align_expr! { + struct Foo(T, T, (T, T)); + trait T {} + impl T for Foo {} + impl T for Foo {} + + fn foo() -> Foo { Foo( + Foo(1i64, 2, (3, 4)), + Foo(5, 6, (7, 8)), + ( + Foo(1i64, 2, (3, 4)), + Foo(5, 6, (7, 8)), + ), + ) } + foo() + } +} + #[test] fn enums() { size_and_align! 
{ diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index cbeb61067dfbe..59a5ef8c14dae 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -45,6 +45,7 @@ use chalk_ir::{ use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId}; use hir_expand::name; use itertools::Either; +use la_arena::{Arena, Idx}; use rustc_hash::FxHashSet; use traits::FnTrait; use utils::Generics; @@ -290,22 +291,24 @@ impl TypeFoldable for CallableSig { #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] pub enum ImplTraitId { - ReturnTypeImplTrait(hir_def::FunctionId, u16), + ReturnTypeImplTrait(hir_def::FunctionId, RpitId), AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId), } #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct ReturnTypeImplTraits { - pub(crate) impl_traits: Vec, + pub(crate) impl_traits: Arena, } has_interner!(ReturnTypeImplTraits); #[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub(crate) struct ReturnTypeImplTrait { +pub struct ReturnTypeImplTrait { pub(crate) bounds: Binders>, } +pub type RpitId = Idx; + pub fn static_lifetime() -> Lifetime { LifetimeData::Static.intern(Interner) } diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index b1a7ad3e940ed..86abe1af68a60 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -36,7 +36,7 @@ use hir_def::{ use hir_expand::{name::Name, ExpandResult}; use intern::Interned; use itertools::Either; -use la_arena::ArenaMap; +use la_arena::{Arena, ArenaMap}; use rustc_hash::FxHashSet; use smallvec::SmallVec; use stdx::{impl_from, never}; @@ -57,6 +57,51 @@ use crate::{ Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause, }; +#[derive(Debug)] +enum ImplTraitLoweringState { + /// When turning `impl Trait` into opaque types, we have to collect the + /// bounds at the same time to get the IDs correct (without becoming too + /// complicated). I don't like using interior mutability (as for the + /// counter), but I've tried and failed to make the lifetimes work for + /// passing around a `&mut TyLoweringContext`. The core problem is that + /// we're grouping the mutable data (the counter and this field) together + /// with the immutable context (the references to the DB and resolver). + /// Splitting this up would be a possible fix. 
+ Opaque(RefCell>), + Param(Cell), + Variable(Cell), + Disallowed, +} +impl ImplTraitLoweringState { + fn new(impl_trait_mode: ImplTraitLoweringMode) -> ImplTraitLoweringState { + match impl_trait_mode { + ImplTraitLoweringMode::Opaque => Self::Opaque(RefCell::new(Arena::new())), + ImplTraitLoweringMode::Param => Self::Param(Cell::new(0)), + ImplTraitLoweringMode::Variable => Self::Variable(Cell::new(0)), + ImplTraitLoweringMode::Disallowed => Self::Disallowed, + } + } + + fn take(&self) -> Self { + match self { + Self::Opaque(x) => Self::Opaque(RefCell::new(x.take())), + Self::Param(x) => Self::Param(Cell::new(x.get())), + Self::Variable(x) => Self::Variable(Cell::new(x.get())), + Self::Disallowed => Self::Disallowed, + } + } + + fn swap(&self, impl_trait_mode: &Self) { + match (self, impl_trait_mode) { + (Self::Opaque(x), Self::Opaque(y)) => x.swap(y), + (Self::Param(x), Self::Param(y)) => x.swap(y), + (Self::Variable(x), Self::Variable(y)) => x.swap(y), + (Self::Disallowed, Self::Disallowed) => (), + _ => panic!("mismatched lowering mode"), + } + } +} + #[derive(Debug)] pub struct TyLoweringContext<'a> { pub db: &'a dyn HirDatabase, @@ -67,17 +112,7 @@ pub struct TyLoweringContext<'a> { /// should be converted to variables. I think in practice, this isn't /// possible currently, so this should be fine for now. pub type_param_mode: ParamLoweringMode, - pub impl_trait_mode: ImplTraitLoweringMode, - impl_trait_counter: Cell, - /// When turning `impl Trait` into opaque types, we have to collect the - /// bounds at the same time to get the IDs correct (without becoming too - /// complicated). I don't like using interior mutability (as for the - /// counter), but I've tried and failed to make the lifetimes work for - /// passing around a `&mut TyLoweringContext`. The core problem is that - /// we're grouping the mutable data (the counter and this field) together - /// with the immutable context (the references to the DB and resolver). - /// Splitting this up would be a possible fix. - opaque_type_data: RefCell>, + impl_trait_mode: ImplTraitLoweringState, expander: RefCell>, /// Tracks types with explicit `?Sized` bounds. 
pub(crate) unsized_types: RefCell>, @@ -85,19 +120,15 @@ pub struct TyLoweringContext<'a> { impl<'a> TyLoweringContext<'a> { pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self { - let impl_trait_counter = Cell::new(0); - let impl_trait_mode = ImplTraitLoweringMode::Disallowed; + let impl_trait_mode = ImplTraitLoweringState::Disallowed; let type_param_mode = ParamLoweringMode::Placeholder; let in_binders = DebruijnIndex::INNERMOST; - let opaque_type_data = RefCell::new(Vec::new()); Self { db, resolver, in_binders, impl_trait_mode, - impl_trait_counter, type_param_mode, - opaque_type_data, expander: RefCell::new(None), unsized_types: RefCell::default(), } @@ -108,20 +139,18 @@ impl<'a> TyLoweringContext<'a> { debruijn: DebruijnIndex, f: impl FnOnce(&TyLoweringContext<'_>) -> T, ) -> T { - let opaque_ty_data_vec = self.opaque_type_data.take(); + let impl_trait_mode = self.impl_trait_mode.take(); let expander = self.expander.take(); let unsized_types = self.unsized_types.take(); let new_ctx = Self { in_binders: debruijn, - impl_trait_counter: Cell::new(self.impl_trait_counter.get()), - opaque_type_data: RefCell::new(opaque_ty_data_vec), + impl_trait_mode, expander: RefCell::new(expander), unsized_types: RefCell::new(unsized_types), ..*self }; let result = f(&new_ctx); - self.impl_trait_counter.set(new_ctx.impl_trait_counter.get()); - self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner()); + self.impl_trait_mode.swap(&new_ctx.impl_trait_mode); self.expander.replace(new_ctx.expander.into_inner()); self.unsized_types.replace(new_ctx.unsized_types.into_inner()); result @@ -136,7 +165,7 @@ impl<'a> TyLoweringContext<'a> { } pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self { - Self { impl_trait_mode, ..self } + Self { impl_trait_mode: ImplTraitLoweringState::new(impl_trait_mode), ..self } } pub fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self { @@ -244,20 +273,17 @@ impl<'a> TyLoweringContext<'a> { } TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds), TypeRef::ImplTrait(bounds) => { - match self.impl_trait_mode { - ImplTraitLoweringMode::Opaque => { - let idx = self.impl_trait_counter.get(); - self.impl_trait_counter.set(idx + 1); + match &self.impl_trait_mode { + ImplTraitLoweringState::Opaque(opaque_type_data) => { let func = match self.resolver.generic_def() { Some(GenericDefId::FunctionId(f)) => f, _ => panic!("opaque impl trait lowering in non-function"), }; - assert!(idx as usize == self.opaque_type_data.borrow().len()); // this dance is to make sure the data is in the right // place even if we encounter more opaque types while // lowering the bounds - self.opaque_type_data.borrow_mut().push(ReturnTypeImplTrait { + let idx = opaque_type_data.borrow_mut().alloc(ReturnTypeImplTrait { bounds: crate::make_single_type_binders(Vec::new()), }); // We don't want to lower the bounds inside the binders @@ -273,7 +299,7 @@ impl<'a> TyLoweringContext<'a> { .with_debruijn(DebruijnIndex::INNERMOST, |ctx| { ctx.lower_impl_trait(bounds, func) }); - self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data; + opaque_type_data.borrow_mut()[idx] = actual_opaque_type_data; let impl_trait_id = ImplTraitId::ReturnTypeImplTrait(func, idx); let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into(); @@ -281,10 +307,10 @@ impl<'a> TyLoweringContext<'a> { let parameters = generics.bound_vars_subst(self.db, self.in_binders); TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner) } - 
ImplTraitLoweringMode::Param => { - let idx = self.impl_trait_counter.get(); + ImplTraitLoweringState::Param(counter) => { + let idx = counter.get(); // FIXME we're probably doing something wrong here - self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16); + counter.set(idx + count_impl_traits(type_ref) as u16); if let Some(def) = self.resolver.generic_def() { let generics = generics(self.db.upcast(), def); let param = generics @@ -305,10 +331,10 @@ impl<'a> TyLoweringContext<'a> { TyKind::Error.intern(Interner) } } - ImplTraitLoweringMode::Variable => { - let idx = self.impl_trait_counter.get(); + ImplTraitLoweringState::Variable(counter) => { + let idx = counter.get(); // FIXME we're probably doing something wrong here - self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16); + counter.set(idx + count_impl_traits(type_ref) as u16); let ( _parent_params, self_params, @@ -327,7 +353,7 @@ impl<'a> TyLoweringContext<'a> { )) .intern(Interner) } - ImplTraitLoweringMode::Disallowed => { + ImplTraitLoweringState::Disallowed => { // FIXME: report error TyKind::Error.intern(Interner) } @@ -1863,8 +1889,12 @@ pub(crate) fn return_type_impl_traits( .with_type_param_mode(ParamLoweringMode::Variable); let _ret = ctx_ret.lower_ty(&data.ret_type); let generics = generics(db.upcast(), def.into()); - let return_type_impl_traits = - ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() }; + let return_type_impl_traits = ReturnTypeImplTraits { + impl_traits: match ctx_ret.impl_trait_mode { + ImplTraitLoweringState::Opaque(x) => x.into_inner(), + _ => unreachable!(), + }, + }; if return_type_impl_traits.impl_traits.is_empty() { None } else { From 8494882773392b461e9609e4f4a60818b26e964c Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Mon, 6 Feb 2023 18:32:25 +0100 Subject: [PATCH 287/501] Rustup to rustc 1.69.0-nightly (75a0be98f 2023-02-05) --- build_sysroot/Cargo.lock | 4 ++-- rust-toolchain | 2 +- src/base.rs | 31 ++++++++++++++++++++++--------- 3 files changed, 25 insertions(+), 12 deletions(-) diff --git a/build_sysroot/Cargo.lock b/build_sysroot/Cargo.lock index 24f15fc8521fe..b7e0b68a2a27a 100644 --- a/build_sysroot/Cargo.lock +++ b/build_sysroot/Cargo.lock @@ -34,9 +34,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.78" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" [[package]] name = "cfg-if" diff --git a/rust-toolchain b/rust-toolchain index f7205cb98009a..40fb54b915992 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2023-01-27" +channel = "nightly-2023-02-06" components = ["rust-src", "rustc-dev", "llvm-tools-preview"] diff --git a/src/base.rs b/src/base.rs index 50baa70ab0fe5..189d952a92f17 100644 --- a/src/base.rs +++ b/src/base.rs @@ -790,17 +790,30 @@ fn codegen_stmt<'tcx>( let val = CValue::const_val(fx, fx.layout_of(fx.tcx.types.usize), val.into()); lval.write_cvalue(fx, val); } - Rvalue::Aggregate(ref kind, ref operands) => match kind.as_ref() { - AggregateKind::Array(_ty) => { - for (i, operand) in operands.iter().enumerate() { - let operand = codegen_operand(fx, operand); - let index = fx.bcx.ins().iconst(fx.pointer_type, i as i64); - let to = lval.place_index(fx, index); - to.write_cvalue(fx, operand); + Rvalue::Aggregate(ref kind, ref 
operands) => { + let (variant_index, variant_dest, active_field_index) = match **kind { + mir::AggregateKind::Adt(_, variant_index, _, _, active_field_index) => { + let variant_dest = lval.downcast_variant(fx, variant_index); + (variant_index, variant_dest, active_field_index) } + _ => (VariantIdx::from_u32(0), lval, None), + }; + if active_field_index.is_some() { + assert_eq!(operands.len(), 1); } - _ => unreachable!("shouldn't exist at codegen {:?}", to_place_and_rval.1), - }, + for (i, operand) in operands.iter().enumerate() { + let operand = codegen_operand(fx, operand); + let field_index = active_field_index.unwrap_or(i); + let to = if let mir::AggregateKind::Array(_) = **kind { + let index = fx.bcx.ins().iconst(fx.pointer_type, field_index as i64); + variant_dest.place_index(fx, index) + } else { + variant_dest.place_field(fx, mir::Field::new(field_index)) + }; + to.write_cvalue(fx, operand); + } + crate::discriminant::codegen_set_discriminant(fx, lval, variant_index); + } } } StatementKind::StorageLive(_) From 40cf8b45ac1d51c4d0d0d984ce1a0424e05719a8 Mon Sep 17 00:00:00 2001 From: hkalbasi Date: Mon, 6 Feb 2023 23:48:20 +0330 Subject: [PATCH 288/501] reuse fetching target data layout from rustc function --- Cargo.lock | 1 + crates/hir-ty/Cargo.toml | 1 + crates/hir-ty/src/layout/tests.rs | 26 ++++++------------- crates/project-model/src/lib.rs | 2 +- .../project-model/src/target_data_layout.rs | 2 +- 5 files changed, 12 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c1f146411b232..9cc34a876dc70 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -583,6 +583,7 @@ dependencies = [ "limit", "once_cell", "profile", + "project-model", "rustc-hash", "scoped-tls", "smallvec", diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index 8b762bf829bcf..490bbe1e7240d 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -48,6 +48,7 @@ tracing-subscriber = { version = "0.3.16", default-features = false, features = "registry", ] } tracing-tree = "0.2.1" +project-model = { path = "../project-model" } # local deps test-utils.workspace = true diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index ba7db9b0e2007..067bdc960dadf 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use base_db::fixture::WithFixture; use chalk_ir::{AdtId, TyKind}; use hir_def::{ @@ -9,16 +11,12 @@ use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution}; use super::layout_of_ty; -fn eval_goal(ra_fixture: &str, minicore: &str) -> Result { - // using unstable cargo features failed, fall back to using plain rustc - let mut cmd = std::process::Command::new("rustc"); - cmd.args(["-Z", "unstable-options", "--print", "target-spec-json"]).env("RUSTC_BOOTSTRAP", "1"); - let output = cmd.output().unwrap(); - assert!(output.status.success(), "{}", output.status); - let stdout = String::from_utf8(output.stdout).unwrap(); - let target_data_layout = - stdout.split_once(r#""data-layout": ""#).unwrap().1.split_once('"').unwrap().0.to_owned(); +fn current_machine_data_layout() -> String { + project_model::target_data_layout::get(None, None, &HashMap::default()).unwrap() +} +fn eval_goal(ra_fixture: &str, minicore: &str) -> Result { + let target_data_layout = current_machine_data_layout(); let ra_fixture = format!( "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}", ); @@ -47,15 +45,7 @@ fn eval_goal(ra_fixture: &str, 
minicore: &str) -> Result { /// A version of `eval_goal` for types that can not be expressed in ADTs, like closures and `impl Trait` fn eval_expr(ra_fixture: &str, minicore: &str) -> Result { - // using unstable cargo features failed, fall back to using plain rustc - let mut cmd = std::process::Command::new("rustc"); - cmd.args(["-Z", "unstable-options", "--print", "target-spec-json"]).env("RUSTC_BOOTSTRAP", "1"); - let output = cmd.output().unwrap(); - assert!(output.status.success(), "{}", output.status); - let stdout = String::from_utf8(output.stdout).unwrap(); - let target_data_layout = - stdout.split_once(r#""data-layout": ""#).unwrap().1.split_once('"').unwrap().0.to_owned(); - + let target_data_layout = current_machine_data_layout(); let ra_fixture = format!( "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}", ); diff --git a/crates/project-model/src/lib.rs b/crates/project-model/src/lib.rs index e2f09bad2ded5..9b6a71db81145 100644 --- a/crates/project-model/src/lib.rs +++ b/crates/project-model/src/lib.rs @@ -25,7 +25,7 @@ mod sysroot; mod workspace; mod rustc_cfg; mod build_scripts; -mod target_data_layout; +pub mod target_data_layout; #[cfg(test)] mod tests; diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs index 267a73ac5bd88..42c06ad0ed371 100644 --- a/crates/project-model/src/target_data_layout.rs +++ b/crates/project-model/src/target_data_layout.rs @@ -6,7 +6,7 @@ use rustc_hash::FxHashMap; use crate::{utf8_stdout, ManifestPath}; -pub(super) fn get( +pub fn get( cargo_toml: Option<&ManifestPath>, target: Option<&str>, extra_env: &FxHashMap, From 0d59b8c997e31095732c9f9864e10d76daaeb42e Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 21 Jan 2023 21:52:40 +0000 Subject: [PATCH 289/501] Remove redundant test. --- compiler/rustc_mir_dataflow/src/value_analysis.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index 8bf6493be4b01..45afd16c313e4 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -622,8 +622,7 @@ impl Map { ty: Ty<'tcx>, filter: &mut impl FnMut(Ty<'tcx>) -> bool, ) { - // Note: The framework supports only scalars for now. - if filter(ty) && ty.is_scalar() { + if filter(ty) { // We know that the projection only contains trackable elements. let place = self.make_place(local, projection).unwrap(); From cd3649b2a595da17dcff983b6d5f74a28a98dd00 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 21 Jan 2023 21:53:26 +0000 Subject: [PATCH 290/501] Only exclude locals if the place is not indirect. --- .../src/impls/borrowed_locals.rs | 4 +++- .../rustc_mir_dataflow/src/value_analysis.rs | 20 ++++++++++--------- compiler/rustc_mir_transform/src/sroa.rs | 18 +++++++++-------- 3 files changed, 24 insertions(+), 18 deletions(-) diff --git a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs index 0f8e86d1d6679..6f4e7fd4682c1 100644 --- a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs +++ b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs @@ -121,7 +121,9 @@ where // for now. See discussion on [#61069]. 
// // [#61069]: https://github.com/rust-lang/rust/pull/61069 - self.trans.gen(dropped_place.local); + if !dropped_place.is_indirect() { + self.trans.gen(dropped_place.local); + } } TerminatorKind::Abort diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index 45afd16c313e4..8003da6bbd269 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -35,6 +35,7 @@ use std::fmt::{Debug, Formatter}; use rustc_data_structures::fx::FxHashMap; +use rustc_index::bit_set::BitSet; use rustc_index::vec::IndexVec; use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; @@ -589,7 +590,7 @@ impl Map { ) -> Self { let mut map = Self::new(); let exclude = excluded_locals(body); - map.register_with_filter(tcx, body, filter, &exclude); + map.register_with_filter(tcx, body, filter, exclude); debug!("registered {} places ({} nodes in total)", map.value_count, map.places.len()); map } @@ -600,12 +601,12 @@ impl Map { tcx: TyCtxt<'tcx>, body: &Body<'tcx>, mut filter: impl FnMut(Ty<'tcx>) -> bool, - exclude: &IndexVec, + exclude: BitSet, ) { // We use this vector as stack, pushing and popping projections. let mut projection = Vec::new(); for (local, decl) in body.local_decls.iter_enumerated() { - if !exclude[local] { + if !exclude.contains(local) { self.register_with_filter_rec(tcx, local, &mut projection, decl.ty, &mut filter); } } @@ -823,26 +824,27 @@ pub fn iter_fields<'tcx>( } /// Returns all locals with projections that have their reference or address taken. -pub fn excluded_locals(body: &Body<'_>) -> IndexVec { +pub fn excluded_locals(body: &Body<'_>) -> BitSet { struct Collector { - result: IndexVec, + result: BitSet, } impl<'tcx> Visitor<'tcx> for Collector { fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) { - if context.is_borrow() + if (context.is_borrow() || context.is_address_of() || context.is_drop() - || context == PlaceContext::MutatingUse(MutatingUseContext::AsmOutput) + || context == PlaceContext::MutatingUse(MutatingUseContext::AsmOutput)) + && !place.is_indirect() { // A pointer to a place could be used to access other places with the same local, // hence we have to exclude the local completely. 
- self.result[place.local] = true; + self.result.insert(place.local); } } } - let mut collector = Collector { result: IndexVec::from_elem(false, &body.local_decls) }; + let mut collector = Collector { result: BitSet::new_empty(body.local_decls.len()) }; collector.visit_body(body); collector.result } diff --git a/compiler/rustc_mir_transform/src/sroa.rs b/compiler/rustc_mir_transform/src/sroa.rs index 26acd406ed8a9..3fb7836ed6867 100644 --- a/compiler/rustc_mir_transform/src/sroa.rs +++ b/compiler/rustc_mir_transform/src/sroa.rs @@ -1,5 +1,5 @@ use crate::MirPass; -use rustc_index::bit_set::BitSet; +use rustc_index::bit_set::{BitSet, GrowableBitSet}; use rustc_index::vec::IndexVec; use rustc_middle::mir::patch::MirPatch; use rustc_middle::mir::visit::*; @@ -26,10 +26,12 @@ impl<'tcx> MirPass<'tcx> for ScalarReplacementOfAggregates { debug!(?replacements); let all_dead_locals = replace_flattened_locals(tcx, body, replacements); if !all_dead_locals.is_empty() { - for local in excluded.indices() { - excluded[local] |= all_dead_locals.contains(local); - } - excluded.raw.resize(body.local_decls.len(), false); + excluded.union(&all_dead_locals); + excluded = { + let mut growable = GrowableBitSet::from(excluded); + growable.ensure(body.local_decls.len()); + growable.into() + }; } else { break; } @@ -44,11 +46,11 @@ impl<'tcx> MirPass<'tcx> for ScalarReplacementOfAggregates { /// - the locals is a union or an enum; /// - the local's address is taken, and thus the relative addresses of the fields are observable to /// client code. -fn escaping_locals(excluded: &IndexVec, body: &Body<'_>) -> BitSet { +fn escaping_locals(excluded: &BitSet, body: &Body<'_>) -> BitSet { let mut set = BitSet::new_empty(body.local_decls.len()); set.insert_range(RETURN_PLACE..=Local::from_usize(body.arg_count)); for (local, decl) in body.local_decls().iter_enumerated() { - if decl.ty.is_union() || decl.ty.is_enum() || excluded[local] { + if decl.ty.is_union() || decl.ty.is_enum() || excluded.contains(local) { set.insert(local); } } @@ -172,7 +174,7 @@ fn replace_flattened_locals<'tcx>( body: &mut Body<'tcx>, replacements: ReplacementMap<'tcx>, ) -> BitSet { - let mut all_dead_locals = BitSet::new_empty(body.local_decls.len()); + let mut all_dead_locals = BitSet::new_empty(replacements.fragments.len()); for (local, replacements) in replacements.fragments.iter_enumerated() { if replacements.is_some() { all_dead_locals.insert(local); From 9a6c04f5d0e6ec47bf150187cffcb7f737799db4 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 21 Jan 2023 22:28:54 +0000 Subject: [PATCH 291/501] Handle discriminants in dataflow-const-prop. 
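Track enum discriminants as pseudo-places (`TrackElem::Discriminant`) so that
`SetDiscriminant` statements and `Rvalue::Discriminant` reads take part in the
analysis. As a rough illustration, this is the kind of source pattern whose
discriminant read can now be folded to a constant (a sketch adapted from the
new `enum.rs` test; the concrete value and the assertion are illustrative
only, they are not part of the test suite):

    // After `e` is built from the `E::V1` aggregate, the analysis knows
    // `discriminant(e) == 0`, so the `match` switches on a constant.
    enum E {
        V1(i32),
        V2(i32),
    }

    fn simple() -> i32 {
        let e = E::V1(7);
        match e {
            E::V1(x) => x,
            E::V2(x) => x,
        }
    }

    fn main() {
        assert_eq!(simple(), 7);
    }

The `mutate_discriminant` test covers the opposite case: a write through the
niche field invalidates the tracked discriminant, so the following switch
cannot be folded.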
--- compiler/rustc_mir_dataflow/src/lib.rs | 1 + .../rustc_mir_dataflow/src/value_analysis.rs | 173 ++++++++++++++---- .../src/dataflow_const_prop.rs | 103 +++++++++-- ...mutate_discriminant.DataflowConstProp.diff | 26 +++ tests/mir-opt/dataflow-const-prop/enum.rs | 45 ++++- ...iff => enum.simple.DataflowConstProp.diff} | 16 +- 6 files changed, 305 insertions(+), 59 deletions(-) create mode 100644 tests/mir-opt/dataflow-const-prop/enum.mutate_discriminant.DataflowConstProp.diff rename tests/mir-opt/dataflow-const-prop/{enum.main.DataflowConstProp.diff => enum.simple.DataflowConstProp.diff} (84%) diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs index 7f40cfca32fff..3e382f500afbe 100644 --- a/compiler/rustc_mir_dataflow/src/lib.rs +++ b/compiler/rustc_mir_dataflow/src/lib.rs @@ -1,6 +1,7 @@ #![feature(associated_type_defaults)] #![feature(box_patterns)] #![feature(exact_size_is_empty)] +#![feature(let_chains)] #![feature(min_specialization)] #![feature(once_cell)] #![feature(stmt_expr_attributes)] diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index 8003da6bbd269..03b6c182062db 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -65,10 +65,8 @@ pub trait ValueAnalysis<'tcx> { StatementKind::Assign(box (place, rvalue)) => { self.handle_assign(*place, rvalue, state); } - StatementKind::SetDiscriminant { .. } => { - // Could treat this as writing a constant to a pseudo-place. - // But discriminants are currently not tracked, so we do nothing. - // Related: https://github.com/rust-lang/unsafe-code-guidelines/issues/84 + StatementKind::SetDiscriminant { box ref place, .. } => { + state.flood_discr(place.as_ref(), self.map()); } StatementKind::Intrinsic(box intrinsic) => { self.handle_intrinsic(intrinsic, state); @@ -447,26 +445,29 @@ impl State { } pub fn flood_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) { - if let Some(root) = map.find(place) { - self.flood_idx_with(root, map, value); - } + let StateData::Reachable(values) = &mut self.0 else { return }; + map.for_each_aliasing_place(place, None, &mut |place| { + if let Some(vi) = map.places[place].value_index { + values[vi] = value.clone(); + } + }); } pub fn flood(&mut self, place: PlaceRef<'_>, map: &Map) { self.flood_with(place, map, V::top()) } - pub fn flood_idx_with(&mut self, place: PlaceIndex, map: &Map, value: V) { + pub fn flood_discr_with(&mut self, place: PlaceRef<'_>, map: &Map, value: V) { let StateData::Reachable(values) = &mut self.0 else { return }; - map.preorder_invoke(place, &mut |place| { + map.for_each_aliasing_place(place, Some(TrackElem::Discriminant), &mut |place| { if let Some(vi) = map.places[place].value_index { values[vi] = value.clone(); } }); } - pub fn flood_idx(&mut self, place: PlaceIndex, map: &Map) { - self.flood_idx_with(place, map, V::top()) + pub fn flood_discr(&mut self, place: PlaceRef<'_>, map: &Map) { + self.flood_discr_with(place, map, V::top()) } /// Copies `source` to `target`, including all tracked places beneath. @@ -474,7 +475,9 @@ impl State { /// If `target` contains a place that is not contained in `source`, it will be overwritten with /// Top. Also, because this will copy all entries one after another, it may only be used for /// places that are non-overlapping or identical. 
- pub fn assign_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) { + /// + /// The target place must have been flooded before calling this method. + fn assign_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) { let StateData::Reachable(values) = &mut self.0 else { return }; // If both places are tracked, we copy the value to the target. If the target is tracked, @@ -492,26 +495,28 @@ impl State { let projection = map.places[target_child].proj_elem.unwrap(); if let Some(source_child) = map.projections.get(&(source, projection)) { self.assign_place_idx(target_child, *source_child, map); - } else { - self.flood_idx(target_child, map); } } } pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlace, map: &Map) { + self.flood(target, map); if let Some(target) = map.find(target) { self.assign_idx(target, result, map); - } else { - // We don't track this place nor any projections, assignment can be ignored. } } + pub fn assign_discr(&mut self, target: PlaceRef<'_>, result: ValueOrPlace, map: &Map) { + self.flood_discr(target, map); + if let Some(target) = map.find_discr(target) { + self.assign_idx(target, result, map); + } + } + + /// The target place must have been flooded before calling this method. pub fn assign_idx(&mut self, target: PlaceIndex, result: ValueOrPlace, map: &Map) { match result { ValueOrPlace::Value(value) => { - // First flood the target place in case we also track any projections (although - // this scenario is currently not well-supported by the API). - self.flood_idx(target, map); let StateData::Reachable(values) = &mut self.0 else { return }; if let Some(value_index) = map.places[target].value_index { values[value_index] = value; @@ -526,6 +531,14 @@ impl State { map.find(place).map(|place| self.get_idx(place, map)).unwrap_or(V::top()) } + /// Retrieve the value stored for a place, or ⊤ if it is not tracked. + pub fn get_discr(&self, place: PlaceRef<'_>, map: &Map) -> V { + match map.find_discr(place) { + Some(place) => self.get_idx(place, map), + None => V::top(), + } + } + /// Retrieve the value stored for a place index, or ⊤ if it is not tracked. pub fn get_idx(&self, place: PlaceIndex, map: &Map) -> V { match &self.0 { @@ -582,7 +595,6 @@ impl Map { /// This is currently the only way to create a [`Map`]. The way in which the tracked places are /// chosen is an implementation detail and may not be relied upon (other than that their type /// passes the filter). - #[instrument(skip_all, level = "debug")] pub fn from_filter<'tcx>( tcx: TyCtxt<'tcx>, body: &Body<'tcx>, @@ -614,7 +626,7 @@ impl Map { /// Potentially register the (local, projection) place and its fields, recursively. /// - /// Invariant: The projection must only contain fields. + /// Invariant: The projection must only contain trackable elements. fn register_with_filter_rec<'tcx>( &mut self, tcx: TyCtxt<'tcx>, @@ -623,21 +635,46 @@ impl Map { ty: Ty<'tcx>, filter: &mut impl FnMut(Ty<'tcx>) -> bool, ) { - if filter(ty) { - // We know that the projection only contains trackable elements. - let place = self.make_place(local, projection).unwrap(); + // We know that the projection only contains trackable elements. + let place = self.make_place(local, projection).unwrap(); - // Allocate a value slot if it doesn't have one. - if self.places[place].value_index.is_none() { - self.places[place].value_index = Some(self.value_count.into()); - self.value_count += 1; + // Allocate a value slot if it doesn't have one, and the user requested one. 
+ if self.places[place].value_index.is_none() && filter(ty) { + self.places[place].value_index = Some(self.value_count.into()); + self.value_count += 1; + } + + if ty.is_enum() { + let discr_ty = ty.discriminant_ty(tcx); + if filter(discr_ty) { + let discr = *self + .projections + .entry((place, TrackElem::Discriminant)) + .or_insert_with(|| { + // Prepend new child to the linked list. + let next = self.places.push(PlaceInfo::new(Some(TrackElem::Discriminant))); + self.places[next].next_sibling = self.places[place].first_child; + self.places[place].first_child = Some(next); + next + }); + + // Allocate a value slot if it doesn't have one. + if self.places[discr].value_index.is_none() { + self.places[discr].value_index = Some(self.value_count.into()); + self.value_count += 1; + } } } // Recurse with all fields of this place. iter_fields(ty, tcx, |variant, field, ty| { - if variant.is_some() { - // Downcasts are currently not supported. + if let Some(variant) = variant { + projection.push(PlaceElem::Downcast(None, variant)); + let _ = self.make_place(local, projection); + projection.push(PlaceElem::Field(field, ty)); + self.register_with_filter_rec(tcx, local, projection, ty, filter); + projection.pop(); + projection.pop(); return; } projection.push(PlaceElem::Field(field, ty)); @@ -694,13 +731,77 @@ impl Map { Some(index) } + /// Locates the given place, if it exists in the tree. + pub fn find_discr(&self, place: PlaceRef<'_>) -> Option { + let index = self.find(place)?; + self.apply(index, TrackElem::Discriminant) + } + /// Iterate over all direct children. pub fn children(&self, parent: PlaceIndex) -> impl Iterator + '_ { Children::new(self, parent) } + /// Invoke a function on the given place and all places that may alias it. + /// + /// In particular, when the given place has a variant downcast, we invoke the function on all + /// the other variants. + /// + /// `tail_elem` allows to support discriminants that are not a place in MIR, but that we track + /// as such. + fn for_each_aliasing_place( + &self, + place: PlaceRef<'_>, + tail_elem: Option, + f: &mut impl FnMut(PlaceIndex), + ) { + let Some(&Some(mut index)) = self.locals.get(place.local) else { + // The local is not tracked at all, nothing to invalidate. + return; + }; + let elems = place + .projection + .iter() + .map(|&elem| elem.try_into()) + .chain(tail_elem.map(Ok).into_iter()); + for elem in elems { + let Ok(elem) = elem else { return }; + let sub = self.apply(index, elem); + if let TrackElem::Variant(..) | TrackElem::Discriminant = elem { + // Writing to an enum variant field invalidates the other variants and the discriminant. + self.for_each_variant_sibling(index, sub, f); + } + if let Some(sub) = sub { + index = sub + } else { + return; + } + } + self.preorder_invoke(index, f); + } + + /// Invoke the given function on all the descendants of the given place, except one branch. + pub fn for_each_variant_sibling( + &self, + parent: PlaceIndex, + preserved_child: Option, + f: &mut impl FnMut(PlaceIndex), + ) { + for sibling in self.children(parent) { + let elem = self.places[sibling].proj_elem; + // Only invalidate variants and discriminant. Fields (for generators) are not + // invalidated by assignment to a variant. + if let Some(TrackElem::Variant(..) | TrackElem::Discriminant) = elem + // Only invalidate the other variants, the current one is fine. + && Some(sibling) != preserved_child + { + self.preorder_invoke(sibling, f); + } + } + } + /// Invoke a function on the given place and all descendants. 
- pub fn preorder_invoke(&self, root: PlaceIndex, f: &mut impl FnMut(PlaceIndex)) { + fn preorder_invoke(&self, root: PlaceIndex, f: &mut impl FnMut(PlaceIndex)) { f(root); for child in self.children(root) { self.preorder_invoke(child, f); @@ -759,6 +860,7 @@ impl<'a> Iterator for Children<'a> { } /// Used as the result of an operand or r-value. +#[derive(Debug)] pub enum ValueOrPlace { Value(V), Place(PlaceIndex), @@ -776,6 +878,8 @@ impl ValueOrPlace { #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum TrackElem { Field(Field), + Variant(VariantIdx), + Discriminant, } impl TryFrom> for TrackElem { @@ -784,6 +888,7 @@ impl TryFrom> for TrackElem { fn try_from(value: ProjectionElem) -> Result { match value { ProjectionElem::Field(field, _) => Ok(TrackElem::Field(field)), + ProjectionElem::Downcast(_, idx) => Ok(TrackElem::Variant(idx)), _ => Err(()), } } @@ -900,6 +1005,12 @@ fn debug_with_context_rec( for child in map.children(place) { let info_elem = map.places[child].proj_elem.unwrap(); let child_place_str = match info_elem { + TrackElem::Discriminant => { + format!("discriminant({})", place_str) + } + TrackElem::Variant(idx) => { + format!("({} as {:?})", place_str, idx) + } TrackElem::Field(field) => { if place_str.starts_with('*') { format!("({}).{}", place_str, field.index()) diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs index 949a59a97bfb6..f10f208f5de3c 100644 --- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs +++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs @@ -13,6 +13,7 @@ use rustc_mir_dataflow::value_analysis::{Map, State, TrackElem, ValueAnalysis, V use rustc_mir_dataflow::{lattice::FlatSet, Analysis, ResultsVisitor, SwitchIntEdgeEffects}; use rustc_span::DUMMY_SP; use rustc_target::abi::Align; +use rustc_target::abi::VariantIdx; use crate::MirPass; @@ -30,6 +31,7 @@ impl<'tcx> MirPass<'tcx> for DataflowConstProp { #[instrument(skip_all level = "debug")] fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + debug!(def_id = ?body.source.def_id()); if tcx.sess.mir_opt_level() < 4 && body.basic_blocks.len() > BLOCK_LIMIT { debug!("aborted dataflow const prop due too many basic blocks"); return; @@ -63,14 +65,31 @@ impl<'tcx> MirPass<'tcx> for DataflowConstProp { } } -struct ConstAnalysis<'tcx> { +struct ConstAnalysis<'a, 'tcx> { map: Map, tcx: TyCtxt<'tcx>, + local_decls: &'a LocalDecls<'tcx>, ecx: InterpCx<'tcx, 'tcx, DummyMachine>, param_env: ty::ParamEnv<'tcx>, } -impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> { +impl<'tcx> ConstAnalysis<'_, 'tcx> { + fn eval_disciminant( + &self, + enum_ty: Ty<'tcx>, + variant_index: VariantIdx, + ) -> Option> { + if !enum_ty.is_enum() { + return None; + } + let discr = enum_ty.discriminant_for_variant(self.tcx, variant_index)?; + let discr_layout = self.tcx.layout_of(self.param_env.and(discr.ty)).ok()?; + let discr_value = Scalar::try_from_uint(discr.val, discr_layout.size)?; + Some(ScalarTy(discr_value, discr.ty)) + } +} + +impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> { type Value = FlatSet>; const NAME: &'static str = "ConstAnalysis"; @@ -79,6 +98,25 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> { &self.map } + fn handle_statement(&self, statement: &Statement<'tcx>, state: &mut State) { + match statement.kind { + StatementKind::SetDiscriminant { box ref place, variant_index } => { + state.flood_discr(place.as_ref(), &self.map); + if self.map.find_discr(place.as_ref()).is_some() { + 
let enum_ty = place.ty(self.local_decls, self.tcx).ty; + if let Some(discr) = self.eval_disciminant(enum_ty, variant_index) { + state.assign_discr( + place.as_ref(), + ValueOrPlace::Value(FlatSet::Elem(discr)), + &self.map, + ); + } + } + } + _ => self.super_statement(statement, state), + } + } + fn handle_assign( &self, target: Place<'tcx>, @@ -87,17 +125,22 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> { ) { match rvalue { Rvalue::Aggregate(kind, operands) => { - let target = self.map().find(target.as_ref()); - if let Some(target) = target { - state.flood_idx_with(target, self.map(), FlatSet::Bottom); - let field_based = match **kind { - AggregateKind::Tuple | AggregateKind::Closure(..) => true, - AggregateKind::Adt(def_id, ..) => { - matches!(self.tcx.def_kind(def_id), DefKind::Struct) + state.flood_with(target.as_ref(), self.map(), FlatSet::Bottom); + if let Some(target_idx) = self.map().find(target.as_ref()) { + let (variant_target, variant_index) = match **kind { + AggregateKind::Tuple | AggregateKind::Closure(..) => { + (Some(target_idx), None) + } + AggregateKind::Adt(def_id, variant_index, ..) => { + match self.tcx.def_kind(def_id) { + DefKind::Struct => (Some(target_idx), None), + DefKind::Enum => (Some(target_idx), Some(variant_index)), + _ => (None, None), + } } - _ => false, + _ => (None, None), }; - if field_based { + if let Some(target) = variant_target { for (field_index, operand) in operands.iter().enumerate() { if let Some(field) = self .map() @@ -108,15 +151,20 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> { } } } + if let Some(variant_index) = variant_index + && let Some(discr_idx) = self.map().apply(target_idx, TrackElem::Discriminant) + { + let enum_ty = target.ty(self.local_decls, self.tcx).ty; + if let Some(discr_val) = self.eval_disciminant(enum_ty, variant_index) { + state.assign_idx(discr_idx, ValueOrPlace::Value(FlatSet::Elem(discr_val)), &self.map); + } + } } } Rvalue::CheckedBinaryOp(op, box (left, right)) => { + state.flood(target.as_ref(), self.map()); + let target = self.map().find(target.as_ref()); - if let Some(target) = target { - // We should not track any projections other than - // what is overwritten below, but just in case... 
- state.flood_idx(target, self.map()); - } let value_target = target .and_then(|target| self.map().apply(target, TrackElem::Field(0_u32.into()))); @@ -195,6 +243,9 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'tcx> { FlatSet::Bottom => ValueOrPlace::Value(FlatSet::Bottom), FlatSet::Top => ValueOrPlace::Value(FlatSet::Top), }, + Rvalue::Discriminant(place) => { + ValueOrPlace::Value(state.get_discr(place.as_ref(), self.map())) + } _ => self.super_rvalue(rvalue, state), } } @@ -268,12 +319,13 @@ impl<'tcx> std::fmt::Debug for ScalarTy<'tcx> { } } -impl<'tcx> ConstAnalysis<'tcx> { - pub fn new(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, map: Map) -> Self { +impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> { + pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, map: Map) -> Self { let param_env = tcx.param_env(body.source.def_id()); Self { map, tcx, + local_decls: &body.local_decls, ecx: InterpCx::new(tcx, DUMMY_SP, param_env, DummyMachine), param_env: param_env, } @@ -466,6 +518,21 @@ impl<'tcx, 'map, 'a> Visitor<'tcx> for OperandCollector<'tcx, 'map, 'a> { _ => (), } } + + fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { + match rvalue { + Rvalue::Discriminant(place) => { + match self.state.get_discr(place.as_ref(), self.visitor.map) { + FlatSet::Top => (), + FlatSet::Elem(value) => { + self.visitor.before_effect.insert((location, *place), value); + } + FlatSet::Bottom => (), + } + } + _ => self.super_rvalue(rvalue, location), + } + } } struct DummyMachine; diff --git a/tests/mir-opt/dataflow-const-prop/enum.mutate_discriminant.DataflowConstProp.diff b/tests/mir-opt/dataflow-const-prop/enum.mutate_discriminant.DataflowConstProp.diff new file mode 100644 index 0000000000000..038e6c6bd9005 --- /dev/null +++ b/tests/mir-opt/dataflow-const-prop/enum.mutate_discriminant.DataflowConstProp.diff @@ -0,0 +1,26 @@ +- // MIR for `mutate_discriminant` before DataflowConstProp ++ // MIR for `mutate_discriminant` after DataflowConstProp + + fn mutate_discriminant() -> u8 { + let mut _0: u8; // return place in scope 0 at $DIR/enum.rs:+0:29: +0:31 + let mut _1: std::option::Option; // in scope 0 at $SRC_DIR/core/src/intrinsics/mir.rs:LL:COL + let mut _2: isize; // in scope 0 at $SRC_DIR/core/src/intrinsics/mir.rs:LL:COL + + bb0: { + discriminant(_1) = 1; // scope 0 at $DIR/enum.rs:+4:13: +4:34 + (((_1 as variant#1).0: NonZeroUsize).0: usize) = const 0_usize; // scope 0 at $DIR/enum.rs:+6:13: +6:64 + _2 = discriminant(_1); // scope 0 at $SRC_DIR/core/src/intrinsics/mir.rs:LL:COL + switchInt(_2) -> [0: bb1, otherwise: bb2]; // scope 0 at $DIR/enum.rs:+9:13: +12:14 + } + + bb1: { + _0 = const 1_u8; // scope 0 at $DIR/enum.rs:+15:13: +15:20 + return; // scope 0 at $DIR/enum.rs:+16:13: +16:21 + } + + bb2: { + _0 = const 2_u8; // scope 0 at $DIR/enum.rs:+19:13: +19:20 + unreachable; // scope 0 at $DIR/enum.rs:+20:13: +20:26 + } + } + diff --git a/tests/mir-opt/dataflow-const-prop/enum.rs b/tests/mir-opt/dataflow-const-prop/enum.rs index 13288577dea3f..7ea405bd9c408 100644 --- a/tests/mir-opt/dataflow-const-prop/enum.rs +++ b/tests/mir-opt/dataflow-const-prop/enum.rs @@ -1,13 +1,52 @@ // unit-test: DataflowConstProp -// Not trackable, because variants could be aliased. 
+#![feature(custom_mir, core_intrinsics, rustc_attrs)] + +use std::intrinsics::mir::*; + enum E { V1(i32), V2(i32) } -// EMIT_MIR enum.main.DataflowConstProp.diff -fn main() { +// EMIT_MIR enum.simple.DataflowConstProp.diff +fn simple() { let e = E::V1(0); let x = match e { E::V1(x) => x, E::V2(x) => x }; } + +#[rustc_layout_scalar_valid_range_start(1)] +#[rustc_nonnull_optimization_guaranteed] +struct NonZeroUsize(usize); + +// EMIT_MIR enum.mutate_discriminant.DataflowConstProp.diff +#[custom_mir(dialect = "runtime", phase = "post-cleanup")] +fn mutate_discriminant() -> u8 { + mir!( + let x: Option; + { + SetDiscriminant(x, 1); + // This assignment overwrites the niche in which the discriminant is stored. + place!(Field(Field(Variant(x, 1), 0), 0)) = 0_usize; + // So we cannot know the value of this discriminant. + let a = Discriminant(x); + match a { + 0 => bb1, + _ => bad, + } + } + bb1 = { + RET = 1; + Return() + } + bad = { + RET = 2; + Unreachable() + } + ) +} + +fn main() { + simple(); + mutate_discriminant(); +} diff --git a/tests/mir-opt/dataflow-const-prop/enum.main.DataflowConstProp.diff b/tests/mir-opt/dataflow-const-prop/enum.simple.DataflowConstProp.diff similarity index 84% rename from tests/mir-opt/dataflow-const-prop/enum.main.DataflowConstProp.diff rename to tests/mir-opt/dataflow-const-prop/enum.simple.DataflowConstProp.diff index d049c79d78def..1fb65e6584525 100644 --- a/tests/mir-opt/dataflow-const-prop/enum.main.DataflowConstProp.diff +++ b/tests/mir-opt/dataflow-const-prop/enum.simple.DataflowConstProp.diff @@ -1,8 +1,8 @@ -- // MIR for `main` before DataflowConstProp -+ // MIR for `main` after DataflowConstProp +- // MIR for `simple` before DataflowConstProp ++ // MIR for `simple` after DataflowConstProp - fn main() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum.rs:+0:11: +0:11 + fn simple() -> () { + let mut _0: (); // return place in scope 0 at $DIR/enum.rs:+0:13: +0:13 let _1: E; // in scope 0 at $DIR/enum.rs:+1:9: +1:10 let mut _3: isize; // in scope 0 at $DIR/enum.rs:+2:23: +2:31 scope 1 { @@ -25,8 +25,10 @@ StorageLive(_1); // scope 0 at $DIR/enum.rs:+1:9: +1:10 _1 = E::V1(const 0_i32); // scope 0 at $DIR/enum.rs:+1:13: +1:21 StorageLive(_2); // scope 1 at $DIR/enum.rs:+2:9: +2:10 - _3 = discriminant(_1); // scope 1 at $DIR/enum.rs:+2:19: +2:20 - switchInt(move _3) -> [0: bb3, 1: bb1, otherwise: bb2]; // scope 1 at $DIR/enum.rs:+2:13: +2:20 +- _3 = discriminant(_1); // scope 1 at $DIR/enum.rs:+2:19: +2:20 +- switchInt(move _3) -> [0: bb3, 1: bb1, otherwise: bb2]; // scope 1 at $DIR/enum.rs:+2:13: +2:20 ++ _3 = const 0_isize; // scope 1 at $DIR/enum.rs:+2:19: +2:20 ++ switchInt(const 0_isize) -> [0: bb3, 1: bb1, otherwise: bb2]; // scope 1 at $DIR/enum.rs:+2:13: +2:20 } bb1: { @@ -50,7 +52,7 @@ } bb4: { - _0 = const (); // scope 0 at $DIR/enum.rs:+0:11: +3:2 + _0 = const (); // scope 0 at $DIR/enum.rs:+0:13: +3:2 StorageDead(_2); // scope 1 at $DIR/enum.rs:+3:1: +3:2 StorageDead(_1); // scope 0 at $DIR/enum.rs:+3:1: +3:2 return; // scope 0 at $DIR/enum.rs:+3:2: +3:2 From c48756cdbfb1725251cbfa6fe760b2cb4e47b2d9 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 28 Jan 2023 11:23:18 +0000 Subject: [PATCH 292/501] Limit creation of tracked place directly. 
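Instead of building the full place map and then aborting the pass when it
turns out to track too many places, pass an optional `place_limit` down into
registration and stop allocating value slots once the budget is reached. A
minimal standalone sketch of the idea (the `Map` below is a simplified
stand-in for illustration, not the compiler's type):

    struct Map {
        value_count: usize,
    }

    impl Map {
        fn register_rec(&mut self, depth: usize, place_limit: Option<usize>) {
            // Stop registering as soon as the budget is exhausted.
            if let Some(limit) = place_limit {
                if self.value_count >= limit {
                    return;
                }
            }
            // Allocate a value slot for this place.
            self.value_count += 1;
            // Recurse into the two "fields" of this place.
            if depth > 0 {
                self.register_rec(depth - 1, place_limit);
                self.register_rec(depth - 1, place_limit);
            }
        }
    }

    fn main() {
        let mut map = Map { value_count: 0 };
        // A full tree of depth 10 would register 2047 places; the limit caps it.
        map.register_rec(10, Some(100));
        assert!(map.value_count <= 100);
    }

As before, no limit is applied at `mir-opt-level >= 4`.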
--- .../rustc_mir_dataflow/src/value_analysis.rs | 22 +++++++++++++++---- .../src/dataflow_const_prop.rs | 11 ++++------ 2 files changed, 22 insertions(+), 11 deletions(-) diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index 03b6c182062db..f587f17f12a1b 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -599,10 +599,11 @@ impl Map { tcx: TyCtxt<'tcx>, body: &Body<'tcx>, filter: impl FnMut(Ty<'tcx>) -> bool, + place_limit: Option, ) -> Self { let mut map = Self::new(); let exclude = excluded_locals(body); - map.register_with_filter(tcx, body, filter, exclude); + map.register_with_filter(tcx, body, filter, exclude, place_limit); debug!("registered {} places ({} nodes in total)", map.value_count, map.places.len()); map } @@ -614,12 +615,20 @@ impl Map { body: &Body<'tcx>, mut filter: impl FnMut(Ty<'tcx>) -> bool, exclude: BitSet, + place_limit: Option, ) { // We use this vector as stack, pushing and popping projections. let mut projection = Vec::new(); for (local, decl) in body.local_decls.iter_enumerated() { if !exclude.contains(local) { - self.register_with_filter_rec(tcx, local, &mut projection, decl.ty, &mut filter); + self.register_with_filter_rec( + tcx, + local, + &mut projection, + decl.ty, + &mut filter, + place_limit, + ); } } } @@ -634,7 +643,12 @@ impl Map { projection: &mut Vec>, ty: Ty<'tcx>, filter: &mut impl FnMut(Ty<'tcx>) -> bool, + place_limit: Option, ) { + if let Some(place_limit) = place_limit && self.value_count >= place_limit { + return + } + // We know that the projection only contains trackable elements. let place = self.make_place(local, projection).unwrap(); @@ -672,13 +686,13 @@ impl Map { projection.push(PlaceElem::Downcast(None, variant)); let _ = self.make_place(local, projection); projection.push(PlaceElem::Field(field, ty)); - self.register_with_filter_rec(tcx, local, projection, ty, filter); + self.register_with_filter_rec(tcx, local, projection, ty, filter, place_limit); projection.pop(); projection.pop(); return; } projection.push(PlaceElem::Field(field, ty)); - self.register_with_filter_rec(tcx, local, projection, ty, filter); + self.register_with_filter_rec(tcx, local, projection, ty, filter, place_limit); projection.pop(); }); } diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs index f10f208f5de3c..bfb1eb8b5fb79 100644 --- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs +++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs @@ -37,9 +37,6 @@ impl<'tcx> MirPass<'tcx> for DataflowConstProp { return; } - // Decide which places to track during the analysis. - let map = Map::from_filter(tcx, body, Ty::is_scalar); - // We want to have a somewhat linear runtime w.r.t. the number of statements/terminators. // Let's call this number `n`. Dataflow analysis has `O(h*n)` transfer function // applications, where `h` is the height of the lattice. Because the height of our lattice @@ -48,10 +45,10 @@ impl<'tcx> MirPass<'tcx> for DataflowConstProp { // `O(num_nodes * tracked_places * n)` in terms of time complexity. Since the number of // map nodes is strongly correlated to the number of tracked places, this becomes more or // less `O(n)` if we place a constant limit on the number of tracked places. 
- if tcx.sess.mir_opt_level() < 4 && map.tracked_places() > PLACE_LIMIT { - debug!("aborted dataflow const prop due to too many tracked places"); - return; - } + let place_limit = if tcx.sess.mir_opt_level() < 4 { Some(PLACE_LIMIT) } else { None }; + + // Decide which places to track during the analysis. + let map = Map::from_filter(tcx, body, Ty::is_scalar, place_limit); // Perform the actual dataflow analysis. let analysis = ConstAnalysis::new(tcx, body, map); From 9af191f86f2c81ec5613ae35ab1a3b2ac3edbdee Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sun, 29 Jan 2023 14:20:45 +0000 Subject: [PATCH 293/501] Improve value_analysis API. --- .../rustc_mir_dataflow/src/value_analysis.rs | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index f587f17f12a1b..353b8d801d57b 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -735,20 +735,31 @@ impl Map { } /// Locates the given place, if it exists in the tree. - pub fn find(&self, place: PlaceRef<'_>) -> Option { + pub fn find_extra( + &self, + place: PlaceRef<'_>, + extra: impl IntoIterator, + ) -> Option { let mut index = *self.locals.get(place.local)?.as_ref()?; for &elem in place.projection { index = self.apply(index, elem.try_into().ok()?)?; } + for elem in extra { + index = self.apply(index, elem)?; + } Some(index) } /// Locates the given place, if it exists in the tree. + pub fn find(&self, place: PlaceRef<'_>) -> Option { + self.find_extra(place, []) + } + + /// Locates the given place and applies `Discriminant`, if it exists in the tree. pub fn find_discr(&self, place: PlaceRef<'_>) -> Option { - let index = self.find(place)?; - self.apply(index, TrackElem::Discriminant) + self.find_extra(place, [TrackElem::Discriminant]) } /// Iterate over all direct children. @@ -763,14 +774,14 @@ impl Map { /// /// `tail_elem` allows to support discriminants that are not a place in MIR, but that we track /// as such. - fn for_each_aliasing_place( + pub fn for_each_aliasing_place( &self, place: PlaceRef<'_>, tail_elem: Option, f: &mut impl FnMut(PlaceIndex), ) { let Some(&Some(mut index)) = self.locals.get(place.local) else { - // The local is not tracked at all, nothing to invalidate. + // The local is not tracked at all, so it does not alias anything. return; }; let elems = place @@ -782,7 +793,7 @@ impl Map { let Ok(elem) = elem else { return }; let sub = self.apply(index, elem); if let TrackElem::Variant(..) | TrackElem::Discriminant = elem { - // Writing to an enum variant field invalidates the other variants and the discriminant. + // Enum variant fields and enum discriminants alias each another. self.for_each_variant_sibling(index, sub, f); } if let Some(sub) = sub { @@ -795,7 +806,7 @@ impl Map { } /// Invoke the given function on all the descendants of the given place, except one branch. - pub fn for_each_variant_sibling( + fn for_each_variant_sibling( &self, parent: PlaceIndex, preserved_child: Option, From 67a8c16fe285dc5dc3ca8a0c74fb1bcfa58ce8dc Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Mon, 30 Jan 2023 17:37:56 +0000 Subject: [PATCH 294/501] Complete for_each_aliasing_place. 
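`for_each_aliasing_place` now bails out early for indirect places (using a new
`PlaceRef::is_indirect` helper) and invokes the callback for every prefix of
the place, since a field aliases its parent place.

A rough, self-contained sketch of the walk (hypothetical simplified types, not
the actual rustc data structures):

    #[derive(Clone, Copy, PartialEq)]
    enum Elem {
        Field(usize),
        Deref,
    }

    fn for_each_aliasing_prefix(projection: &[Elem], mut f: impl FnMut(usize)) {
        // Places behind a pointer are not tracked, so they are skipped.
        if projection.contains(&Elem::Deref) {
            return;
        }
        // The base local and every longer prefix alias the full place.
        for prefix_len in 0..=projection.len() {
            f(prefix_len);
        }
    }

    fn main() {
        let mut seen = Vec::new();
        for_each_aliasing_prefix(&[Elem::Field(0), Elem::Field(1)], |n| seen.push(n));
        assert_eq!(seen, vec![0, 1, 2]);
    }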
--- compiler/rustc_middle/src/mir/mod.rs | 8 ++++++++ compiler/rustc_mir_dataflow/src/value_analysis.rs | 7 +++++++ 2 files changed, 15 insertions(+) diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 10ac7e0d39af6..e272c90e0cdaf 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -1640,6 +1640,14 @@ impl<'tcx> PlaceRef<'tcx> { } } + /// Returns `true` if this `Place` contains a `Deref` projection. + /// + /// If `Place::is_indirect` returns false, the caller knows that the `Place` refers to the + /// same region of memory as its base. + pub fn is_indirect(&self) -> bool { + self.projection.iter().any(|elem| elem.is_indirect()) + } + /// If MirPhase >= Derefered and if projection contains Deref, /// It's guaranteed to be in the first place pub fn has_deref(&self) -> bool { diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index 353b8d801d57b..f24280e218716 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -780,6 +780,10 @@ impl Map { tail_elem: Option, f: &mut impl FnMut(PlaceIndex), ) { + if place.is_indirect() { + // We do not track indirect places. + return; + } let Some(&Some(mut index)) = self.locals.get(place.local) else { // The local is not tracked at all, so it does not alias anything. return; @@ -790,6 +794,9 @@ impl Map { .map(|&elem| elem.try_into()) .chain(tail_elem.map(Ok).into_iter()); for elem in elems { + // A field aliases the parent place. + f(index); + let Ok(elem) = elem else { return }; let sub = self.apply(index, elem); if let TrackElem::Variant(..) | TrackElem::Discriminant = elem { From df889c9821970020abb8dbb1ed9b0014e38c6137 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Fri, 3 Feb 2023 16:39:10 +0000 Subject: [PATCH 295/501] Rename assign_idx methods. --- .../rustc_mir_dataflow/src/value_analysis.rs | 55 +++++++++++-------- .../src/dataflow_const_prop.rs | 15 +++-- 2 files changed, 39 insertions(+), 31 deletions(-) diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs index f24280e218716..2da7cdd02a7f8 100644 --- a/compiler/rustc_mir_dataflow/src/value_analysis.rs +++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs @@ -24,7 +24,7 @@ //! - The bottom state denotes uninitialized memory. Because we are only doing a sound approximation //! of the actual execution, we can also use this state for places where access would be UB. //! -//! - The assignment logic in `State::assign_place_idx` assumes that the places are non-overlapping, +//! - The assignment logic in `State::insert_place_idx` assumes that the places are non-overlapping, //! or identical. Note that this refers to place expressions, not memory locations. //! //! - Currently, places that have their reference taken cannot be tracked. Although this would be @@ -470,6 +470,28 @@ impl State { self.flood_discr_with(place, map, V::top()) } + /// Low-level method that assigns to a place. + /// This does nothing if the place is not tracked. + /// + /// The target place must have been flooded before calling this method. 
+ pub fn insert_idx(&mut self, target: PlaceIndex, result: ValueOrPlace, map: &Map) { + match result { + ValueOrPlace::Value(value) => self.insert_value_idx(target, value, map), + ValueOrPlace::Place(source) => self.insert_place_idx(target, source, map), + } + } + + /// Low-level method that assigns a value to a place. + /// This does nothing if the place is not tracked. + /// + /// The target place must have been flooded before calling this method. + pub fn insert_value_idx(&mut self, target: PlaceIndex, value: V, map: &Map) { + let StateData::Reachable(values) = &mut self.0 else { return }; + if let Some(value_index) = map.places[target].value_index { + values[value_index] = value; + } + } + /// Copies `source` to `target`, including all tracked places beneath. /// /// If `target` contains a place that is not contained in `source`, it will be overwritten with @@ -477,52 +499,39 @@ impl State { /// places that are non-overlapping or identical. /// /// The target place must have been flooded before calling this method. - fn assign_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) { + fn insert_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) { let StateData::Reachable(values) = &mut self.0 else { return }; - // If both places are tracked, we copy the value to the target. If the target is tracked, - // but the source is not, we have to invalidate the value in target. If the target is not - // tracked, then we don't have to do anything. + // If both places are tracked, we copy the value to the target. + // If the target is tracked, but the source is not, we do nothing, as invalidation has + // already been performed. if let Some(target_value) = map.places[target].value_index { if let Some(source_value) = map.places[source].value_index { values[target_value] = values[source_value].clone(); - } else { - values[target_value] = V::top(); } } for target_child in map.children(target) { // Try to find corresponding child and recurse. Reasoning is similar as above. let projection = map.places[target_child].proj_elem.unwrap(); if let Some(source_child) = map.projections.get(&(source, projection)) { - self.assign_place_idx(target_child, *source_child, map); + self.insert_place_idx(target_child, *source_child, map); } } } + /// Helper method to interpret `target = result`. pub fn assign(&mut self, target: PlaceRef<'_>, result: ValueOrPlace, map: &Map) { self.flood(target, map); if let Some(target) = map.find(target) { - self.assign_idx(target, result, map); + self.insert_idx(target, result, map); } } + /// Helper method for assignments to a discriminant. pub fn assign_discr(&mut self, target: PlaceRef<'_>, result: ValueOrPlace, map: &Map) { self.flood_discr(target, map); if let Some(target) = map.find_discr(target) { - self.assign_idx(target, result, map); - } - } - - /// The target place must have been flooded before calling this method. 
- pub fn assign_idx(&mut self, target: PlaceIndex, result: ValueOrPlace, map: &Map) { - match result { - ValueOrPlace::Value(value) => { - let StateData::Reachable(values) = &mut self.0 else { return }; - if let Some(value_index) = map.places[target].value_index { - values[value_index] = value; - } - } - ValueOrPlace::Place(source) => self.assign_place_idx(target, source, map), + self.insert_idx(target, result, map); } } diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs index bfb1eb8b5fb79..d715e250ca4eb 100644 --- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs +++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs @@ -144,7 +144,7 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> { .apply(target, TrackElem::Field(Field::from_usize(field_index))) { let result = self.handle_operand(operand, state); - state.assign_idx(field, result, self.map()); + state.insert_idx(field, result, self.map()); } } } @@ -153,12 +153,13 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> { { let enum_ty = target.ty(self.local_decls, self.tcx).ty; if let Some(discr_val) = self.eval_disciminant(enum_ty, variant_index) { - state.assign_idx(discr_idx, ValueOrPlace::Value(FlatSet::Elem(discr_val)), &self.map); + state.insert_value_idx(discr_idx, FlatSet::Elem(discr_val), &self.map); } } } } Rvalue::CheckedBinaryOp(op, box (left, right)) => { + // Flood everything now, so we can use `insert_value_idx` directly later. state.flood(target.as_ref(), self.map()); let target = self.map().find(target.as_ref()); @@ -172,7 +173,8 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> { let (val, overflow) = self.binary_op(state, *op, left, right); if let Some(value_target) = value_target { - state.assign_idx(value_target, ValueOrPlace::Value(val), self.map()); + // We have flooded `target` earlier. + state.insert_value_idx(value_target, val, self.map()); } if let Some(overflow_target) = overflow_target { let overflow = match overflow { @@ -187,11 +189,8 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> { } FlatSet::Bottom => FlatSet::Bottom, }; - state.assign_idx( - overflow_target, - ValueOrPlace::Value(overflow), - self.map(), - ); + // We have flooded `target` earlier. + state.insert_value_idx(overflow_target, overflow, self.map()); } } } From 8d14502574bfc978f0f16fd2e407fa6eceb466c4 Mon Sep 17 00:00:00 2001 From: Tshepang Mbambo Date: Tue, 7 Feb 2023 04:25:05 +0200 Subject: [PATCH 296/501] refer to new home The module has since been made its own crate... see 2d75a339ca9e7cd11338b165311927e6eb73cca4. --- compiler/rustc_session/src/session.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index 8a0176f639174..8abd6f1e009fd 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -156,7 +156,7 @@ pub struct Session { /// `-C metadata` arguments passed to the compiler. Its value forms a unique /// global identifier for the crate. It is used to allow multiple crates /// with the same name to coexist. See the - /// `rustc_codegen_llvm::back::symbol_names` module for more information. + /// `rustc_symbol_mangling` crate for more information. 
pub stable_crate_id: OnceCell, features: OnceCell, From c825e08571136deb5643ccd42bddf76b799ea93b Mon Sep 17 00:00:00 2001 From: Rafael Rivera Date: Mon, 6 Feb 2023 21:17:06 -0800 Subject: [PATCH 297/501] Specify dlltool prefix when generating import libs --- compiler/rustc_codegen_llvm/src/back/archive.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/compiler/rustc_codegen_llvm/src/back/archive.rs b/compiler/rustc_codegen_llvm/src/back/archive.rs index 58ca87524deb6..dd3268d7780c6 100644 --- a/compiler/rustc_codegen_llvm/src/back/archive.rs +++ b/compiler/rustc_codegen_llvm/src/back/archive.rs @@ -183,6 +183,12 @@ impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder { // able to control the *exact* spelling of each of the symbols that are being imported: // hence we don't want `dlltool` adding leading underscores automatically. let dlltool = find_binutils_dlltool(sess); + let temp_prefix = { + let mut path = PathBuf::from(&output_path); + path.pop(); + path.push(lib_name); + path + }; let result = std::process::Command::new(dlltool) .args([ "-d", @@ -192,6 +198,8 @@ impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder { "-l", output_path.to_str().unwrap(), "--no-leading-underscore", + "--temp-prefix", + temp_prefix.to_str().unwrap(), ]) .output(); From 18144b66e1515fa1391b7c7034ba55c47511fb9e Mon Sep 17 00:00:00 2001 From: kadmin Date: Mon, 10 May 2021 04:13:02 +0000 Subject: [PATCH 298/501] Create initial version of opt --- .../rustc_mir/src/transform/large_enums.rs | 212 ++++++++++++++++++ 1 file changed, 212 insertions(+) create mode 100644 compiler/rustc_mir/src/transform/large_enums.rs diff --git a/compiler/rustc_mir/src/transform/large_enums.rs b/compiler/rustc_mir/src/transform/large_enums.rs new file mode 100644 index 0000000000000..b742b7a45e66e --- /dev/null +++ b/compiler/rustc_mir/src/transform/large_enums.rs @@ -0,0 +1,212 @@ +use crate::transform::MirPass; +use crate::util::patch::MirPatch; +use rustc_data_structures::stable_map::FxHashMap; +use rustc_middle::mir::*; +use rustc_middle::ty::{self, Const, List, Ty, TyCtxt}; +use rustc_span::def_id::DefId; +use rustc_target::abi::{Size, Variants}; + +/// A pass that seeks to optimize unnecessary moves of large enum types, if there is a large +/// enough discrepanc between them +pub struct EnumSizeOpt; + +impl<'tcx, const D: u64> MirPass<'tcx> for EnumSizeOpt { + fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + self.optim(tcx, body); + } +} + +impl EnumSizeOpt { + fn candidate<'tcx>( + tcx: TyCtxt<'tcx>, + ty: Ty<'tcx>, + body_did: DefId, + ) -> Option<(Size, u64, Vec)> { + match ty.kind() { + ty::Adt(adt_def, _substs) if adt_def.is_enum() => { + let p_e = tcx.param_env(body_did); + // FIXME(jknodt) handle error better below + let layout = tcx.layout_of(p_e.and(ty)).unwrap(); + let variants = &layout.variants; + match variants { + Variants::Single { .. } => None, + Variants::Multiple { variants, .. } if variants.len() <= 1 => None, + Variants::Multiple { variants, .. 
} => { + let min = variants.iter().map(|v| v.size).min().unwrap(); + let max = variants.iter().map(|v| v.size).max().unwrap(); + if max.bytes() - min.bytes() < D { + return None; + } + Some(( + layout.size, + variants.len() as u64, + variants.iter().map(|v| v.size).collect(), + )) + } + } + } + _ => None, + } + } + fn optim(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let mut match_cache = FxHashMap::default(); + let body_did = body.source.def_id(); + let mut patch = MirPatch::new(body); + let (bbs, local_decls) = body.basic_blocks_and_local_decls_mut(); + for bb in bbs { + bb.expand_statements(|st| { + match &st.kind { + StatementKind::Assign(box ( + lhs, + Rvalue::Use(Operand::Copy(rhs) | Operand::Move(rhs)), + )) => { + let ty = lhs.ty(local_decls, tcx).ty; + let (total_size, num_variants, sizes) = + if let Some((ts, nv, s)) = match_cache.get(ty) { + (*ts, *nv, s) + } else if let Some((ts, nv, s)) = Self::candidate(tcx, ty, body_did) { + // FIXME(jknodt) use entry API. + match_cache.insert(ty, (ts, nv, s)); + let (ts, nv, s) = match_cache.get(ty).unwrap(); + (*ts, *nv, s) + } else { + return None; + }; + + let source_info = st.source_info; + let span = source_info.span; + + let tmp_ty = tcx.mk_ty(ty::Array( + tcx.types.usize, + Const::from_usize(tcx, num_variants), + )); + + let new_local = patch.new_temp(tmp_ty, span); + let store_live = + Statement { source_info, kind: StatementKind::StorageLive(new_local) }; + + let place = Place { local: new_local, projection: List::empty() }; + let mut data = + vec![0; std::mem::size_of::() * num_variants as usize]; + data.copy_from_slice(unsafe { std::mem::transmute(&sizes[..]) }); + let alloc = interpret::Allocation::from_bytes( + data, + tcx.data_layout.ptr_sized_integer().align(&tcx.data_layout).abi, + ); + let alloc = tcx.intern_const_alloc(alloc); + let constant_vals = Constant { + span, + user_ty: None, + literal: ConstantKind::Val( + interpret::ConstValue::ByRef { alloc, offset: Size::ZERO }, + tmp_ty, + ), + }; + let rval = Rvalue::Use(Operand::Constant(box (constant_vals))); + + let const_assign = Statement { + source_info, + kind: StatementKind::Assign(box (place, rval)), + }; + + // FIXME(jknodt) do I need to add a storage live here for this place? + let discr_place = Place { + local: patch.new_temp(tcx.types.usize, span), + projection: List::empty(), + }; + + let store_discr = Statement { + source_info, + kind: StatementKind::Assign(box ( + discr_place, + Rvalue::Discriminant(*rhs), + )), + }; + + // FIXME(jknodt) do I need to add a storage live here for this place? + let size_place = Place { + local: patch.new_temp(tcx.types.usize, span), + projection: List::empty(), + }; + + let store_size = Statement { + source_info, + kind: StatementKind::Assign(box ( + size_place, + Rvalue::Use(Operand::Copy(Place { + local: discr_place.local, + projection: tcx + .intern_place_elems(&[PlaceElem::Index(size_place.local)]), + })), + )), + }; + + // FIXME(jknodt) do I need to add a storage live here for this place? + let dst = Place { + local: patch.new_temp(tcx.mk_mut_ptr(tcx.types.u8), span), + projection: List::empty(), + }; + + let dst_ptr = Statement { + source_info, + kind: StatementKind::Assign(box ( + dst, + Rvalue::AddressOf(Mutability::Mut, *lhs), + )), + }; + + // FIXME(jknodt) do I need to add a storage live here for this place? 
+ let src = Place { + local: patch.new_temp(tcx.mk_imm_ptr(tcx.types.u8), span), + projection: List::empty(), + }; + + let src_ptr = Statement { + source_info, + kind: StatementKind::Assign(box ( + src, + Rvalue::AddressOf(Mutability::Mut, *rhs), + )), + }; + + let copy_bytes = Statement { + source_info, + kind: StatementKind::CopyNonOverlapping(box CopyNonOverlapping { + src: Operand::Copy(src), + dst: Operand::Copy(src), + count: Operand::Constant( + box (Constant { + span, + user_ty: None, + literal: ConstantKind::Val( + interpret::ConstValue::from_u64(total_size.bytes()), + tcx.types.usize, + ), + }), + ), + }), + }; + + let store_dead = + Statement { source_info, kind: StatementKind::StorageDead(new_local) }; + let iter = std::array::IntoIter::new([ + store_live, + const_assign, + store_discr, + store_size, + dst_ptr, + src_ptr, + copy_bytes, + store_dead, + ]); + + st.make_nop(); + Some(iter) + } + _ => return None, + } + }); + } + patch.apply(body); + } +} From 96db5e9c7b2f8b97b75a5afeae21e0e0abf7bdfe Mon Sep 17 00:00:00 2001 From: kadmin Date: Fri, 28 May 2021 04:17:00 +0000 Subject: [PATCH 299/501] Add comments Still need to make it so that it maps discriminants to variant indexes. Maybe instead I can map the variant indexes to discriminants? --- .../rustc_mir/src/transform/large_enums.rs | 82 +++++++++++-------- 1 file changed, 49 insertions(+), 33 deletions(-) diff --git a/compiler/rustc_mir/src/transform/large_enums.rs b/compiler/rustc_mir/src/transform/large_enums.rs index b742b7a45e66e..a8377c95dcb39 100644 --- a/compiler/rustc_mir/src/transform/large_enums.rs +++ b/compiler/rustc_mir/src/transform/large_enums.rs @@ -4,7 +4,7 @@ use rustc_data_structures::stable_map::FxHashMap; use rustc_middle::mir::*; use rustc_middle::ty::{self, Const, List, Ty, TyCtxt}; use rustc_span::def_id::DefId; -use rustc_target::abi::{Size, Variants}; +use rustc_target::abi::{Size, TagEncoding, Variants}; /// A pass that seeks to optimize unnecessary moves of large enum types, if there is a large /// enough discrepanc between them @@ -31,17 +31,25 @@ impl EnumSizeOpt { match variants { Variants::Single { .. } => None, Variants::Multiple { variants, .. } if variants.len() <= 1 => None, + Variants::Multiple { tag_encoding, .. } + if matches!(tag_encoding, TagEncoding::Niche { .. }) => + { + None + } Variants::Multiple { variants, .. 
} => { let min = variants.iter().map(|v| v.size).min().unwrap(); let max = variants.iter().map(|v| v.size).max().unwrap(); if max.bytes() - min.bytes() < D { return None; } - Some(( - layout.size, - variants.len() as u64, - variants.iter().map(|v| v.size).collect(), - )) + let mut discr_sizes = vec![Size::ZERO; adt_def.discriminants(tcx).count()]; + for (var_idx, layout) in variants.iter_enumerated() { + let disc_idx = + adt_def.discriminant_for_variant(tcx, var_idx).val as usize; + assert_eq!(discr_sizes[disc_idx], Size::ZERO); + discr_sizes[disc_idx] = layout.size; + } + Some((layout.size, variants.len() as u64, discr_sizes)) } } } @@ -49,7 +57,7 @@ impl EnumSizeOpt { } } fn optim(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - let mut match_cache = FxHashMap::default(); + let mut alloc_cache = FxHashMap::default(); let body_did = body.source.def_id(); let mut patch = MirPatch::new(body); let (bbs, local_decls) = body.basic_blocks_and_local_decls_mut(); @@ -61,39 +69,45 @@ impl EnumSizeOpt { Rvalue::Use(Operand::Copy(rhs) | Operand::Move(rhs)), )) => { let ty = lhs.ty(local_decls, tcx).ty; + let source_info = st.source_info; + let span = source_info.span; + let (total_size, num_variants, sizes) = - if let Some((ts, nv, s)) = match_cache.get(ty) { - (*ts, *nv, s) - } else if let Some((ts, nv, s)) = Self::candidate(tcx, ty, body_did) { - // FIXME(jknodt) use entry API. - match_cache.insert(ty, (ts, nv, s)); - let (ts, nv, s) = match_cache.get(ty).unwrap(); - (*ts, *nv, s) + if let Some((ts, nv, s)) = Self::candidate(tcx, ty, body_did) { + (ts, nv, s) } else { return None; }; - let source_info = st.source_info; - let span = source_info.span; + let alloc = if let Some(alloc) = alloc_cache.get(ty) { + alloc + } else { + let mut data = + vec![0; std::mem::size_of::() * num_variants as usize]; + data.copy_from_slice(unsafe { std::mem::transmute(&sizes[..]) }); + let alloc = interpret::Allocation::from_bytes( + data, + tcx.data_layout.ptr_sized_integer().align(&tcx.data_layout).abi, + Mutability::Not, + ); + let alloc = tcx.intern_const_alloc(alloc); + alloc_cache.insert(ty, alloc); + // FIXME(jknodt) use entry API + alloc_cache.get(ty).unwrap() + }; let tmp_ty = tcx.mk_ty(ty::Array( tcx.types.usize, Const::from_usize(tcx, num_variants), )); - let new_local = patch.new_temp(tmp_ty, span); - let store_live = - Statement { source_info, kind: StatementKind::StorageLive(new_local) }; - - let place = Place { local: new_local, projection: List::empty() }; - let mut data = - vec![0; std::mem::size_of::() * num_variants as usize]; - data.copy_from_slice(unsafe { std::mem::transmute(&sizes[..]) }); - let alloc = interpret::Allocation::from_bytes( - data, - tcx.data_layout.ptr_sized_integer().align(&tcx.data_layout).abi, - ); - let alloc = tcx.intern_const_alloc(alloc); + let size_array_local = patch.new_temp(tmp_ty, span); + let store_live = Statement { + source_info, + kind: StatementKind::StorageLive(size_array_local), + }; + + let place = Place { local: size_array_local, projection: List::empty() }; let constant_vals = Constant { span, user_ty: None, @@ -134,9 +148,9 @@ impl EnumSizeOpt { kind: StatementKind::Assign(box ( size_place, Rvalue::Use(Operand::Copy(Place { - local: discr_place.local, + local: size_array_local, projection: tcx - .intern_place_elems(&[PlaceElem::Index(size_place.local)]), + .intern_place_elems(&[PlaceElem::Index(discr_place.local)]), })), )), }; @@ -187,8 +201,10 @@ impl EnumSizeOpt { }), }; - let store_dead = - Statement { source_info, kind: 
StatementKind::StorageDead(new_local) }; + let store_dead = Statement { + source_info, + kind: StatementKind::StorageDead(size_array_local), + }; let iter = std::array::IntoIter::new([ store_live, const_assign, From 33b4d203382f37917f5d5e1bc9057582529a9e90 Mon Sep 17 00:00:00 2001 From: kadmin Date: Tue, 1 Jun 2021 02:30:02 +0000 Subject: [PATCH 300/501] Clean up MIR transform --- .../rustc_mir/src/transform/large_enums.rs | 60 +++++++++++++++---- compiler/rustc_mir_transform/src/lib.rs | 2 + 2 files changed, 51 insertions(+), 11 deletions(-) diff --git a/compiler/rustc_mir/src/transform/large_enums.rs b/compiler/rustc_mir/src/transform/large_enums.rs index a8377c95dcb39..f5bf40846e039 100644 --- a/compiler/rustc_mir/src/transform/large_enums.rs +++ b/compiler/rustc_mir/src/transform/large_enums.rs @@ -26,7 +26,8 @@ impl EnumSizeOpt { ty::Adt(adt_def, _substs) if adt_def.is_enum() => { let p_e = tcx.param_env(body_did); // FIXME(jknodt) handle error better below - let layout = tcx.layout_of(p_e.and(ty)).unwrap(); + let layout = + if let Ok(layout) = tcx.layout_of(p_e.and(ty)) { layout } else { return None }; let variants = &layout.variants; match variants { Variants::Single { .. } => None, @@ -84,7 +85,16 @@ impl EnumSizeOpt { } else { let mut data = vec![0; std::mem::size_of::() * num_variants as usize]; - data.copy_from_slice(unsafe { std::mem::transmute(&sizes[..]) }); + + let mut curr = 0; + for byte in sizes + .iter() + .flat_map(|sz| sz.bytes().to_ne_bytes()) + .take(data.len()) + { + data[curr] = byte; + curr += 1; + } let alloc = interpret::Allocation::from_bytes( data, tcx.data_layout.ptr_sized_integer().align(&tcx.data_layout).abi, @@ -123,9 +133,9 @@ impl EnumSizeOpt { kind: StatementKind::Assign(box (place, rval)), }; - // FIXME(jknodt) do I need to add a storage live here for this place? let discr_place = Place { - local: patch.new_temp(tcx.types.usize, span), + // How do I get the discriminant type? + local: patch.new_temp(tcx.types.isize, span), projection: List::empty(), }; @@ -155,9 +165,8 @@ impl EnumSizeOpt { )), }; - // FIXME(jknodt) do I need to add a storage live here for this place? let dst = Place { - local: patch.new_temp(tcx.mk_mut_ptr(tcx.types.u8), span), + local: patch.new_temp(tcx.mk_mut_ptr(ty), span), projection: List::empty(), }; @@ -169,9 +178,22 @@ impl EnumSizeOpt { )), }; - // FIXME(jknodt) do I need to add a storage live here for this place? 
+ let dst_cast_ty = tcx.mk_mut_ptr(tcx.types.u8); + let dst_cast_place = Place { + local: patch.new_temp(dst_cast_ty, span), + projection: List::empty(), + }; + + let dst_cast = Statement { + source_info, + kind: StatementKind::Assign(box ( + dst_cast_place, + Rvalue::Cast(CastKind::Misc, Operand::Copy(dst), dst_cast_ty), + )), + }; + let src = Place { - local: patch.new_temp(tcx.mk_imm_ptr(tcx.types.u8), span), + local: patch.new_temp(tcx.mk_imm_ptr(ty), span), projection: List::empty(), }; @@ -179,15 +201,29 @@ impl EnumSizeOpt { source_info, kind: StatementKind::Assign(box ( src, - Rvalue::AddressOf(Mutability::Mut, *rhs), + Rvalue::AddressOf(Mutability::Not, *rhs), + )), + }; + + let src_cast_ty = tcx.mk_imm_ptr(tcx.types.u8); + let src_cast_place = Place { + local: patch.new_temp(src_cast_ty, span), + projection: List::empty(), + }; + + let src_cast = Statement { + source_info, + kind: StatementKind::Assign(box ( + src_cast_place, + Rvalue::Cast(CastKind::Misc, Operand::Copy(src), src_cast_ty), )), }; let copy_bytes = Statement { source_info, kind: StatementKind::CopyNonOverlapping(box CopyNonOverlapping { - src: Operand::Copy(src), - dst: Operand::Copy(src), + src: Operand::Copy(src_cast_place), + dst: Operand::Copy(dst_cast_place), count: Operand::Constant( box (Constant { span, @@ -211,7 +247,9 @@ impl EnumSizeOpt { store_discr, store_size, dst_ptr, + dst_cast, src_ptr, + src_cast, copy_bytes, store_dead, ]); diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 9070a7368b168..00ec4b3e75457 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -2,6 +2,8 @@ #![feature(box_patterns)] #![feature(drain_filter)] #![feature(let_chains)] +#![feature(let_else)] +#![feature(entry_insert)] #![feature(map_try_insert)] #![feature(min_specialization)] #![feature(never_type)] From f7cbf2eb410915149e983b5a934567d0512e10e5 Mon Sep 17 00:00:00 2001 From: kadmin Date: Wed, 2 Jun 2021 03:47:05 +0000 Subject: [PATCH 301/501] Update with comments Changing a bunch of struct constructors to `from`, no extra destructuring, getting the type of the discriminant. 
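Concretely (paraphrasing the diff below, not new code): places that were built
by hand, e.g.

    Place { local: ..., projection: List::empty() }

now use the `From<Local>` conversion,

    Place::from(local_decls.push(LocalDecl::new(..., span)))

and the discriminant temporary is given the enum's actual discriminant type via
`adt_def.repr.discr_type().to_ty(tcx)` instead of the hard-coded
`tcx.types.isize`.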
--- .../rustc_mir/src/transform/large_enums.rs | 76 ++++++++++--------- 1 file changed, 41 insertions(+), 35 deletions(-) diff --git a/compiler/rustc_mir/src/transform/large_enums.rs b/compiler/rustc_mir/src/transform/large_enums.rs index f5bf40846e039..efda87a154771 100644 --- a/compiler/rustc_mir/src/transform/large_enums.rs +++ b/compiler/rustc_mir/src/transform/large_enums.rs @@ -1,8 +1,8 @@ use crate::transform::MirPass; -use crate::util::patch::MirPatch; use rustc_data_structures::stable_map::FxHashMap; use rustc_middle::mir::*; -use rustc_middle::ty::{self, Const, List, Ty, TyCtxt}; +use rustc_middle::ty::util::IntTypeExt; +use rustc_middle::ty::{self, Const, Ty, TyCtxt}; use rustc_span::def_id::DefId; use rustc_target::abi::{Size, TagEncoding, Variants}; @@ -60,7 +60,6 @@ impl EnumSizeOpt { fn optim(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let mut alloc_cache = FxHashMap::default(); let body_did = body.source.def_id(); - let mut patch = MirPatch::new(body); let (bbs, local_decls) = body.basic_blocks_and_local_decls_mut(); for bb in bbs { bb.expand_statements(|st| { @@ -70,15 +69,17 @@ impl EnumSizeOpt { Rvalue::Use(Operand::Copy(rhs) | Operand::Move(rhs)), )) => { let ty = lhs.ty(local_decls, tcx).ty; + let source_info = st.source_info; let span = source_info.span; let (total_size, num_variants, sizes) = - if let Some((ts, nv, s)) = Self::candidate(tcx, ty, body_did) { - (ts, nv, s) + if let Some(cand) = Self::candidate(tcx, ty, body_did) { + cand } else { return None; }; + let adt_def = ty.ty_adt_def().unwrap(); let alloc = if let Some(alloc) = alloc_cache.get(ty) { alloc @@ -111,13 +112,13 @@ impl EnumSizeOpt { Const::from_usize(tcx, num_variants), )); - let size_array_local = patch.new_temp(tmp_ty, span); + let size_array_local = local_decls.push(LocalDecl::new(tmp_ty, span)); let store_live = Statement { source_info, kind: StatementKind::StorageLive(size_array_local), }; - let place = Place { local: size_array_local, projection: List::empty() }; + let place = Place::from(size_array_local); let constant_vals = Constant { span, user_ty: None, @@ -133,11 +134,10 @@ impl EnumSizeOpt { kind: StatementKind::Assign(box (place, rval)), }; - let discr_place = Place { - // How do I get the discriminant type? - local: patch.new_temp(tcx.types.isize, span), - projection: List::empty(), - }; + let discr_place = Place::from( + local_decls + .push(LocalDecl::new(adt_def.repr.discr_type().to_ty(tcx), span)), + ); let store_discr = Statement { source_info, @@ -147,28 +147,40 @@ impl EnumSizeOpt { )), }; - // FIXME(jknodt) do I need to add a storage live here for this place? - let size_place = Place { - local: patch.new_temp(tcx.types.usize, span), - projection: List::empty(), + let discr_cast_place = + Place::from(local_decls.push(LocalDecl::new(tcx.types.usize, span))); + + let cast_discr = Statement { + source_info, + kind: StatementKind::Assign(box ( + discr_cast_place, + Rvalue::Cast( + CastKind::Misc, + Operand::Copy(discr_place), + tcx.types.usize, + ), + )), }; + // FIXME(jknodt) do I need to add a storage live here for this place? 
+ let size_place = + Place::from(local_decls.push(LocalDecl::new(tcx.types.usize, span))); + let store_size = Statement { source_info, kind: StatementKind::Assign(box ( size_place, Rvalue::Use(Operand::Copy(Place { local: size_array_local, - projection: tcx - .intern_place_elems(&[PlaceElem::Index(discr_place.local)]), + projection: tcx.intern_place_elems(&[PlaceElem::Index( + discr_cast_place.local, + )]), })), )), }; - let dst = Place { - local: patch.new_temp(tcx.mk_mut_ptr(ty), span), - projection: List::empty(), - }; + let dst = + Place::from(local_decls.push(LocalDecl::new(tcx.mk_mut_ptr(ty), span))); let dst_ptr = Statement { source_info, @@ -179,10 +191,8 @@ impl EnumSizeOpt { }; let dst_cast_ty = tcx.mk_mut_ptr(tcx.types.u8); - let dst_cast_place = Place { - local: patch.new_temp(dst_cast_ty, span), - projection: List::empty(), - }; + let dst_cast_place = + Place::from(local_decls.push(LocalDecl::new(dst_cast_ty, span))); let dst_cast = Statement { source_info, @@ -192,10 +202,8 @@ impl EnumSizeOpt { )), }; - let src = Place { - local: patch.new_temp(tcx.mk_imm_ptr(ty), span), - projection: List::empty(), - }; + let src = + Place::from(local_decls.push(LocalDecl::new(tcx.mk_imm_ptr(ty), span))); let src_ptr = Statement { source_info, @@ -206,10 +214,8 @@ impl EnumSizeOpt { }; let src_cast_ty = tcx.mk_imm_ptr(tcx.types.u8); - let src_cast_place = Place { - local: patch.new_temp(src_cast_ty, span), - projection: List::empty(), - }; + let src_cast_place = + Place::from(local_decls.push(LocalDecl::new(src_cast_ty, span))); let src_cast = Statement { source_info, @@ -245,6 +251,7 @@ impl EnumSizeOpt { store_live, const_assign, store_discr, + cast_discr, store_size, dst_ptr, dst_cast, @@ -261,6 +268,5 @@ impl EnumSizeOpt { } }); } - patch.apply(body); } } From 3e97cef7e5696a57f1b528b2bf551a2e3721100d Mon Sep 17 00:00:00 2001 From: kadmin Date: Mon, 7 Jun 2021 08:37:41 +0000 Subject: [PATCH 302/501] Set mir-opt-level = 0 on some codegen tests Since we're changing a bunch of stuff, necessary to remove some codegen tests which look for specific things. Also attempting to restart a test which timed out, maybe due to fastly failing? 
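Concretely, the affected codegen tests get the extra flag in their header
line, e.g. (taken from the diff below):

    // compile-flags: -C no-prepopulate-passes -Zmir-opt-level=0

so that MIR optimizations cannot rewrite the code those tests pattern-match
against.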
--- .../rustc_mir/src/transform/large_enums.rs | 62 ++++++++++--------- tests/codegen/consts.rs | 2 +- tests/codegen/function-arguments.rs | 2 +- 3 files changed, 36 insertions(+), 30 deletions(-) diff --git a/compiler/rustc_mir/src/transform/large_enums.rs b/compiler/rustc_mir/src/transform/large_enums.rs index efda87a154771..51bf880313a63 100644 --- a/compiler/rustc_mir/src/transform/large_enums.rs +++ b/compiler/rustc_mir/src/transform/large_enums.rs @@ -4,7 +4,8 @@ use rustc_middle::mir::*; use rustc_middle::ty::util::IntTypeExt; use rustc_middle::ty::{self, Const, Ty, TyCtxt}; use rustc_span::def_id::DefId; -use rustc_target::abi::{Size, TagEncoding, Variants}; +use rustc_target::abi::{HasDataLayout, Size, TagEncoding, Variants}; +use std::array::IntoIter; /// A pass that seeks to optimize unnecessary moves of large enum types, if there is a large /// enough discrepanc between them @@ -21,11 +22,10 @@ impl EnumSizeOpt { tcx: TyCtxt<'tcx>, ty: Ty<'tcx>, body_did: DefId, - ) -> Option<(Size, u64, Vec)> { + ) -> Option<(u64, Vec)> { match ty.kind() { ty::Adt(adt_def, _substs) if adt_def.is_enum() => { let p_e = tcx.param_env(body_did); - // FIXME(jknodt) handle error better below let layout = if let Ok(layout) = tcx.layout_of(p_e.and(ty)) { layout } else { return None }; let variants = &layout.variants; @@ -50,7 +50,7 @@ impl EnumSizeOpt { assert_eq!(discr_sizes[disc_idx], Size::ZERO); discr_sizes[disc_idx] = layout.size; } - Some((layout.size, variants.len() as u64, discr_sizes)) + Some((variants.len() as u64, discr_sizes)) } } } @@ -73,28 +73,44 @@ impl EnumSizeOpt { let source_info = st.source_info; let span = source_info.span; - let (total_size, num_variants, sizes) = + let (num_variants, sizes) = if let Some(cand) = Self::candidate(tcx, ty, body_did) { cand } else { return None; }; let adt_def = ty.ty_adt_def().unwrap(); - let alloc = if let Some(alloc) = alloc_cache.get(ty) { alloc } else { - let mut data = - vec![0; std::mem::size_of::() * num_variants as usize]; - + let data_layout = tcx.data_layout(); + let ptr_sized_int = data_layout.ptr_sized_integer(); + let target_bytes = ptr_sized_int.size().bytes() as usize; + let mut data = vec![0; target_bytes * num_variants as usize]; let mut curr = 0; - for byte in sizes - .iter() - .flat_map(|sz| sz.bytes().to_ne_bytes()) - .take(data.len()) - { - data[curr] = byte; - curr += 1; + macro_rules! encode_store { + ($endian: expr, $bytes: expr) => { + let bytes = match $endian { + rustc_target::abi::Endian::Little => $bytes.to_le_bytes(), + rustc_target::abi::Endian::Big => $bytes.to_be_bytes(), + }; + for b in bytes { + data[curr] = b; + curr += 1; + } + }; + } + + for sz in sizes { + match ptr_sized_int { + rustc_target::abi::Integer::I32 => { + encode_store!(data_layout.endian, sz.bytes() as u32); + } + rustc_target::abi::Integer::I64 => { + encode_store!(data_layout.endian, sz.bytes()); + } + _ => unreachable!(), + }; } let alloc = interpret::Allocation::from_bytes( data, @@ -162,7 +178,6 @@ impl EnumSizeOpt { )), }; - // FIXME(jknodt) do I need to add a storage live here for this place? 
let size_place = Place::from(local_decls.push(LocalDecl::new(tcx.types.usize, span))); @@ -230,16 +245,7 @@ impl EnumSizeOpt { kind: StatementKind::CopyNonOverlapping(box CopyNonOverlapping { src: Operand::Copy(src_cast_place), dst: Operand::Copy(dst_cast_place), - count: Operand::Constant( - box (Constant { - span, - user_ty: None, - literal: ConstantKind::Val( - interpret::ConstValue::from_u64(total_size.bytes()), - tcx.types.usize, - ), - }), - ), + count: Operand::Copy(size_place), }), }; @@ -247,7 +253,7 @@ impl EnumSizeOpt { source_info, kind: StatementKind::StorageDead(size_array_local), }; - let iter = std::array::IntoIter::new([ + let iter = IntoIter::new([ store_live, const_assign, store_discr, diff --git a/tests/codegen/consts.rs b/tests/codegen/consts.rs index 260d9de867087..9d75356b03dc9 100644 --- a/tests/codegen/consts.rs +++ b/tests/codegen/consts.rs @@ -1,4 +1,4 @@ -// compile-flags: -C no-prepopulate-passes +// compile-flags: -C no-prepopulate-passes -Zmir-opt-level=0 // min-llvm-version: 14.0 #![crate_type = "lib"] diff --git a/tests/codegen/function-arguments.rs b/tests/codegen/function-arguments.rs index 96dfde18683e3..020d9234e57cc 100644 --- a/tests/codegen/function-arguments.rs +++ b/tests/codegen/function-arguments.rs @@ -1,4 +1,4 @@ -// compile-flags: -O -C no-prepopulate-passes +// compile-flags: -O -C no-prepopulate-passes -Zmir-opt-level=0 #![crate_type = "lib"] From 5d9f5145ac9ce07d79aeb75ad049cab957b0fb92 Mon Sep 17 00:00:00 2001 From: kadmin Date: Wed, 16 Jun 2021 05:46:56 +0000 Subject: [PATCH 303/501] Rm allocation in candidate Instead of storing an extra array for discriminant values, create an allocation there and store those in an allocation immediately. --- .../rustc_mir/src/transform/large_enums.rs | 278 ----------------- .../rustc_mir_transform/src/large_enums.rs | 283 ++++++++++++++++++ compiler/rustc_mir_transform/src/lib.rs | 3 +- tests/codegen/consts.rs | 2 +- tests/codegen/function-arguments.rs | 2 +- .../enum_opt.cand.EnumSizeOpt.32bit.diff | 55 ++++ .../enum_opt.cand.EnumSizeOpt.64bit.diff | 55 ++++ .../enum_opt.invalid.EnumSizeOpt.32bit.diff | 35 +++ .../enum_opt.invalid.EnumSizeOpt.64bit.diff | 35 +++ tests/mir-opt/enum_opt.rs | 79 +++++ .../enum_opt.trunc.EnumSizeOpt.32bit.diff | 46 +++ .../enum_opt.trunc.EnumSizeOpt.64bit.diff | 46 +++ .../enum_opt.unin.EnumSizeOpt.32bit.diff | 54 ++++ .../enum_opt.unin.EnumSizeOpt.64bit.diff | 54 ++++ 14 files changed, 746 insertions(+), 281 deletions(-) delete mode 100644 compiler/rustc_mir/src/transform/large_enums.rs create mode 100644 compiler/rustc_mir_transform/src/large_enums.rs create mode 100644 tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff create mode 100644 tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff create mode 100644 tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff create mode 100644 tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff create mode 100644 tests/mir-opt/enum_opt.rs create mode 100644 tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff create mode 100644 tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff create mode 100644 tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff create mode 100644 tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff diff --git a/compiler/rustc_mir/src/transform/large_enums.rs b/compiler/rustc_mir/src/transform/large_enums.rs deleted file mode 100644 index 51bf880313a63..0000000000000 --- a/compiler/rustc_mir/src/transform/large_enums.rs +++ /dev/null @@ -1,278 +0,0 @@ -use crate::transform::MirPass; -use 
rustc_data_structures::stable_map::FxHashMap; -use rustc_middle::mir::*; -use rustc_middle::ty::util::IntTypeExt; -use rustc_middle::ty::{self, Const, Ty, TyCtxt}; -use rustc_span::def_id::DefId; -use rustc_target::abi::{HasDataLayout, Size, TagEncoding, Variants}; -use std::array::IntoIter; - -/// A pass that seeks to optimize unnecessary moves of large enum types, if there is a large -/// enough discrepanc between them -pub struct EnumSizeOpt; - -impl<'tcx, const D: u64> MirPass<'tcx> for EnumSizeOpt { - fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - self.optim(tcx, body); - } -} - -impl EnumSizeOpt { - fn candidate<'tcx>( - tcx: TyCtxt<'tcx>, - ty: Ty<'tcx>, - body_did: DefId, - ) -> Option<(u64, Vec)> { - match ty.kind() { - ty::Adt(adt_def, _substs) if adt_def.is_enum() => { - let p_e = tcx.param_env(body_did); - let layout = - if let Ok(layout) = tcx.layout_of(p_e.and(ty)) { layout } else { return None }; - let variants = &layout.variants; - match variants { - Variants::Single { .. } => None, - Variants::Multiple { variants, .. } if variants.len() <= 1 => None, - Variants::Multiple { tag_encoding, .. } - if matches!(tag_encoding, TagEncoding::Niche { .. }) => - { - None - } - Variants::Multiple { variants, .. } => { - let min = variants.iter().map(|v| v.size).min().unwrap(); - let max = variants.iter().map(|v| v.size).max().unwrap(); - if max.bytes() - min.bytes() < D { - return None; - } - let mut discr_sizes = vec![Size::ZERO; adt_def.discriminants(tcx).count()]; - for (var_idx, layout) in variants.iter_enumerated() { - let disc_idx = - adt_def.discriminant_for_variant(tcx, var_idx).val as usize; - assert_eq!(discr_sizes[disc_idx], Size::ZERO); - discr_sizes[disc_idx] = layout.size; - } - Some((variants.len() as u64, discr_sizes)) - } - } - } - _ => None, - } - } - fn optim(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - let mut alloc_cache = FxHashMap::default(); - let body_did = body.source.def_id(); - let (bbs, local_decls) = body.basic_blocks_and_local_decls_mut(); - for bb in bbs { - bb.expand_statements(|st| { - match &st.kind { - StatementKind::Assign(box ( - lhs, - Rvalue::Use(Operand::Copy(rhs) | Operand::Move(rhs)), - )) => { - let ty = lhs.ty(local_decls, tcx).ty; - - let source_info = st.source_info; - let span = source_info.span; - - let (num_variants, sizes) = - if let Some(cand) = Self::candidate(tcx, ty, body_did) { - cand - } else { - return None; - }; - let adt_def = ty.ty_adt_def().unwrap(); - let alloc = if let Some(alloc) = alloc_cache.get(ty) { - alloc - } else { - let data_layout = tcx.data_layout(); - let ptr_sized_int = data_layout.ptr_sized_integer(); - let target_bytes = ptr_sized_int.size().bytes() as usize; - let mut data = vec![0; target_bytes * num_variants as usize]; - let mut curr = 0; - macro_rules! 
encode_store { - ($endian: expr, $bytes: expr) => { - let bytes = match $endian { - rustc_target::abi::Endian::Little => $bytes.to_le_bytes(), - rustc_target::abi::Endian::Big => $bytes.to_be_bytes(), - }; - for b in bytes { - data[curr] = b; - curr += 1; - } - }; - } - - for sz in sizes { - match ptr_sized_int { - rustc_target::abi::Integer::I32 => { - encode_store!(data_layout.endian, sz.bytes() as u32); - } - rustc_target::abi::Integer::I64 => { - encode_store!(data_layout.endian, sz.bytes()); - } - _ => unreachable!(), - }; - } - let alloc = interpret::Allocation::from_bytes( - data, - tcx.data_layout.ptr_sized_integer().align(&tcx.data_layout).abi, - Mutability::Not, - ); - let alloc = tcx.intern_const_alloc(alloc); - alloc_cache.insert(ty, alloc); - // FIXME(jknodt) use entry API - alloc_cache.get(ty).unwrap() - }; - - let tmp_ty = tcx.mk_ty(ty::Array( - tcx.types.usize, - Const::from_usize(tcx, num_variants), - )); - - let size_array_local = local_decls.push(LocalDecl::new(tmp_ty, span)); - let store_live = Statement { - source_info, - kind: StatementKind::StorageLive(size_array_local), - }; - - let place = Place::from(size_array_local); - let constant_vals = Constant { - span, - user_ty: None, - literal: ConstantKind::Val( - interpret::ConstValue::ByRef { alloc, offset: Size::ZERO }, - tmp_ty, - ), - }; - let rval = Rvalue::Use(Operand::Constant(box (constant_vals))); - - let const_assign = Statement { - source_info, - kind: StatementKind::Assign(box (place, rval)), - }; - - let discr_place = Place::from( - local_decls - .push(LocalDecl::new(adt_def.repr.discr_type().to_ty(tcx), span)), - ); - - let store_discr = Statement { - source_info, - kind: StatementKind::Assign(box ( - discr_place, - Rvalue::Discriminant(*rhs), - )), - }; - - let discr_cast_place = - Place::from(local_decls.push(LocalDecl::new(tcx.types.usize, span))); - - let cast_discr = Statement { - source_info, - kind: StatementKind::Assign(box ( - discr_cast_place, - Rvalue::Cast( - CastKind::Misc, - Operand::Copy(discr_place), - tcx.types.usize, - ), - )), - }; - - let size_place = - Place::from(local_decls.push(LocalDecl::new(tcx.types.usize, span))); - - let store_size = Statement { - source_info, - kind: StatementKind::Assign(box ( - size_place, - Rvalue::Use(Operand::Copy(Place { - local: size_array_local, - projection: tcx.intern_place_elems(&[PlaceElem::Index( - discr_cast_place.local, - )]), - })), - )), - }; - - let dst = - Place::from(local_decls.push(LocalDecl::new(tcx.mk_mut_ptr(ty), span))); - - let dst_ptr = Statement { - source_info, - kind: StatementKind::Assign(box ( - dst, - Rvalue::AddressOf(Mutability::Mut, *lhs), - )), - }; - - let dst_cast_ty = tcx.mk_mut_ptr(tcx.types.u8); - let dst_cast_place = - Place::from(local_decls.push(LocalDecl::new(dst_cast_ty, span))); - - let dst_cast = Statement { - source_info, - kind: StatementKind::Assign(box ( - dst_cast_place, - Rvalue::Cast(CastKind::Misc, Operand::Copy(dst), dst_cast_ty), - )), - }; - - let src = - Place::from(local_decls.push(LocalDecl::new(tcx.mk_imm_ptr(ty), span))); - - let src_ptr = Statement { - source_info, - kind: StatementKind::Assign(box ( - src, - Rvalue::AddressOf(Mutability::Not, *rhs), - )), - }; - - let src_cast_ty = tcx.mk_imm_ptr(tcx.types.u8); - let src_cast_place = - Place::from(local_decls.push(LocalDecl::new(src_cast_ty, span))); - - let src_cast = Statement { - source_info, - kind: StatementKind::Assign(box ( - src_cast_place, - Rvalue::Cast(CastKind::Misc, Operand::Copy(src), src_cast_ty), - )), - }; - - let copy_bytes = 
Statement { - source_info, - kind: StatementKind::CopyNonOverlapping(box CopyNonOverlapping { - src: Operand::Copy(src_cast_place), - dst: Operand::Copy(dst_cast_place), - count: Operand::Copy(size_place), - }), - }; - - let store_dead = Statement { - source_info, - kind: StatementKind::StorageDead(size_array_local), - }; - let iter = IntoIter::new([ - store_live, - const_assign, - store_discr, - cast_discr, - store_size, - dst_ptr, - dst_cast, - src_ptr, - src_cast, - copy_bytes, - store_dead, - ]); - - st.make_nop(); - Some(iter) - } - _ => return None, - } - }); - } - } -} diff --git a/compiler/rustc_mir_transform/src/large_enums.rs b/compiler/rustc_mir_transform/src/large_enums.rs new file mode 100644 index 0000000000000..1919720de4996 --- /dev/null +++ b/compiler/rustc_mir_transform/src/large_enums.rs @@ -0,0 +1,283 @@ +use crate::rustc_middle::ty::util::IntTypeExt; +use crate::MirPass; +use rustc_data_structures::stable_map::FxHashMap; +use rustc_middle::mir::interpret::AllocId; +use rustc_middle::mir::*; +use rustc_middle::ty::{self, AdtDef, Const, ParamEnv, Ty, TyCtxt}; +use rustc_target::abi::{HasDataLayout, Size, TagEncoding, Variants}; + +/// A pass that seeks to optimize unnecessary moves of large enum types, if there is a large +/// enough discrepancy between them. +/// +/// i.e. If there is are two variants: +/// ``` +/// enum Example { +/// Small, +/// Large([u32; 1024]), +/// } +/// ``` +/// Instead of emitting moves of the large variant, +/// Perform a memcpy instead. +/// Based off of [this HackMD](https://hackmd.io/@ft4bxUsFT5CEUBmRKYHr7w/rJM8BBPzD). +pub struct EnumSizeOpt { + pub(crate) discrepancy: u64, +} + +impl<'tcx> MirPass<'tcx> for EnumSizeOpt { + fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let sess = tcx.sess; + if (!sess.opts.debugging_opts.unsound_mir_opts) || sess.mir_opt_level() < 3 { + return; + } + self.optim(tcx, body); + } +} + +impl EnumSizeOpt { + fn candidate<'tcx>( + &self, + tcx: TyCtxt<'tcx>, + param_env: ParamEnv<'tcx>, + ty: Ty<'tcx>, + alloc_cache: &mut FxHashMap, AllocId>, + ) -> Option<(AdtDef<'tcx>, usize, AllocId)> { + let adt_def = match ty.kind() { + ty::Adt(adt_def, _substs) if adt_def.is_enum() => adt_def, + _ => return None, + }; + let layout = tcx.layout_of(param_env.and(ty)).ok()?; + let variants = match &layout.variants { + Variants::Single { .. } => return None, + Variants::Multiple { tag_encoding, .. } + if matches!(tag_encoding, TagEncoding::Niche { .. }) => + { + return None; + } + Variants::Multiple { variants, .. } if variants.len() <= 1 => return None, + Variants::Multiple { variants, .. } => variants, + }; + let min = variants.iter().map(|v| v.size()).min().unwrap(); + let max = variants.iter().map(|v| v.size()).max().unwrap(); + if max.bytes() - min.bytes() < self.discrepancy { + return None; + } + + let num_discrs = adt_def.discriminants(tcx).count(); + if variants.iter_enumerated().any(|(var_idx, _)| { + let discr_for_var = adt_def.discriminant_for_variant(tcx, var_idx).val; + (discr_for_var > usize::MAX as u128) || (discr_for_var as usize >= num_discrs) + }) { + return None; + } + if let Some(alloc_id) = alloc_cache.get(&ty) { + return Some((*adt_def, num_discrs, *alloc_id)); + } + + let data_layout = tcx.data_layout(); + let ptr_sized_int = data_layout.ptr_sized_integer(); + let target_bytes = ptr_sized_int.size().bytes() as usize; + let mut data = vec![0; target_bytes * num_discrs]; + macro_rules! 
encode_store { + ($curr_idx: expr, $endian: expr, $bytes: expr) => { + let bytes = match $endian { + rustc_target::abi::Endian::Little => $bytes.to_le_bytes(), + rustc_target::abi::Endian::Big => $bytes.to_be_bytes(), + }; + for (i, b) in bytes.into_iter().enumerate() { + data[$curr_idx + i] = b; + } + }; + } + + for (var_idx, layout) in variants.iter_enumerated() { + let curr_idx = + target_bytes * adt_def.discriminant_for_variant(tcx, var_idx).val as usize; + let sz = layout.size(); + match ptr_sized_int { + rustc_target::abi::Integer::I32 => { + encode_store!(curr_idx, data_layout.endian, sz.bytes() as u32); + } + rustc_target::abi::Integer::I64 => { + encode_store!(curr_idx, data_layout.endian, sz.bytes()); + } + _ => unreachable!(), + }; + } + let alloc = interpret::Allocation::from_bytes( + data, + tcx.data_layout.ptr_sized_integer().align(&tcx.data_layout).abi, + Mutability::Not, + ); + let alloc = tcx.create_memory_alloc(tcx.intern_const_alloc(alloc)); + Some((*adt_def, num_discrs, *alloc_cache.entry(ty).or_insert(alloc))) + } + fn optim<'tcx>(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + let mut alloc_cache = FxHashMap::default(); + let body_did = body.source.def_id(); + let param_env = tcx.param_env(body_did); + let (bbs, local_decls) = body.basic_blocks_and_local_decls_mut(); + for bb in bbs { + bb.expand_statements(|st| { + if let StatementKind::Assign(box ( + lhs, + Rvalue::Use(Operand::Copy(rhs) | Operand::Move(rhs)), + )) = &st.kind + { + let ty = lhs.ty(local_decls, tcx).ty; + + let source_info = st.source_info; + let span = source_info.span; + + let (adt_def, num_variants, alloc_id) = + self.candidate(tcx, param_env, ty, &mut alloc_cache)?; + let alloc = tcx.global_alloc(alloc_id).unwrap_memory(); + + let tmp_ty = tcx.mk_ty(ty::Array( + tcx.types.usize, + Const::from_usize(tcx, num_variants as u64), + )); + + let size_array_local = local_decls.push(LocalDecl::new(tmp_ty, span)); + let store_live = Statement { + source_info, + kind: StatementKind::StorageLive(size_array_local), + }; + + let place = Place::from(size_array_local); + let constant_vals = Constant { + span, + user_ty: None, + literal: ConstantKind::Val( + interpret::ConstValue::ByRef { alloc, offset: Size::ZERO }, + tmp_ty, + ), + }; + let rval = Rvalue::Use(Operand::Constant(box (constant_vals))); + + let const_assign = + Statement { source_info, kind: StatementKind::Assign(box (place, rval)) }; + + let discr_place = Place::from( + local_decls + .push(LocalDecl::new(adt_def.repr().discr_type().to_ty(tcx), span)), + ); + + let store_discr = Statement { + source_info, + kind: StatementKind::Assign(box (discr_place, Rvalue::Discriminant(*rhs))), + }; + + let discr_cast_place = + Place::from(local_decls.push(LocalDecl::new(tcx.types.usize, span))); + + let cast_discr = Statement { + source_info, + kind: StatementKind::Assign(box ( + discr_cast_place, + Rvalue::Cast( + CastKind::Misc, + Operand::Copy(discr_place), + tcx.types.usize, + ), + )), + }; + + let size_place = + Place::from(local_decls.push(LocalDecl::new(tcx.types.usize, span))); + + let store_size = Statement { + source_info, + kind: StatementKind::Assign(box ( + size_place, + Rvalue::Use(Operand::Copy(Place { + local: size_array_local, + projection: tcx.intern_place_elems(&[PlaceElem::Index( + discr_cast_place.local, + )]), + })), + )), + }; + + let dst = + Place::from(local_decls.push(LocalDecl::new(tcx.mk_mut_ptr(ty), span))); + + let dst_ptr = Statement { + source_info, + kind: StatementKind::Assign(box ( + dst, + 
Rvalue::AddressOf(Mutability::Mut, *lhs), + )), + }; + + let dst_cast_ty = tcx.mk_mut_ptr(tcx.types.u8); + let dst_cast_place = + Place::from(local_decls.push(LocalDecl::new(dst_cast_ty, span))); + + let dst_cast = Statement { + source_info, + kind: StatementKind::Assign(box ( + dst_cast_place, + Rvalue::Cast(CastKind::Misc, Operand::Copy(dst), dst_cast_ty), + )), + }; + + let src = + Place::from(local_decls.push(LocalDecl::new(tcx.mk_imm_ptr(ty), span))); + + let src_ptr = Statement { + source_info, + kind: StatementKind::Assign(box ( + src, + Rvalue::AddressOf(Mutability::Not, *rhs), + )), + }; + + let src_cast_ty = tcx.mk_imm_ptr(tcx.types.u8); + let src_cast_place = + Place::from(local_decls.push(LocalDecl::new(src_cast_ty, span))); + + let src_cast = Statement { + source_info, + kind: StatementKind::Assign(box ( + src_cast_place, + Rvalue::Cast(CastKind::Misc, Operand::Copy(src), src_cast_ty), + )), + }; + + let copy_bytes = Statement { + source_info, + kind: StatementKind::CopyNonOverlapping(box CopyNonOverlapping { + src: Operand::Copy(src_cast_place), + dst: Operand::Copy(dst_cast_place), + count: Operand::Copy(size_place), + }), + }; + + let store_dead = Statement { + source_info, + kind: StatementKind::StorageDead(size_array_local), + }; + let iter = [ + store_live, + const_assign, + store_discr, + cast_discr, + store_size, + dst_ptr, + dst_cast, + src_ptr, + src_cast, + copy_bytes, + store_dead, + ] + .into_iter(); + + st.make_nop(); + Some(iter) + } else { + None + } + }); + } + } +} diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 00ec4b3e75457..8cd268eb6ce13 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -3,7 +3,6 @@ #![feature(drain_filter)] #![feature(let_chains)] #![feature(let_else)] -#![feature(entry_insert)] #![feature(map_try_insert)] #![feature(min_specialization)] #![feature(never_type)] @@ -75,6 +74,7 @@ mod function_item_references; mod generator; mod inline; mod instcombine; +mod large_enums; mod lower_intrinsics; mod lower_slice_len; mod match_branches; @@ -547,6 +547,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { tcx, body, &[ + &large_enums::EnumSizeOpt { discrepancy: 128 }, &reveal_all::RevealAll, // has to be done before inlining, since inlined code is in RevealAll mode. &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise actual call will be almost always inlined. 
Also simple, so can just do first &unreachable_prop::UnreachablePropagation, diff --git a/tests/codegen/consts.rs b/tests/codegen/consts.rs index 9d75356b03dc9..260d9de867087 100644 --- a/tests/codegen/consts.rs +++ b/tests/codegen/consts.rs @@ -1,4 +1,4 @@ -// compile-flags: -C no-prepopulate-passes -Zmir-opt-level=0 +// compile-flags: -C no-prepopulate-passes // min-llvm-version: 14.0 #![crate_type = "lib"] diff --git a/tests/codegen/function-arguments.rs b/tests/codegen/function-arguments.rs index 020d9234e57cc..96dfde18683e3 100644 --- a/tests/codegen/function-arguments.rs +++ b/tests/codegen/function-arguments.rs @@ -1,4 +1,4 @@ -// compile-flags: -O -C no-prepopulate-passes -Zmir-opt-level=0 +// compile-flags: -O -C no-prepopulate-passes #![crate_type = "lib"] diff --git a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff new file mode 100644 index 0000000000000..d9923ec7cba7d --- /dev/null +++ b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff @@ -0,0 +1,55 @@ +- // MIR for `cand` before EnumSizeOpt ++ // MIR for `cand` after EnumSizeOpt + + fn cand() -> () { + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:49:15: 49:15 + let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:50:7: 50:12 + let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:51:7: 51:34 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:51:24: 51:33 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _8: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 + scope 1 { + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:50:7: 50:12 + } + + bb0: { + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:50:7: 50:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 + ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:51:24: 51:33 + _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:51:24: 51:33 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:51:33: 51:34 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _4 = const [2_usize, 8196_usize]; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ 
copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:51:33: 51:34 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:49:15: 52:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:52:1: 52:2 + return; // scope 0 at $DIR/enum_opt.rs:52:2: 52:2 + } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/enum_opt.rs:49:1: 52:2 + } + } + diff --git a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff new file mode 100644 index 0000000000000..e79d2f67a8398 --- /dev/null +++ b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff @@ -0,0 +1,55 @@ +- // MIR for `cand` before EnumSizeOpt ++ // MIR for `cand` after EnumSizeOpt + + fn cand() -> () { + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:49:15: 49:15 + let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:50:7: 50:12 + let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:51:7: 51:34 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:51:24: 51:33 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _8: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 + scope 1 { + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:50:7: 50:12 + } + + bb0: { + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:50:7: 50:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 + ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:51:24: 51:33 + _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:51:24: 51:33 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:51:33: 51:34 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _4 = const [2_usize, 8200_usize]; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:51:33: 51:34 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:49:15: 52:2 + StorageDead(_1); // scope 0 at 
$DIR/enum_opt.rs:52:1: 52:2 + return; // scope 0 at $DIR/enum_opt.rs:52:2: 52:2 + } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/enum_opt.rs:49:1: 52:2 + } + } + diff --git a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff new file mode 100644 index 0000000000000..d8b6a79401518 --- /dev/null +++ b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff @@ -0,0 +1,35 @@ +- // MIR for `invalid` before EnumSizeOpt ++ // MIR for `invalid` after EnumSizeOpt + + fn invalid() -> () { + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:55:18: 55:18 + let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:56:7: 56:12 + let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:57:7: 57:36 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:57:26: 57:35 + scope 1 { + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:56:7: 56:12 + } + + bb0: { + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:56:7: 56:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:56:15: 56:29 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:56:15: 56:29 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:57:26: 57:35 + _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:57:26: 57:35 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:57:35: 57:36 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:57:3: 57:36 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:57:35: 57:36 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:55:18: 58:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:58:1: 58:2 + return; // scope 0 at $DIR/enum_opt.rs:58:2: 58:2 + } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/enum_opt.rs:55:1: 58:2 + } + } + diff --git a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff new file mode 100644 index 0000000000000..d8b6a79401518 --- /dev/null +++ b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff @@ -0,0 +1,35 @@ +- // MIR for `invalid` before EnumSizeOpt ++ // MIR for `invalid` after EnumSizeOpt + + fn invalid() -> () { + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:55:18: 55:18 + let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:56:7: 56:12 + let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:57:7: 57:36 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:57:26: 57:35 + scope 1 { + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:56:7: 56:12 + } + + bb0: { + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:56:7: 56:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:56:15: 56:29 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:56:15: 56:29 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:57:26: 57:35 + _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:57:26: 57:35 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:57:35: 57:36 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:57:3: 57:36 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:57:35: 57:36 + _0 = const (); // scope 0 
at $DIR/enum_opt.rs:55:18: 58:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:58:1: 58:2 + return; // scope 0 at $DIR/enum_opt.rs:58:2: 58:2 + } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/enum_opt.rs:55:1: 58:2 + } + } + diff --git a/tests/mir-opt/enum_opt.rs b/tests/mir-opt/enum_opt.rs new file mode 100644 index 0000000000000..bc72c93da09f3 --- /dev/null +++ b/tests/mir-opt/enum_opt.rs @@ -0,0 +1,79 @@ +// EMIT_MIR_FOR_EACH_BIT_WIDTH +// compile-flags: -Zunsound-mir-opts -Zmir-opt-level=3 +#![feature(arbitrary_enum_discriminant, repr128)] + +// Tests that an enum with a variant with no data gets correctly transformed. +pub enum NoData { + None, + Large([u64; 1024]), +} + +// Tests that an enum with a variant with data that is a valid candidate gets transformed. +pub enum Candidate { + Small(u8), + Large([u64; 1024]), +} + +// Tests that an enum which has a discriminant much higher than the variant does not get +// tformed. +#[repr(u32)] +pub enum InvalidIdxs { + A = 302, + Large([u64; 1024]), +} + +// Tests that an enum with too high of a discriminant index (not in bounds of usize) does not +// get tformed. +#[repr(u128)] +pub enum Truncatable { + A = 0, + B([u8; 1024]) = 1, + C([u8; 4096]) = 0x10000000000000001, +} + +// Tests that an enum with discriminants in random order still gets tformed correctly. +#[repr(u32)] +pub enum RandOrderDiscr { + A = 13, + B([u8; 1024]) = 5, + C = 7, +} + +// EMIT_MIR enum_opt.unin.EnumSizeOpt.diff +pub fn unin() { + let mut a = NoData::None; + a = NoData::Large([1; 1024]); +} + +// EMIT_MIR enum_opt.cand.EnumSizeOpt.diff +pub fn cand() { + let mut a = Candidate::Small(1); + a = Candidate::Large([1; 1024]); +} + +// EMIT_MIR enum_opt.invalid.EnumSizeOpt.diff +pub fn invalid() { + let mut a = InvalidIdxs::A; + a = InvalidIdxs::Large([0; 1024]); +} + +// EMIT_MIR enum_opt.trunc.EnumSizeOpt.diff +pub fn trunc() { + let mut a = Truncatable::A; + a = Truncatable::B([0; 1024]); + a = Truncatable::C([0; 4096]); +} + +pub fn rand_order() { + let mut a = RandOrderDiscr::A; + a = RandOrderDiscr::B([0; 1024]); + a = RandOrderDiscr::C; +} + +pub fn main() { + unin(); + cand(); + invalid(); + trunc(); + rand_order(); +} diff --git a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff new file mode 100644 index 0000000000000..650c6695f3f00 --- /dev/null +++ b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff @@ -0,0 +1,46 @@ +- // MIR for `trunc` before EnumSizeOpt ++ // MIR for `trunc` after EnumSizeOpt + + fn trunc() -> () { + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:61:16: 61:16 + let mut _1: Truncatable; // in scope 0 at $DIR/enum_opt.rs:62:7: 62:12 + let mut _2: Truncatable; // in scope 0 at $DIR/enum_opt.rs:63:7: 63:32 + let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:63:22: 63:31 + let mut _4: Truncatable; // in scope 0 at $DIR/enum_opt.rs:64:7: 64:32 + let mut _5: [u8; 4096]; // in scope 0 at $DIR/enum_opt.rs:64:22: 64:31 + scope 1 { + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:62:7: 62:12 + } + + bb0: { + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:62:7: 62:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:62:15: 62:29 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:62:15: 62:29 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:63:22: 63:31 + _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:63:22: 63:31 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 + ((_2 as B).0: [u8; 
1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:63:31: 63:32 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:63:3: 63:32 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:63:31: 63:32 + StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 + _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 + Deinit(_4); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 + _1 = move _4; // scope 1 at $DIR/enum_opt.rs:64:3: 64:32 + StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:61:16: 65:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:65:1: 65:2 + return; // scope 0 at $DIR/enum_opt.rs:65:2: 65:2 + } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/enum_opt.rs:61:1: 65:2 + } + } + diff --git a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff new file mode 100644 index 0000000000000..650c6695f3f00 --- /dev/null +++ b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff @@ -0,0 +1,46 @@ +- // MIR for `trunc` before EnumSizeOpt ++ // MIR for `trunc` after EnumSizeOpt + + fn trunc() -> () { + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:61:16: 61:16 + let mut _1: Truncatable; // in scope 0 at $DIR/enum_opt.rs:62:7: 62:12 + let mut _2: Truncatable; // in scope 0 at $DIR/enum_opt.rs:63:7: 63:32 + let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:63:22: 63:31 + let mut _4: Truncatable; // in scope 0 at $DIR/enum_opt.rs:64:7: 64:32 + let mut _5: [u8; 4096]; // in scope 0 at $DIR/enum_opt.rs:64:22: 64:31 + scope 1 { + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:62:7: 62:12 + } + + bb0: { + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:62:7: 62:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:62:15: 62:29 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:62:15: 62:29 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:63:22: 63:31 + _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:63:22: 63:31 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 + ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:63:31: 63:32 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:63:3: 63:32 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:63:31: 63:32 + StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 + _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 + Deinit(_4); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 + _1 = move _4; // scope 1 at $DIR/enum_opt.rs:64:3: 64:32 + StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:61:16: 65:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:65:1: 65:2 + return; 
// scope 0 at $DIR/enum_opt.rs:65:2: 65:2 + } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/enum_opt.rs:61:1: 65:2 + } + } + diff --git a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff new file mode 100644 index 0000000000000..c034c127ecad0 --- /dev/null +++ b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff @@ -0,0 +1,54 @@ +- // MIR for `unin` before EnumSizeOpt ++ // MIR for `unin` after EnumSizeOpt + + fn unin() -> () { + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:43:15: 43:15 + let mut _1: NoData; // in scope 0 at $DIR/enum_opt.rs:44:7: 44:12 + let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:45:7: 45:31 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:45:21: 45:30 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _8: *mut NoData; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 + scope 1 { + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:44:7: 44:12 + } + + bb0: { + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:44:7: 44:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:44:15: 44:27 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:44:15: 44:27 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:45:21: 45:30 + _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:45:21: 45:30 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:45:30: 45:31 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _4 = const [4_usize, 8196_usize]; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:45:30: 45:31 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:43:15: 46:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:46:1: 46:2 + return; // scope 0 at $DIR/enum_opt.rs:46:2: 46:2 + } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/enum_opt.rs:43:1: 46:2 + } + } + diff --git a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff new file mode 100644 index 0000000000000..9389c0f12a048 --- /dev/null +++ b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff @@ -0,0 +1,54 @@ +- // MIR for `unin` before 
EnumSizeOpt ++ // MIR for `unin` after EnumSizeOpt + + fn unin() -> () { + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:43:15: 43:15 + let mut _1: NoData; // in scope 0 at $DIR/enum_opt.rs:44:7: 44:12 + let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:45:7: 45:31 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:45:21: 45:30 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _8: *mut NoData; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 + scope 1 { + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:44:7: 44:12 + } + + bb0: { + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:44:7: 44:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:44:15: 44:27 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:44:15: 44:27 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:45:21: 45:30 + _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:45:21: 45:30 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:45:30: 45:31 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _4 = const [8_usize, 8200_usize]; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:45:30: 45:31 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:43:15: 46:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:46:1: 46:2 + return; // scope 0 at $DIR/enum_opt.rs:46:2: 46:2 + } + + bb1 (cleanup): { + resume; // scope 0 at $DIR/enum_opt.rs:43:1: 46:2 + } + } + From 610e1a1e05e7160d775f433855138f6063169c56 Mon Sep 17 00:00:00 2001 From: kadmin Date: Tue, 19 Apr 2022 15:39:49 +0000 Subject: [PATCH 304/501] Add tag for ignoring wasm --- .../enum_opt.cand.EnumSizeOpt.32bit.diff | 82 +++++++++---------- .../enum_opt.cand.EnumSizeOpt.64bit.diff | 82 +++++++++---------- .../enum_opt.invalid.EnumSizeOpt.32bit.diff | 42 +++++----- .../enum_opt.invalid.EnumSizeOpt.64bit.diff | 42 +++++----- tests/mir-opt/enum_opt.rs | 1 + .../enum_opt.trunc.EnumSizeOpt.32bit.diff | 64 +++++++-------- .../enum_opt.trunc.EnumSizeOpt.64bit.diff | 64 +++++++-------- .../enum_opt.unin.EnumSizeOpt.32bit.diff | 80 +++++++++--------- .../enum_opt.unin.EnumSizeOpt.64bit.diff | 80 
+++++++++--------- 9 files changed, 269 insertions(+), 268 deletions(-) diff --git a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff index d9923ec7cba7d..859fddd65c1ae 100644 --- a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff @@ -2,54 +2,54 @@ + // MIR for `cand` after EnumSizeOpt fn cand() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:49:15: 49:15 - let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:50:7: 50:12 - let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:51:7: 51:34 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:51:24: 51:33 -+ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _8: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:50:15: 50:15 + let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:51:7: 51:12 + let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:52:7: 52:34 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:52:24: 52:33 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _8: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:50:7: 50:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:51:7: 51:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:50:7: 50:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 - ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:51:24: 51:33 - _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:51:24: 51:33 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:51:33: 51:34 -- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _4 = const [2_usize, 8196_usize]; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _9 = _8 as *mut u8 (Misc); // scope 1 
at $DIR/enum_opt.rs:51:3: 51:34 -+ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:51:33: 51:34 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:49:15: 52:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:52:1: 52:2 - return; // scope 0 at $DIR/enum_opt.rs:52:2: 52:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:51:7: 51:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 + ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:52:24: 52:33 + _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:52:24: 52:33 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:52:33: 52:34 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _4 = const [2_usize, 8196_usize]; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:52:33: 52:34 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:50:15: 53:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:53:1: 53:2 + return; // scope 0 at $DIR/enum_opt.rs:53:2: 53:2 } bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:49:1: 52:2 + resume; // scope 0 at $DIR/enum_opt.rs:50:1: 53:2 } } diff --git a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff index e79d2f67a8398..c3325f2d81f6e 100644 --- a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff @@ -2,54 +2,54 @@ + // MIR for `cand` after EnumSizeOpt fn cand() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:49:15: 49:15 - let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:50:7: 50:12 - let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:51:7: 51:34 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:51:24: 51:33 -+ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _8: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _9: *mut u8; // in scope 0 at 
$DIR/enum_opt.rs:51:3: 51:34 -+ let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 -+ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:51:3: 51:34 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:50:15: 50:15 + let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:51:7: 51:12 + let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:52:7: 52:34 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:52:24: 52:33 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _8: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:50:7: 50:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:51:7: 51:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:50:7: 50:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 - ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:50:15: 50:34 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:51:24: 51:33 - _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:51:24: 51:33 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:51:7: 51:34 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:51:33: 51:34 -- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _4 = const [2_usize, 8200_usize]; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 -+ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:51:3: 51:34 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:51:33: 51:34 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:49:15: 52:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:52:1: 52:2 - return; // scope 0 at $DIR/enum_opt.rs:52:2: 52:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:51:7: 51:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 + ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:52:24: 52:33 + _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:52:24: 52:33 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 + ((_2 as 
Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:52:33: 52:34 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _4 = const [2_usize, 8200_usize]; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:52:33: 52:34 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:50:15: 53:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:53:1: 53:2 + return; // scope 0 at $DIR/enum_opt.rs:53:2: 53:2 } bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:49:1: 52:2 + resume; // scope 0 at $DIR/enum_opt.rs:50:1: 53:2 } } diff --git a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff index d8b6a79401518..bc16a780683af 100644 --- a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff @@ -2,34 +2,34 @@ + // MIR for `invalid` after EnumSizeOpt fn invalid() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:55:18: 55:18 - let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:56:7: 56:12 - let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:57:7: 57:36 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:57:26: 57:35 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:56:18: 56:18 + let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:57:7: 57:12 + let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:58:7: 58:36 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:58:26: 58:35 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:56:7: 56:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:57:7: 57:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:56:7: 56:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:56:15: 56:29 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:56:15: 56:29 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:57:26: 57:35 - _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:57:26: 57:35 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:57:35: 57:36 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:57:3: 57:36 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:57:35: 57:36 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:55:18: 58:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:58:1: 58:2 - return; // scope 0 at $DIR/enum_opt.rs:58:2: 58:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:57:7: 57:12 + Deinit(_1); // scope 
0 at $DIR/enum_opt.rs:57:15: 57:29 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:57:15: 57:29 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:58:26: 58:35 + _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:58:26: 58:35 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:58:35: 58:36 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:58:3: 58:36 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:58:35: 58:36 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:56:18: 59:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:59:1: 59:2 + return; // scope 0 at $DIR/enum_opt.rs:59:2: 59:2 } bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:55:1: 58:2 + resume; // scope 0 at $DIR/enum_opt.rs:56:1: 59:2 } } diff --git a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff index d8b6a79401518..bc16a780683af 100644 --- a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff @@ -2,34 +2,34 @@ + // MIR for `invalid` after EnumSizeOpt fn invalid() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:55:18: 55:18 - let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:56:7: 56:12 - let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:57:7: 57:36 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:57:26: 57:35 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:56:18: 56:18 + let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:57:7: 57:12 + let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:58:7: 58:36 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:58:26: 58:35 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:56:7: 56:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:57:7: 57:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:56:7: 56:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:56:15: 56:29 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:56:15: 56:29 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:57:26: 57:35 - _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:57:26: 57:35 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:57:7: 57:36 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:57:35: 57:36 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:57:3: 57:36 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:57:35: 57:36 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:55:18: 58:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:58:1: 58:2 - return; // scope 0 at $DIR/enum_opt.rs:58:2: 58:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:57:7: 57:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:57:15: 57:29 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:57:15: 57:29 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:58:26: 58:35 + _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:58:26: 58:35 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 
at $DIR/enum_opt.rs:58:7: 58:36 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:58:35: 58:36 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:58:3: 58:36 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:58:35: 58:36 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:56:18: 59:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:59:1: 59:2 + return; // scope 0 at $DIR/enum_opt.rs:59:2: 59:2 } bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:55:1: 58:2 + resume; // scope 0 at $DIR/enum_opt.rs:56:1: 59:2 } } diff --git a/tests/mir-opt/enum_opt.rs b/tests/mir-opt/enum_opt.rs index bc72c93da09f3..65f77af0592ea 100644 --- a/tests/mir-opt/enum_opt.rs +++ b/tests/mir-opt/enum_opt.rs @@ -1,5 +1,6 @@ // EMIT_MIR_FOR_EACH_BIT_WIDTH // compile-flags: -Zunsound-mir-opts -Zmir-opt-level=3 +// ignore-wasm32-bare compiled with panic=abort by default #![feature(arbitrary_enum_discriminant, repr128)] // Tests that an enum with a variant with no data gets correctly transformed. diff --git a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff index 650c6695f3f00..654e385bfe785 100644 --- a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff @@ -2,45 +2,45 @@ + // MIR for `trunc` after EnumSizeOpt fn trunc() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:61:16: 61:16 - let mut _1: Truncatable; // in scope 0 at $DIR/enum_opt.rs:62:7: 62:12 - let mut _2: Truncatable; // in scope 0 at $DIR/enum_opt.rs:63:7: 63:32 - let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:63:22: 63:31 - let mut _4: Truncatable; // in scope 0 at $DIR/enum_opt.rs:64:7: 64:32 - let mut _5: [u8; 4096]; // in scope 0 at $DIR/enum_opt.rs:64:22: 64:31 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:62:16: 62:16 + let mut _1: Truncatable; // in scope 0 at $DIR/enum_opt.rs:63:7: 63:12 + let mut _2: Truncatable; // in scope 0 at $DIR/enum_opt.rs:64:7: 64:32 + let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:64:22: 64:31 + let mut _4: Truncatable; // in scope 0 at $DIR/enum_opt.rs:65:7: 65:32 + let mut _5: [u8; 4096]; // in scope 0 at $DIR/enum_opt.rs:65:22: 65:31 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:62:7: 62:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:63:7: 63:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:62:7: 62:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:62:15: 62:29 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:62:15: 62:29 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:63:22: 63:31 - _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:63:22: 63:31 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 - ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:63:31: 63:32 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:63:3: 63:32 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:63:31: 63:32 - StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 - _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 - Deinit(_4); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - 
discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 - _1 = move _4; // scope 1 at $DIR/enum_opt.rs:64:3: 64:32 - StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:61:16: 65:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:65:1: 65:2 - return; // scope 0 at $DIR/enum_opt.rs:65:2: 65:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:63:7: 63:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:63:15: 63:29 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:63:15: 63:29 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 + _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:64:3: 64:32 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 + StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 + StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:65:22: 65:31 + _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:65:22: 65:31 + Deinit(_4); // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 + ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 + discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 + StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:65:31: 65:32 + _1 = move _4; // scope 1 at $DIR/enum_opt.rs:65:3: 65:32 + StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:65:31: 65:32 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:62:16: 66:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:66:1: 66:2 + return; // scope 0 at $DIR/enum_opt.rs:66:2: 66:2 } bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:61:1: 65:2 + resume; // scope 0 at $DIR/enum_opt.rs:62:1: 66:2 } } diff --git a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff index 650c6695f3f00..654e385bfe785 100644 --- a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff @@ -2,45 +2,45 @@ + // MIR for `trunc` after EnumSizeOpt fn trunc() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:61:16: 61:16 - let mut _1: Truncatable; // in scope 0 at $DIR/enum_opt.rs:62:7: 62:12 - let mut _2: Truncatable; // in scope 0 at $DIR/enum_opt.rs:63:7: 63:32 - let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:63:22: 63:31 - let mut _4: Truncatable; // in scope 0 at $DIR/enum_opt.rs:64:7: 64:32 - let mut _5: [u8; 4096]; // in scope 0 at $DIR/enum_opt.rs:64:22: 64:31 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:62:16: 62:16 + let mut _1: Truncatable; // in scope 0 at $DIR/enum_opt.rs:63:7: 63:12 + let mut _2: Truncatable; // in scope 0 at $DIR/enum_opt.rs:64:7: 64:32 + let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:64:22: 64:31 + let mut _4: Truncatable; // in scope 0 at $DIR/enum_opt.rs:65:7: 65:32 + let mut _5: [u8; 4096]; // in scope 0 at $DIR/enum_opt.rs:65:22: 65:31 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:62:7: 62:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:63:7: 63:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:62:7: 62:12 - Deinit(_1); // scope 
0 at $DIR/enum_opt.rs:62:15: 62:29 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:62:15: 62:29 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:63:22: 63:31 - _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:63:22: 63:31 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 - ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:63:7: 63:32 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:63:31: 63:32 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:63:3: 63:32 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:63:31: 63:32 - StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 - _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 - Deinit(_4); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 - _1 = move _4; // scope 1 at $DIR/enum_opt.rs:64:3: 64:32 - StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:61:16: 65:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:65:1: 65:2 - return; // scope 0 at $DIR/enum_opt.rs:65:2: 65:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:63:7: 63:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:63:15: 63:29 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:63:15: 63:29 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 + _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:64:3: 64:32 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 + StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 + StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:65:22: 65:31 + _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:65:22: 65:31 + Deinit(_4); // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 + ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 + discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 + StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:65:31: 65:32 + _1 = move _4; // scope 1 at $DIR/enum_opt.rs:65:3: 65:32 + StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:65:31: 65:32 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:62:16: 66:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:66:1: 66:2 + return; // scope 0 at $DIR/enum_opt.rs:66:2: 66:2 } bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:61:1: 65:2 + resume; // scope 0 at $DIR/enum_opt.rs:62:1: 66:2 } } diff --git a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff index c034c127ecad0..bc72e507d5ab7 100644 --- a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff @@ -2,53 +2,53 @@ + // MIR for `unin` after EnumSizeOpt fn unin() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:43:15: 43:15 - let mut _1: 
NoData; // in scope 0 at $DIR/enum_opt.rs:44:7: 44:12 - let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:45:7: 45:31 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:45:21: 45:30 -+ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _8: *mut NoData; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:44:15: 44:15 + let mut _1: NoData; // in scope 0 at $DIR/enum_opt.rs:45:7: 45:12 + let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:46:7: 46:31 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:46:21: 46:30 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _8: *mut NoData; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:44:7: 44:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:45:7: 45:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:44:7: 44:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:44:15: 44:27 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:44:15: 44:27 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:45:21: 45:30 - _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:45:21: 45:30 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:45:30: 45:31 -- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _4 = const [4_usize, 8196_usize]; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:45:30: 45:31 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:43:15: 46:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:46:1: 46:2 - return; // scope 0 at $DIR/enum_opt.rs:46:2: 46:2 + StorageLive(_1); // scope 0 at 
$DIR/enum_opt.rs:45:7: 45:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:45:15: 45:27 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:45:15: 45:27 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:46:21: 46:30 + _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:46:21: 46:30 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:46:30: 46:31 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _4 = const [4_usize, 8196_usize]; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:46:30: 46:31 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:44:15: 47:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:47:1: 47:2 + return; // scope 0 at $DIR/enum_opt.rs:47:2: 47:2 } bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:43:1: 46:2 + resume; // scope 0 at $DIR/enum_opt.rs:44:1: 47:2 } } diff --git a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff index 9389c0f12a048..9001268dc2087 100644 --- a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff @@ -2,53 +2,53 @@ + // MIR for `unin` after EnumSizeOpt fn unin() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:43:15: 43:15 - let mut _1: NoData; // in scope 0 at $DIR/enum_opt.rs:44:7: 44:12 - let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:45:7: 45:31 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:45:21: 45:30 -+ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _8: *mut NoData; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 -+ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:45:3: 45:31 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:44:15: 44:15 + let mut _1: NoData; // in scope 0 at $DIR/enum_opt.rs:45:7: 45:12 + let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:46:7: 46:31 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:46:21: 46:30 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _7: usize; 
// in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _8: *mut NoData; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:44:7: 44:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:45:7: 45:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:44:7: 44:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:44:15: 44:27 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:44:15: 44:27 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:45:21: 45:30 - _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:45:21: 45:30 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:45:7: 45:31 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:45:30: 45:31 -- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _4 = const [8_usize, 8200_usize]; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 -+ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:45:3: 45:31 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:45:30: 45:31 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:43:15: 46:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:46:1: 46:2 - return; // scope 0 at $DIR/enum_opt.rs:46:2: 46:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:45:7: 45:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:45:15: 45:27 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:45:15: 45:27 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:46:21: 46:30 + _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:46:21: 46:30 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:46:30: 46:31 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _4 = const [8_usize, 8200_usize]; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ _11 = _10 as *const u8 (Misc); // scope 1 at 
$DIR/enum_opt.rs:46:3: 46:31 ++ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:46:30: 46:31 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:44:15: 47:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:47:1: 47:2 + return; // scope 0 at $DIR/enum_opt.rs:47:2: 47:2 } bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:43:1: 46:2 + resume; // scope 0 at $DIR/enum_opt.rs:44:1: 47:2 } } From 15f4eec7a986e6c9125ff3e0115d70aef6d5c711 Mon Sep 17 00:00:00 2001 From: kadmin Date: Fri, 24 Jun 2022 08:48:07 +0000 Subject: [PATCH 305/501] Leave FIXME for wasm layout difference. There is a distinction between running this on wasm and i686, even though they should be identical. This technically is not _incorrect_, it's just an unexpected difference, which is worth investigating, but not for correctness. --- .../rustc_mir_transform/src/large_enums.rs | 42 ++++++---- compiler/rustc_mir_transform/src/lib.rs | 2 +- .../enum_opt.cand.EnumSizeOpt.32bit.diff | 84 +++++++++---------- .../enum_opt.cand.EnumSizeOpt.64bit.diff | 84 +++++++++---------- .../enum_opt.invalid.EnumSizeOpt.32bit.diff | 44 +++++----- .../enum_opt.invalid.EnumSizeOpt.64bit.diff | 44 +++++----- tests/mir-opt/enum_opt.rs | 16 ++-- .../enum_opt.trunc.EnumSizeOpt.32bit.diff | 66 +++++++-------- .../enum_opt.trunc.EnumSizeOpt.64bit.diff | 66 +++++++-------- .../enum_opt.unin.EnumSizeOpt.32bit.diff | 82 +++++++++--------- .../enum_opt.unin.EnumSizeOpt.64bit.diff | 82 +++++++++--------- 11 files changed, 297 insertions(+), 315 deletions(-) diff --git a/compiler/rustc_mir_transform/src/large_enums.rs b/compiler/rustc_mir_transform/src/large_enums.rs index 1919720de4996..3f8662ad6971f 100644 --- a/compiler/rustc_mir_transform/src/large_enums.rs +++ b/compiler/rustc_mir_transform/src/large_enums.rs @@ -1,6 +1,6 @@ use crate::rustc_middle::ty::util::IntTypeExt; use crate::MirPass; -use rustc_data_structures::stable_map::FxHashMap; +use rustc_data_structures::fx::FxHashMap; use rustc_middle::mir::interpret::AllocId; use rustc_middle::mir::*; use rustc_middle::ty::{self, AdtDef, Const, ParamEnv, Ty, TyCtxt}; @@ -19,6 +19,10 @@ use rustc_target::abi::{HasDataLayout, Size, TagEncoding, Variants}; /// Instead of emitting moves of the large variant, /// Perform a memcpy instead. /// Based off of [this HackMD](https://hackmd.io/@ft4bxUsFT5CEUBmRKYHr7w/rJM8BBPzD). +/// +/// In summary, what this does is at runtime determine which enum variant is active, +/// and instead of copying all the bytes of the largest possible variant, +/// copy only the bytes for the currently active variant. pub struct EnumSizeOpt { pub(crate) discrepancy: u64, } @@ -26,7 +30,10 @@ pub struct EnumSizeOpt { impl<'tcx> MirPass<'tcx> for EnumSizeOpt { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let sess = tcx.sess; - if (!sess.opts.debugging_opts.unsound_mir_opts) || sess.mir_opt_level() < 3 { + // FIXME(julianknodt): one thing noticed while testing this mir-opt is that there is a + // different layout of large enums on wasm. It's not clear what is causing this layout + // difference, as it should be identical to i686 (32 bit). + if (!sess.opts.unstable_opts.unsound_mir_opts) || sess.mir_opt_level() < 3 { return; } self.optim(tcx, body); @@ -56,8 +63,8 @@ impl EnumSizeOpt { Variants::Multiple { variants, .. } if variants.len() <= 1 => return None, Variants::Multiple { variants, .. 
} => variants, }; - let min = variants.iter().map(|v| v.size()).min().unwrap(); - let max = variants.iter().map(|v| v.size()).max().unwrap(); + let min = variants.iter().map(|v| v.size).min().unwrap(); + let max = variants.iter().map(|v| v.size).max().unwrap(); if max.bytes() - min.bytes() < self.discrepancy { return None; } @@ -92,7 +99,7 @@ impl EnumSizeOpt { for (var_idx, layout) in variants.iter_enumerated() { let curr_idx = target_bytes * adt_def.discriminant_for_variant(tcx, var_idx).val as usize; - let sz = layout.size(); + let sz = layout.size; match ptr_sized_int { rustc_target::abi::Integer::I32 => { encode_store!(curr_idx, data_layout.endian, sz.bytes() as u32); @@ -115,8 +122,11 @@ impl EnumSizeOpt { let mut alloc_cache = FxHashMap::default(); let body_did = body.source.def_id(); let param_env = tcx.param_env(body_did); - let (bbs, local_decls) = body.basic_blocks_and_local_decls_mut(); - for bb in bbs { + + let blocks = body.basic_blocks.as_mut(); + let local_decls = &mut body.local_decls; + + for bb in blocks { bb.expand_statements(|st| { if let StatementKind::Assign(box ( lhs, @@ -175,7 +185,7 @@ impl EnumSizeOpt { kind: StatementKind::Assign(box ( discr_cast_place, Rvalue::Cast( - CastKind::Misc, + CastKind::IntToInt, Operand::Copy(discr_place), tcx.types.usize, ), @@ -217,7 +227,7 @@ impl EnumSizeOpt { source_info, kind: StatementKind::Assign(box ( dst_cast_place, - Rvalue::Cast(CastKind::Misc, Operand::Copy(dst), dst_cast_ty), + Rvalue::Cast(CastKind::PtrToPtr, Operand::Copy(dst), dst_cast_ty), )), }; @@ -240,17 +250,19 @@ impl EnumSizeOpt { source_info, kind: StatementKind::Assign(box ( src_cast_place, - Rvalue::Cast(CastKind::Misc, Operand::Copy(src), src_cast_ty), + Rvalue::Cast(CastKind::PtrToPtr, Operand::Copy(src), src_cast_ty), )), }; let copy_bytes = Statement { source_info, - kind: StatementKind::CopyNonOverlapping(box CopyNonOverlapping { - src: Operand::Copy(src_cast_place), - dst: Operand::Copy(dst_cast_place), - count: Operand::Copy(size_place), - }), + kind: StatementKind::Intrinsic( + box NonDivergingIntrinsic::CopyNonOverlapping(CopyNonOverlapping { + src: Operand::Copy(src_cast_place), + dst: Operand::Copy(dst_cast_place), + count: Operand::Copy(size_place), + }), + ), }; let store_dead = Statement { diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 8cd268eb6ce13..be3652dd3e7bd 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -1,8 +1,8 @@ #![allow(rustc::potential_query_instability)] #![feature(box_patterns)] #![feature(drain_filter)] +#![feature(box_syntax)] #![feature(let_chains)] -#![feature(let_else)] #![feature(map_try_insert)] #![feature(min_specialization)] #![feature(never_type)] diff --git a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff index 859fddd65c1ae..e0ba46c15f4a9 100644 --- a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff @@ -2,54 +2,50 @@ + // MIR for `cand` after EnumSizeOpt fn cand() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:50:15: 50:15 - let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:51:7: 51:12 - let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:52:7: 52:34 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:52:24: 52:33 -+ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:52:3: 
52:34 -+ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _8: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:15: +0:15 + let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:34 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:24: +2:33 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _8: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:51:7: 51:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:51:7: 51:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 - ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:52:24: 52:33 - _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:52:24: 52:33 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:52:33: 52:34 -- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _4 = const [2_usize, 8196_usize]; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:52:33: 52:34 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:50:15: 53:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:53:1: 53:2 - return; // scope 0 at $DIR/enum_opt.rs:53:2: 53:2 - } - - bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:50:1: 53:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 + ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 
+ discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 + _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _4 = const [2_usize, 8196_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _6 = _5 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _9 = _8 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _11 = _10 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ copy_nonoverlapping(dst = _9, src = _11, count = _7); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:15: +3:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 + return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 } } diff --git a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff index c3325f2d81f6e..67439dba9c947 100644 --- a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff @@ -2,54 +2,50 @@ + // MIR for `cand` after EnumSizeOpt fn cand() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:50:15: 50:15 - let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:51:7: 51:12 - let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:52:7: 52:34 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:52:24: 52:33 -+ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _8: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 -+ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:52:3: 52:34 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:15: +0:15 + let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:34 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:24: +2:33 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _8: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _9: *mut u8; // in scope 0 at 
$DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:51:7: 51:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:51:7: 51:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 - ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:51:15: 51:34 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:52:24: 52:33 - _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:52:24: 52:33 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:52:7: 52:34 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:52:33: 52:34 -- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _4 = const [2_usize, 8200_usize]; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 -+ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:52:3: 52:34 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:52:33: 52:34 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:50:15: 53:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:53:1: 53:2 - return; // scope 0 at $DIR/enum_opt.rs:53:2: 53:2 - } - - bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:50:1: 53:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 + ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 + _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _4 = const [2_usize, 8200_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _6 = _5 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _9 = _8 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _10 = &raw const _2; // scope 1 at 
$DIR/enum_opt.rs:+2:3: +2:34 ++ _11 = _10 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ copy_nonoverlapping(dst = _9, src = _11, count = _7); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:15: +3:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 + return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 } } diff --git a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff index bc16a780683af..db2efa195a369 100644 --- a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff @@ -2,34 +2,30 @@ + // MIR for `invalid` after EnumSizeOpt fn invalid() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:56:18: 56:18 - let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:57:7: 57:12 - let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:58:7: 58:36 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:58:26: 58:35 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:18: +0:18 + let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:36 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:26: +2:35 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:57:7: 57:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:57:7: 57:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:57:15: 57:29 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:57:15: 57:29 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:58:26: 58:35 - _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:58:26: 58:35 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:58:35: 58:36 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:58:3: 58:36 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:58:35: 58:36 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:56:18: 59:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:59:1: 59:2 - return; // scope 0 at $DIR/enum_opt.rs:59:2: 59:2 - } - - bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:56:1: 59:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:29 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:29 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:26: +2:35 + _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:26: +2:35 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:35: +2:36 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:36 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:35: +2:36 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:18: +3:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 + 
return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 } } diff --git a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff index bc16a780683af..db2efa195a369 100644 --- a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff @@ -2,34 +2,30 @@ + // MIR for `invalid` after EnumSizeOpt fn invalid() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:56:18: 56:18 - let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:57:7: 57:12 - let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:58:7: 58:36 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:58:26: 58:35 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:18: +0:18 + let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:36 + let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:26: +2:35 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:57:7: 57:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:57:7: 57:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:57:15: 57:29 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:57:15: 57:29 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:58:26: 58:35 - _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:58:26: 58:35 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:58:7: 58:36 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:58:35: 58:36 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:58:3: 58:36 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:58:35: 58:36 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:56:18: 59:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:59:1: 59:2 - return; // scope 0 at $DIR/enum_opt.rs:59:2: 59:2 - } - - bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:56:1: 59:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:29 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:29 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:26: +2:35 + _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:26: +2:35 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 + ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:35: +2:36 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:36 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:35: +2:36 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:18: +3:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 + return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 } } diff --git a/tests/mir-opt/enum_opt.rs b/tests/mir-opt/enum_opt.rs index 65f77af0592ea..6876c56cfa4ed 100644 --- a/tests/mir-opt/enum_opt.rs +++ b/tests/mir-opt/enum_opt.rs @@ -1,12 +1,14 @@ // EMIT_MIR_FOR_EACH_BIT_WIDTH // compile-flags: -Zunsound-mir-opts -Zmir-opt-level=3 -// ignore-wasm32-bare compiled with panic=abort by default +// ignore-wasm32 + #![feature(arbitrary_enum_discriminant, 
repr128)] // Tests that an enum with a variant with no data gets correctly transformed. +#[repr(C)] pub enum NoData { + Large([u8; 8192]), None, - Large([u64; 1024]), } // Tests that an enum with a variant with data that is a valid candidate gets transformed. @@ -26,7 +28,7 @@ pub enum InvalidIdxs { // Tests that an enum with too high of a discriminant index (not in bounds of usize) does not // get tformed. #[repr(u128)] -pub enum Truncatable { +pub enum NotTrunctable { A = 0, B([u8; 1024]) = 1, C([u8; 4096]) = 0x10000000000000001, @@ -43,7 +45,7 @@ pub enum RandOrderDiscr { // EMIT_MIR enum_opt.unin.EnumSizeOpt.diff pub fn unin() { let mut a = NoData::None; - a = NoData::Large([1; 1024]); + a = NoData::Large([1; 8192]); } // EMIT_MIR enum_opt.cand.EnumSizeOpt.diff @@ -60,9 +62,9 @@ pub fn invalid() { // EMIT_MIR enum_opt.trunc.EnumSizeOpt.diff pub fn trunc() { - let mut a = Truncatable::A; - a = Truncatable::B([0; 1024]); - a = Truncatable::C([0; 4096]); + let mut a = NotTrunctable::A; + a = NotTrunctable::B([0; 1024]); + a = NotTrunctable::C([0; 4096]); } pub fn rand_order() { diff --git a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff index 654e385bfe785..b9d6765d8c1ca 100644 --- a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff @@ -2,45 +2,41 @@ + // MIR for `trunc` after EnumSizeOpt fn trunc() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:62:16: 62:16 - let mut _1: Truncatable; // in scope 0 at $DIR/enum_opt.rs:63:7: 63:12 - let mut _2: Truncatable; // in scope 0 at $DIR/enum_opt.rs:64:7: 64:32 - let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:64:22: 64:31 - let mut _4: Truncatable; // in scope 0 at $DIR/enum_opt.rs:65:7: 65:32 - let mut _5: [u8; 4096]; // in scope 0 at $DIR/enum_opt.rs:65:22: 65:31 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:16: +0:16 + let mut _1: NotTrunctable; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + let mut _2: NotTrunctable; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:34 + let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:24: +2:33 + let mut _4: NotTrunctable; // in scope 0 at $DIR/enum_opt.rs:+3:7: +3:34 + let mut _5: [u8; 4096]; // in scope 0 at $DIR/enum_opt.rs:+3:24: +3:33 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:63:7: 63:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:63:7: 63:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:63:15: 63:29 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:63:15: 63:29 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 - _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:64:3: 64:32 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 - StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 - StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:65:22: 65:31 - _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:65:22: 65:31 - Deinit(_4); // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 - ((_4 as C).0: [u8; 4096]) = move _5; 
// scope 1 at $DIR/enum_opt.rs:65:7: 65:32 - discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 - StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:65:31: 65:32 - _1 = move _4; // scope 1 at $DIR/enum_opt.rs:65:3: 65:32 - StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:65:31: 65:32 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:62:16: 66:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:66:1: 66:2 - return; // scope 0 at $DIR/enum_opt.rs:66:2: 66:2 - } - - bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:62:1: 66:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:31 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:31 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 + _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 + StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 + StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:+3:24: +3:33 + _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:+3:24: +3:33 + Deinit(_4); // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 + ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 + discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 + StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:+3:33: +3:34 + _1 = move _4; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:34 + StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+3:33: +3:34 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:16: +4:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+4:1: +4:2 + return; // scope 0 at $DIR/enum_opt.rs:+4:2: +4:2 } } diff --git a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff index 654e385bfe785..b9d6765d8c1ca 100644 --- a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff @@ -2,45 +2,41 @@ + // MIR for `trunc` after EnumSizeOpt fn trunc() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:62:16: 62:16 - let mut _1: Truncatable; // in scope 0 at $DIR/enum_opt.rs:63:7: 63:12 - let mut _2: Truncatable; // in scope 0 at $DIR/enum_opt.rs:64:7: 64:32 - let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:64:22: 64:31 - let mut _4: Truncatable; // in scope 0 at $DIR/enum_opt.rs:65:7: 65:32 - let mut _5: [u8; 4096]; // in scope 0 at $DIR/enum_opt.rs:65:22: 65:31 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:16: +0:16 + let mut _1: NotTrunctable; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + let mut _2: NotTrunctable; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:34 + let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:24: +2:33 + let mut _4: NotTrunctable; // in scope 0 at $DIR/enum_opt.rs:+3:7: +3:34 + let mut _5: [u8; 4096]; // in scope 0 at $DIR/enum_opt.rs:+3:24: +3:33 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:63:7: 63:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:63:7: 63:12 - Deinit(_1); // 
scope 0 at $DIR/enum_opt.rs:63:15: 63:29 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:63:15: 63:29 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 - _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:64:22: 64:31 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:64:7: 64:32 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:64:3: 64:32 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:64:31: 64:32 - StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 - StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:65:22: 65:31 - _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:65:22: 65:31 - Deinit(_4); // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 - ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 - discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:65:7: 65:32 - StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:65:31: 65:32 - _1 = move _4; // scope 1 at $DIR/enum_opt.rs:65:3: 65:32 - StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:65:31: 65:32 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:62:16: 66:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:66:1: 66:2 - return; // scope 0 at $DIR/enum_opt.rs:66:2: 66:2 - } - - bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:62:1: 66:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:31 + discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:31 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 + _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 + _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 + StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 + StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:+3:24: +3:33 + _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:+3:24: +3:33 + Deinit(_4); // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 + ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 + discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 + StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:+3:33: +3:34 + _1 = move _4; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:34 + StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+3:33: +3:34 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:16: +4:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+4:1: +4:2 + return; // scope 0 at $DIR/enum_opt.rs:+4:2: +4:2 } } diff --git a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff index bc72e507d5ab7..168677b173d53 100644 --- a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff @@ -2,53 +2,49 @@ + // MIR for `unin` after EnumSizeOpt fn unin() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:44:15: 44:15 - let mut _1: NoData; // in scope 0 at 
$DIR/enum_opt.rs:45:7: 45:12 - let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:46:7: 46:31 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:46:21: 46:30 -+ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _8: *mut NoData; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:15: +0:15 + let mut _1: NoData; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:31 + let mut _3: [u8; 8192]; // in scope 0 at $DIR/enum_opt.rs:+2:21: +2:30 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _8: *mut NoData; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:45:7: 45:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:45:7: 45:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:45:15: 45:27 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:45:15: 45:27 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:46:21: 46:30 - _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:46:21: 46:30 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:46:30: 46:31 -- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _4 = const [4_usize, 8196_usize]; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:46:30: 46:31 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:44:15: 47:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:47:1: 47:2 - return; // scope 0 at $DIR/enum_opt.rs:47:2: 47:2 - } - - bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:44:1: 
47:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:27 + discriminant(_1) = 1; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:27 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:21: +2:30 + _3 = [const 1_u8; 8192]; // scope 1 at $DIR/enum_opt.rs:+2:21: +2:30 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 + ((_2 as Large).0: [u8; 8192]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 + discriminant(_2) = 0; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:30: +2:31 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _4 = const [8196_usize, 4_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _6 = _5 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _9 = _8 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _11 = _10 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ copy_nonoverlapping(dst = _9, src = _11, count = _7); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:30: +2:31 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:15: +3:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 + return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 } } diff --git a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff index 9001268dc2087..168677b173d53 100644 --- a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff @@ -2,53 +2,49 @@ + // MIR for `unin` after EnumSizeOpt fn unin() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:44:15: 44:15 - let mut _1: NoData; // in scope 0 at $DIR/enum_opt.rs:45:7: 45:12 - let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:46:7: 46:31 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:46:21: 46:30 -+ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _8: *mut NoData; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 -+ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:46:3: 46:31 + let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:15: +0:15 + let mut _1: NoData; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:31 + let mut _3: [u8; 8192]; // in scope 0 at $DIR/enum_opt.rs:+2:21: +2:30 ++ let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _7: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _8: *mut 
NoData; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 scope 1 { - debug a => _1; // in scope 1 at $DIR/enum_opt.rs:45:7: 45:12 + debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { - StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:45:7: 45:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:45:15: 45:27 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:45:15: 45:27 - StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 - StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:46:21: 46:30 - _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:46:21: 46:30 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:46:7: 46:31 - StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:46:30: 46:31 -- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _4 = const [8_usize, 8200_usize]; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _6 = _5 as usize (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _9 = _8 as *mut u8 (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ _11 = _10 as *const u8 (Misc); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ copy_nonoverlapping(src=_11, dst=_9, count=_7); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 -+ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:46:3: 46:31 - StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:46:30: 46:31 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:44:15: 47:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:47:1: 47:2 - return; // scope 0 at $DIR/enum_opt.rs:47:2: 47:2 - } - - bb1 (cleanup): { - resume; // scope 0 at $DIR/enum_opt.rs:44:1: 47:2 + StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 + Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:27 + discriminant(_1) = 1; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:27 + StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 + StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:21: +2:30 + _3 = [const 1_u8; 8192]; // scope 1 at $DIR/enum_opt.rs:+2:21: +2:30 + Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 + ((_2 as Large).0: [u8; 8192]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 + discriminant(_2) = 0; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 + StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:30: +2:31 +- _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _4 = const [8196_usize, 4_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _6 = _5 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _8 = &raw mut _1; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _9 = _8 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _11 = _10 as *const u8 (PtrToPtr); // scope 1 at 
$DIR/enum_opt.rs:+2:3: +2:31 ++ copy_nonoverlapping(dst = _9, src = _11, count = _7); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:30: +2:31 + _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:15: +3:2 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 + return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 } } From b7c38981ff38936841fdd2bdd81d1594b4f88b09 Mon Sep 17 00:00:00 2001 From: Kartavya Vashishtha Date: Tue, 7 Feb 2023 15:39:19 +0530 Subject: [PATCH 306/501] bless new output --- tests/ui/suspicious_to_owned.stderr | 32 ++++++++++++++++++----------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/tests/ui/suspicious_to_owned.stderr b/tests/ui/suspicious_to_owned.stderr index 2d8bbf0d6f11a..c4ec7aa88a2a3 100644 --- a/tests/ui/suspicious_to_owned.stderr +++ b/tests/ui/suspicious_to_owned.stderr @@ -5,12 +5,14 @@ LL | let _ = cow.to_owned(); | ^^^^^^^^^^^^^^ | = note: `-D clippy::suspicious-to-owned` implied by `-D warnings` -help: depending on intent, either make the Cow an Owned variant or clone the Cow itself +help: depending on intent, either make the Cow an Owned variant | -LL | let _ = cow.clone(); - | ~~~~~~~~~~~ LL | let _ = cow.into_owned(); | ~~~~~~~~~~~~~~~~ +help: or clone the Cow itself + | +LL | let _ = cow.clone(); + | ~~~~~~~~~~~ error: this `to_owned` call clones the Cow<'_, [char; 3]> itself and does not cause the Cow<'_, [char; 3]> contents to become owned --> $DIR/suspicious_to_owned.rs:26:13 @@ -18,12 +20,14 @@ error: this `to_owned` call clones the Cow<'_, [char; 3]> itself and does not ca LL | let _ = cow.to_owned(); | ^^^^^^^^^^^^^^ | -help: depending on intent, either make the Cow an Owned variant or clone the Cow itself +help: depending on intent, either make the Cow an Owned variant | -LL | let _ = cow.clone(); - | ~~~~~~~~~~~ LL | let _ = cow.into_owned(); | ~~~~~~~~~~~~~~~~ +help: or clone the Cow itself + | +LL | let _ = cow.clone(); + | ~~~~~~~~~~~ error: this `to_owned` call clones the Cow<'_, Vec> itself and does not cause the Cow<'_, Vec> contents to become owned --> $DIR/suspicious_to_owned.rs:36:13 @@ -31,12 +35,14 @@ error: this `to_owned` call clones the Cow<'_, Vec> itself and does not ca LL | let _ = cow.to_owned(); | ^^^^^^^^^^^^^^ | -help: depending on intent, either make the Cow an Owned variant or clone the Cow itself +help: depending on intent, either make the Cow an Owned variant | -LL | let _ = cow.clone(); - | ~~~~~~~~~~~ LL | let _ = cow.into_owned(); | ~~~~~~~~~~~~~~~~ +help: or clone the Cow itself + | +LL | let _ = cow.clone(); + | ~~~~~~~~~~~ error: this `to_owned` call clones the Cow<'_, str> itself and does not cause the Cow<'_, str> contents to become owned --> $DIR/suspicious_to_owned.rs:46:13 @@ -44,12 +50,14 @@ error: this `to_owned` call clones the Cow<'_, str> itself and does not cause th LL | let _ = cow.to_owned(); | ^^^^^^^^^^^^^^ | -help: depending on intent, either make the Cow an Owned variant or clone the Cow itself +help: depending on intent, either make the Cow an Owned variant | -LL | let _ = cow.clone(); - | ~~~~~~~~~~~ LL | let _ = cow.into_owned(); | ~~~~~~~~~~~~~~~~ +help: or clone the Cow itself + | +LL | let _ = cow.clone(); + | ~~~~~~~~~~~ error: implicitly cloning a `String` by calling `to_owned` on its dereferenced type --> $DIR/suspicious_to_owned.rs:60:13 From c6e7917d6ed2ae36534b064a51697b54f497e02e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 7 Feb 2023 15:21:37 
+0100 Subject: [PATCH 307/501] Fix up token_tree_to_syntax_node float split handling --- crates/hir-def/src/item_tree.rs | 5 ++- .../src/macro_expansion_tests/proc_macros.rs | 7 ++-- crates/mbe/src/syntax_bridge.rs | 37 ++++++++++++++++++- crates/mbe/src/tt_iter.rs | 35 ++++++++++++++++-- crates/parser/src/lib.rs | 4 +- 5 files changed, 76 insertions(+), 12 deletions(-) diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index 3e1f7d44460f1..19d01630ef088 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -111,7 +111,8 @@ impl ItemTree { Some(node) => node, None => return Default::default(), }; - if never!(syntax.kind() == SyntaxKind::ERROR) { + if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax) + { // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic return Default::default(); } @@ -133,7 +134,7 @@ impl ItemTree { ctx.lower_macro_stmts(stmts) }, _ => { - panic!("cannot create item tree from {syntax:?} {syntax}"); + panic!("cannot create item tree for file {file_id:?} from {syntax:?} {syntax}"); }, } }; diff --git a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index 118c14ed843fe..822bdcc122dc6 100644 --- a/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -104,7 +104,7 @@ macro_rules! id { $($t)* }; } -id /*+errors*/! { +id! { #[proc_macros::identity] impl Foo for WrapBj { async fn foo(&self) { @@ -113,18 +113,17 @@ id /*+errors*/! { } } "#, - expect![[r##" + expect![[r#" macro_rules! id { ($($t:tt)*) => { $($t)* }; } -/* parse error: expected SEMICOLON */ #[proc_macros::identity] impl Foo for WrapBj { async fn foo(&self ) { self .0.id().await ; } } -"##]], +"#]], ); } diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 7fe4fcfc68e42..8b9a3bca021de 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -95,7 +95,7 @@ pub fn token_tree_to_syntax_node( parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => { tree_sink.token(kind, n_raw_tokens) } - parser::Step::FloatSplit { .. 
} => tree_sink.token(SyntaxKind::FLOAT_NUMBER, 1), + parser::Step::FloatSplit { has_pseudo_dot } => tree_sink.float_split(has_pseudo_dot), parser::Step::Enter { kind } => tree_sink.start_node(kind), parser::Step::Exit => tree_sink.finish_node(), parser::Step::Error { msg } => tree_sink.error(msg.to_string()), @@ -797,6 +797,41 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { } impl<'a> TtTreeSink<'a> { + fn float_split(&mut self, has_pseudo_dot: bool) { + let (text, _span) = match self.cursor.token_tree() { + Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => { + (lit.text.as_str(), lit.span) + } + _ => unreachable!(), + }; + match text.split_once('.') { + Some((left, right)) => { + assert!(!left.is_empty()); + self.inner.start_node(SyntaxKind::NAME_REF); + self.inner.token(SyntaxKind::INT_NUMBER, left); + self.inner.finish_node(); + + // here we move the exit up, the original exit has been deleted in process + self.inner.finish_node(); + + self.inner.token(SyntaxKind::DOT, "."); + + if has_pseudo_dot { + assert!(right.is_empty()); + } else { + self.inner.start_node(SyntaxKind::NAME_REF); + self.inner.token(SyntaxKind::INT_NUMBER, right); + self.inner.finish_node(); + + // the parser creates an unbalanced start node, we are required to close it here + self.inner.finish_node(); + } + } + None => unreachable!(), + } + self.cursor = self.cursor.bump(); + } + fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) { if kind == LIFETIME_IDENT { n_tokens = 2; diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index b38243caeefd0..c05a2ca15023d 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -170,11 +170,38 @@ impl<'a> TtIter<'a> { let mut res = vec![]; if cursor.is_root() { - while curr != cursor { - if let Some(token) = curr.token_tree() { - res.push(token.cloned()); + if float_splits.is_empty() { + while curr != cursor { + if let Some(token) = curr.token_tree() { + res.push(token.cloned()); + } + curr = curr.bump(); + } + } else { + // let mut float_splits = float_splits.into_iter().peekable(); + // while let Some(tt) = curr.token_tree() { + // let mut tt = tt.cloned(); + // let mut tt_mut_ref = &mut tt; + // if let Some(fs) = float_splits.peek() { + // loop { + // curr = curr.bump_subtree(); + // if curr == *fs { + // float_splits.next(); + // } + // if curr.is_root() { + // break; + // } + // } + // } + // res.push(tt); + // } + + while curr != cursor { + if let Some(token) = curr.token_tree() { + res.push(token.cloned()); + } + curr = curr.bump(); } - curr = curr.bump(); } } self.inner = self.inner.as_slice()[res.len()..].iter(); diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index f20d32d6cf877..6c72b5994b58c 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -102,10 +102,12 @@ impl TopEntryPoint { match step { Step::Enter { .. } => depth += 1, Step::Exit => depth -= 1, - Step::FloatSplit { .. } | Step::Token { .. } | Step::Error { .. } => (), + Step::FloatSplit { .. } => depth -= 1, + Step::Token { .. } | Step::Error { .. 
} => (), } } assert!(!first, "no tree at all"); + assert_eq!(depth, 0, "unbalanced tree"); } res From 7d53619064ab7045c383644cb445052d2a3d46db Mon Sep 17 00:00:00 2001 From: bjorn3 <17426603+bjorn3@users.noreply.github.com> Date: Mon, 6 Feb 2023 19:27:27 +0100 Subject: [PATCH 308/501] Force the allocator to be looked up from the perspective of the rustc binary Fixes #1303 --- .github/workflows/main.yml | 5 +++-- src/compiler_builtins.rs | 38 +++++++++++++++++++++++++++++++++++--- 2 files changed, 38 insertions(+), 5 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9d67886ba8698..9d3ed3ac5d0c3 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -34,7 +34,7 @@ jobs: fail-fast: false matrix: include: - - os: ubuntu-20.04 # FIXME switch to ubuntu-22.04 once #1303 is fixed + - os: ubuntu-latest env: TARGET_TRIPLE: x86_64-unknown-linux-gnu - os: macos-latest @@ -226,7 +226,8 @@ jobs: fail-fast: false matrix: include: - - os: ubuntu-20.04 # FIXME switch to ubuntu-22.04 once #1303 is fixed + # FIXME update at some point in the future once most distros use a newer glibc + - os: ubuntu-20.04 env: TARGET_TRIPLE: x86_64-unknown-linux-gnu - os: macos-latest diff --git a/src/compiler_builtins.rs b/src/compiler_builtins.rs index c6a247cf59eed..8a53baa763a7d 100644 --- a/src/compiler_builtins.rs +++ b/src/compiler_builtins.rs @@ -1,14 +1,33 @@ +#[cfg(all(unix, feature = "jit"))] +use std::ffi::c_int; +#[cfg(feature = "jit")] +use std::ffi::c_void; + +// FIXME replace with core::ffi::c_size_t once stablized +#[allow(non_camel_case_types)] +#[cfg(feature = "jit")] +type size_t = usize; + macro_rules! builtin_functions { - ($register:ident; $(fn $name:ident($($arg_name:ident: $arg_ty:ty),*) -> $ret_ty:ty;)*) => { + ( + $register:ident; + $( + $(#[$attr:meta])? + fn $name:ident($($arg_name:ident: $arg_ty:ty),*) -> $ret_ty:ty; + )* + ) => { #[cfg(feature = "jit")] #[allow(improper_ctypes)] extern "C" { - $(fn $name($($arg_name: $arg_ty),*) -> $ret_ty;)* + $( + $(#[$attr])? + fn $name($($arg_name: $arg_ty),*) -> $ret_ty; + )* } #[cfg(feature = "jit")] pub(crate) fn $register(builder: &mut cranelift_jit::JITBuilder) { - for (name, val) in [$((stringify!($name), $name as *const u8)),*] { + for (name, val) in [$($(#[$attr])? (stringify!($name), $name as *const u8)),*] { builder.symbol(name, val); } } @@ -40,4 +59,17 @@ builtin_functions! { fn __fixdfti(f: f64) -> i128; fn __fixunssfti(f: f32) -> u128; fn __fixunsdfti(f: f64) -> u128; + + // allocator + // NOTE: These need to be mentioned here despite not being part of compiler_builtins because + // newer glibc resolve dlsym("malloc") to libc.so despite the override in the rustc binary to + // use jemalloc. Libraries opened with dlopen still get the jemalloc version, causing multiple + // allocators to be mixed, resulting in a crash. 
+ fn calloc(nobj: size_t, size: size_t) -> *mut c_void; + #[cfg(unix)] + fn posix_memalign(memptr: *mut *mut c_void, align: size_t, size: size_t) -> c_int; + fn malloc(size: size_t) -> *mut c_void; + fn realloc(p: *mut c_void, size: size_t) -> *mut c_void; + fn free(p: *mut c_void) -> (); + } From f6539b139e185c5bb08de5b8ff0275b47c70df43 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 7 Feb 2023 15:31:51 +0100 Subject: [PATCH 309/501] fix depth check for float split step --- crates/parser/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index 6c72b5994b58c..9b895ff3ca63d 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -102,7 +102,7 @@ impl TopEntryPoint { match step { Step::Enter { .. } => depth += 1, Step::Exit => depth -= 1, - Step::FloatSplit { .. } => depth -= 1, + Step::FloatSplit { has_pseudo_dot } => depth -= 1 + !has_pseudo_dot as usize, Step::Token { .. } | Step::Error { .. } => (), } } From a7df61f1d35d05100722af1ec90bccd68dd8ad9c Mon Sep 17 00:00:00 2001 From: Duong Quoc Khanh Date: Tue, 7 Feb 2023 23:55:44 +0900 Subject: [PATCH 310/501] Add postfix completion for `unsafe`. Wrap receiver_text with { } if it's not a BlockExpr. --- .../ide-completion/src/completions/keyword.rs | 3 + .../ide-completion/src/completions/postfix.rs | 112 +++++++++++------- crates/ide-completion/src/render.rs | 2 + .../ide-completion/src/tests/proc_macros.rs | 84 ++++++------- 4 files changed, 118 insertions(+), 83 deletions(-) diff --git a/crates/ide-completion/src/completions/keyword.rs b/crates/ide-completion/src/completions/keyword.rs index 1d03c8cc5ca6d..b9ab2afca2b59 100644 --- a/crates/ide-completion/src/completions/keyword.rs +++ b/crates/ide-completion/src/completions/keyword.rs @@ -86,6 +86,7 @@ fn foo(a: A) { a.$0 } sn match match expr {} sn ref &expr sn refm &mut expr + sn unsafe unsafe {} "#]], ); @@ -110,6 +111,7 @@ fn foo() { sn match match expr {} sn ref &expr sn refm &mut expr + sn unsafe unsafe {} "#]], ); } @@ -136,6 +138,7 @@ fn foo(a: A) { a.$0 } sn match match expr {} sn ref &expr sn refm &mut expr + sn unsafe unsafe {} "#]], ); } diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index f4f37d77d81f3..19df8e2b20679 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -123,6 +123,12 @@ pub(crate) fn complete_postfix( postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc); postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc); + let unsafe_completion_string = match dot_receiver { + ast::Expr::BlockExpr(_) => format!("unsafe {receiver_text}"), + _ => format!("unsafe {{ {receiver_text} }}"), + }; + postfix_snippet("unsafe", "unsafe {}", &unsafe_completion_string).add_to(acc); + // The rest of the postfix completions create an expression that moves an argument, // so it's better to consider references now to avoid breaking the compilation @@ -329,18 +335,19 @@ fn main() { } "#, expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn if if expr {} - sn let let - sn letm let mut - sn match match expr {} - sn not !expr - sn ref &expr - sn refm &mut expr - sn while while expr {} + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn if if expr {} + sn let let + sn letm let mut + sn match match expr {} + sn not !expr 
+ sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} + sn while while expr {} "#]], ); } @@ -359,16 +366,17 @@ fn main() { } "#, expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn if if expr {} - sn match match expr {} - sn not !expr - sn ref &expr - sn refm &mut expr - sn while while expr {} + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn if if expr {} + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} + sn while while expr {} "#]], ); } @@ -383,15 +391,16 @@ fn main() { } "#, expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} "#]], ) } @@ -406,18 +415,19 @@ fn main() { } "#, expect![[r#" - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn if if expr {} - sn let let - sn letm let mut - sn match match expr {} - sn not !expr - sn ref &expr - sn refm &mut expr - sn while while expr {} + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn if if expr {} + sn let let + sn letm let mut + sn match match expr {} + sn not !expr + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} + sn while while expr {} "#]], ); } @@ -517,6 +527,22 @@ fn main() { ) } + #[test] + fn postfix_completion_for_unsafe() { + check_edit("unsafe", r#"fn main() { foo.$0 }"#, r#"fn main() { unsafe { foo } }"#); + check_edit("unsafe", r#"fn main() { { foo }.$0 }"#, r#"fn main() { unsafe { foo } }"#); + check_edit( + "unsafe", + r#"fn main() { if x { foo }.$0 }"#, + r#"fn main() { unsafe { if x { foo } } }"#, + ); + check_edit( + "unsafe", + r#"fn main() { loop { foo }.$0 }"#, + r#"fn main() { unsafe { loop { foo } } }"#, + ); + } + #[test] fn custom_postfix_completion() { let config = CompletionConfig { diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index e48d1aecd04fa..d6476c10258ec 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -1691,6 +1691,7 @@ fn main() { sn while [] sn ref [] sn refm [] + sn unsafe [] sn match [] sn box [] sn dbg [] @@ -1718,6 +1719,7 @@ fn main() { me f() [] sn ref [] sn refm [] + sn unsafe [] sn match [] sn box [] sn dbg [] diff --git a/crates/ide-completion/src/tests/proc_macros.rs b/crates/ide-completion/src/tests/proc_macros.rs index fec149e56a9e2..92ea4d15b8512 100644 --- a/crates/ide-completion/src/tests/proc_macros.rs +++ b/crates/ide-completion/src/tests/proc_macros.rs @@ -24,16 +24,17 @@ fn main() { } "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} "#]], ) } @@ -54,16 +55,17 @@ fn main() { } "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn let let - sn 
letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} "#]], ) } @@ -86,16 +88,17 @@ impl Foo { fn main() {} "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} "#]], ) } @@ -118,16 +121,17 @@ impl Foo { fn main() {} "#, expect![[r#" - me foo() fn(&self) - sn box Box::new(expr) - sn call function(expr) - sn dbg dbg!(expr) - sn dbgr dbg!(&expr) - sn let let - sn letm let mut - sn match match expr {} - sn ref &expr - sn refm &mut expr + me foo() fn(&self) + sn box Box::new(expr) + sn call function(expr) + sn dbg dbg!(expr) + sn dbgr dbg!(&expr) + sn let let + sn letm let mut + sn match match expr {} + sn ref &expr + sn refm &mut expr + sn unsafe unsafe {} "#]], ) } From e59487de38b1be5b06b92eb4a9d30c0adb32d9db Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 7 Feb 2023 17:12:24 +0100 Subject: [PATCH 311/501] Add tests for float access macro call inputs --- .../hir-def/src/macro_expansion_tests/mbe.rs | 35 ++++++++++++++++ crates/mbe/src/syntax_bridge.rs | 2 +- crates/mbe/src/to_parser_input.rs | 4 ++ crates/mbe/src/tt_iter.rs | 41 ++++--------------- crates/tt/src/buffer.rs | 13 +++++- 5 files changed, 58 insertions(+), 37 deletions(-) diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs index 2d5f2a692e5da..49bbc64bff180 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -97,6 +97,41 @@ fn#19 main#20(#21)#21 {#22 "##]], ); } +#[test] +fn float_field_acces_macro_input() { + check( + r#" +macro_rules! foo { + ($expr:expr) => { + fn foo() { + $expr; + } + }; +} +foo!(x .0.1); +foo!(x .2. 3); +foo!(x .4 .5); +"#, + expect![[r#" +macro_rules! 
foo { + ($expr:expr) => { + fn foo() { + $expr; + } + }; +} +fn foo() { + (x.0.1); +} +fn foo() { + (x.2.3); +} +fn foo() { + (x.4.5); +} +"#]], + ); +} #[test] fn mbe_smoke_test() { diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 8b9a3bca021de..a4e3efaeb5210 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -817,7 +817,7 @@ impl<'a> TtTreeSink<'a> { self.inner.token(SyntaxKind::DOT, "."); if has_pseudo_dot { - assert!(right.is_empty()); + assert!(right.is_empty(), "{left}.{right}"); } else { self.inner.start_node(SyntaxKind::NAME_REF); self.inner.token(SyntaxKind::INT_NUMBER, right); diff --git a/crates/mbe/src/to_parser_input.rs b/crates/mbe/src/to_parser_input.rs index d4c19b3ab8cd1..6d20998bb4df1 100644 --- a/crates/mbe/src/to_parser_input.rs +++ b/crates/mbe/src/to_parser_input.rs @@ -45,6 +45,10 @@ pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input { .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &lit)); res.push(kind); + + if kind == FLOAT_NUMBER && !inner_text.ends_with('.') { + res.was_joint(); + } } tt::Leaf::Ident(ident) => match ident.text.as_ref() { "_" => res.push(T![_]), diff --git a/crates/mbe/src/tt_iter.rs b/crates/mbe/src/tt_iter.rs index c05a2ca15023d..f744481f3aecb 100644 --- a/crates/mbe/src/tt_iter.rs +++ b/crates/mbe/src/tt_iter.rs @@ -140,7 +140,6 @@ impl<'a> TtIter<'a> { let mut cursor = buffer.begin(); let mut error = false; - let mut float_splits = vec![]; for step in tree_traversal.iter() { match step { parser::Step::Token { kind, mut n_input_tokens } => { @@ -152,7 +151,8 @@ impl<'a> TtIter<'a> { } } parser::Step::FloatSplit { .. } => { - float_splits.push(cursor); + // FIXME: We need to split the tree properly here, but mutating the token trees + // in the buffer is somewhat tricky to pull off. cursor = cursor.bump_subtree(); } parser::Step::Enter { .. } | parser::Step::Exit => (), @@ -170,40 +170,13 @@ impl<'a> TtIter<'a> { let mut res = vec![]; if cursor.is_root() { - if float_splits.is_empty() { - while curr != cursor { - if let Some(token) = curr.token_tree() { - res.push(token.cloned()); - } - curr = curr.bump(); - } - } else { - // let mut float_splits = float_splits.into_iter().peekable(); - // while let Some(tt) = curr.token_tree() { - // let mut tt = tt.cloned(); - // let mut tt_mut_ref = &mut tt; - // if let Some(fs) = float_splits.peek() { - // loop { - // curr = curr.bump_subtree(); - // if curr == *fs { - // float_splits.next(); - // } - // if curr.is_root() { - // break; - // } - // } - // } - // res.push(tt); - // } - - while curr != cursor { - if let Some(token) = curr.token_tree() { - res.push(token.cloned()); - } - curr = curr.bump(); - } + while curr != cursor { + let Some(token) = curr.token_tree() else { break }; + res.push(token.cloned()); + curr = curr.bump(); } } + self.inner = self.inner.as_slice()[res.len()..].iter(); let res = match res.len() { 0 | 1 => res.pop(), diff --git a/crates/tt/src/buffer.rs b/crates/tt/src/buffer.rs index c4b455e3f138a..0615a3763dfa1 100644 --- a/crates/tt/src/buffer.rs +++ b/crates/tt/src/buffer.rs @@ -7,7 +7,12 @@ use crate::{Leaf, Subtree, TokenTree}; struct EntryId(usize); #[derive(Copy, Clone, Debug, Eq, PartialEq)] -struct EntryPtr(EntryId, usize); +struct EntryPtr( + /// The index of the buffer containing the entry. + EntryId, + /// The index of the entry within the buffer. 
+ usize, +); /// Internal type which is used instead of `TokenTree` to represent a token tree /// within a `TokenBuffer`. @@ -229,7 +234,11 @@ impl<'a, Span> Cursor<'a, Span> { Some(&Entry::Subtree(_, _, entry_id)) => { Cursor::create(self.buffer, EntryPtr(entry_id, 0)) } - _ => self.bump(), + Some(Entry::End(exit)) => match exit { + Some(exit) => Cursor::create(self.buffer, *exit), + None => self, + }, + _ => Cursor::create(self.buffer, EntryPtr(self.ptr.0, self.ptr.1 + 1)), } } From 27cd509558a0fd9b47d267e445097363eb9db8ff Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 7 Feb 2023 17:41:16 +0100 Subject: [PATCH 312/501] fix jointess for floats not being set properly --- crates/parser/src/shortcuts.rs | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/crates/parser/src/shortcuts.rs b/crates/parser/src/shortcuts.rs index 18a6f838face5..21939c349435d 100644 --- a/crates/parser/src/shortcuts.rs +++ b/crates/parser/src/shortcuts.rs @@ -43,18 +43,16 @@ impl<'a> LexedStr<'a> { res.was_joint(); } res.push(kind); - } - if kind == SyntaxKind::FLOAT_NUMBER { // we set jointness for floating point numbers as a hack to inform the // parser about whether we have a `0.` or `0.1` style float - if self.text(i).split_once('.').map_or(false, |(_, it)| it.is_empty()) { - was_joint = false; - } else { - was_joint = true; + if kind == SyntaxKind::FLOAT_NUMBER { + if !self.text(i).split_once('.').map_or(true, |(_, it)| it.is_empty()) { + res.was_joint(); + } } - } else { - was_joint = true; } + + was_joint = true; } } res @@ -202,7 +200,7 @@ impl Builder<'_, '_> { (self.sink)(StrStep::Token { kind: SyntaxKind::DOT, text: "." }); if has_pseudo_dot { - assert!(right.is_empty()); + assert!(right.is_empty(), "{left}.{right}"); self.state = State::Normal; } else { (self.sink)(StrStep::Enter { kind: SyntaxKind::NAME_REF }); From 370ba94ca2c610fb91288c8198ccd02db0eb54f5 Mon Sep 17 00:00:00 2001 From: Duong Quoc Khanh Date: Wed, 8 Feb 2023 01:50:09 +0900 Subject: [PATCH 313/501] Add more tests. Add tests for control flows and `let`. 
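Editor's note, not part of the commit: the behaviour these tests exercise is the `.unsafe` postfix snippet introduced earlier in this series. Below is a minimal string-level sketch of the expansion rule with a hypothetical helper name; the real implementation works on syntax nodes and, as a later patch in this series refines, also wraps block expressions that are the bodies of `if`/`while`/`loop`/`for`:

    fn unsafe_postfix(receiver_text: &str, receiver_is_plain_block: bool) -> String {
        if receiver_is_plain_block {
            // `{ foo }.unsafe` reuses the existing braces: `unsafe { foo }`
            format!("unsafe {receiver_text}")
        } else {
            // everything else gets wrapped: `foo.unsafe` -> `unsafe { foo }`
            format!("unsafe {{ {receiver_text} }}")
        }
    }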
--- .../ide-completion/src/completions/postfix.rs | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index 19df8e2b20679..c68d106d8356b 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -541,6 +541,26 @@ fn main() { r#"fn main() { loop { foo }.$0 }"#, r#"fn main() { unsafe { loop { foo } } }"#, ); + check_edit( + "unsafe", + r#"fn main() { if true {}.$0 }"#, + r#"fn main() { unsafe { if true {} } }"#, + ); + check_edit( + "unsafe", + r#"fn main() { while true {}.$0 }"#, + r#"fn main() { unsafe { while true {} } }"#, + ); + check_edit( + "unsafe", + r#"fn main() { for i in 0..10 {}.$0 }"#, + r#"fn main() { unsafe { for i in 0..10 {} } }"#, + ); + check_edit( + "unsafe", + r#"fn main() { let x = if true {1} else {2}.$0 }"#, + r#"fn main() { let x = unsafe { if true {1} else {2} } }"#, + ); } #[test] From a756c9ad0825d0a113e406adddda8629f2db1214 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 7 Feb 2023 18:08:05 +0100 Subject: [PATCH 314/501] Fixup comments --- crates/mbe/src/syntax_bridge.rs | 6 +++++- crates/mbe/src/to_parser_input.rs | 3 +++ crates/parser/src/event.rs | 11 +++++++---- crates/parser/src/lib.rs | 4 +++- crates/parser/src/output.rs | 8 ++++---- crates/parser/src/parser.rs | 21 +++++++++------------ crates/parser/src/shortcuts.rs | 11 +++++++---- 7 files changed, 38 insertions(+), 26 deletions(-) diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index a4e3efaeb5210..fb5313401088d 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -95,7 +95,9 @@ pub fn token_tree_to_syntax_node( parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => { tree_sink.token(kind, n_raw_tokens) } - parser::Step::FloatSplit { has_pseudo_dot } => tree_sink.float_split(has_pseudo_dot), + parser::Step::FloatSplit { ends_in_dot: has_pseudo_dot } => { + tree_sink.float_split(has_pseudo_dot) + } parser::Step::Enter { kind } => tree_sink.start_node(kind), parser::Step::Exit => tree_sink.finish_node(), parser::Step::Error { msg } => tree_sink.error(msg.to_string()), @@ -797,6 +799,8 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { } impl<'a> TtTreeSink<'a> { + /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. + /// This occurs when a float literal is used as a field access. 
fn float_split(&mut self, has_pseudo_dot: bool) { let (text, _span) = match self.cursor.token_tree() { Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => { diff --git a/crates/mbe/src/to_parser_input.rs b/crates/mbe/src/to_parser_input.rs index 6d20998bb4df1..051e20b3a3f9c 100644 --- a/crates/mbe/src/to_parser_input.rs +++ b/crates/mbe/src/to_parser_input.rs @@ -47,6 +47,9 @@ pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input { res.push(kind); if kind == FLOAT_NUMBER && !inner_text.ends_with('.') { + // Tag the token as joint if it is float with a fractional part + // we use this jointness to inform the parser about what token split + // event to emit when we encounter a float literal in a field access res.was_joint(); } } diff --git a/crates/parser/src/event.rs b/crates/parser/src/event.rs index fb2616cf01375..577eb0967b426 100644 --- a/crates/parser/src/event.rs +++ b/crates/parser/src/event.rs @@ -72,11 +72,14 @@ pub(crate) enum Event { /// `n_raw_tokens = 2` is used to produced a single `>>`. Token { kind: SyntaxKind, - // Consider custom enum here? n_raw_tokens: u8, }, + /// When we parse `foo.0.0` or `foo. 0. 0` the lexer will hand us a float literal + /// instead of an integer literal followed by a dot as the lexer has no contextual knowledge. + /// This event instructs whatever consumes the events to split the float literal into + /// the corresponding parts. FloatSplitHack { - has_pseudo_dot: bool, + ends_in_dot: bool, }, Error { msg: String, @@ -128,8 +131,8 @@ pub(super) fn process(mut events: Vec) -> Output { Event::Token { kind, n_raw_tokens } => { res.token(kind, n_raw_tokens); } - Event::FloatSplitHack { has_pseudo_dot } => { - res.float_split_hack(has_pseudo_dot); + Event::FloatSplitHack { ends_in_dot } => { + res.float_split_hack(ends_in_dot); let ev = mem::replace(&mut events[i + 1], Event::tombstone()); assert!(matches!(ev, Event::Finish), "{ev:?}"); } diff --git a/crates/parser/src/lib.rs b/crates/parser/src/lib.rs index 9b895ff3ca63d..8c5aed0232ba3 100644 --- a/crates/parser/src/lib.rs +++ b/crates/parser/src/lib.rs @@ -102,7 +102,9 @@ impl TopEntryPoint { match step { Step::Enter { .. } => depth += 1, Step::Exit => depth -= 1, - Step::FloatSplit { has_pseudo_dot } => depth -= 1 + !has_pseudo_dot as usize, + Step::FloatSplit { ends_in_dot: has_pseudo_dot } => { + depth -= 1 + !has_pseudo_dot as usize + } Step::Token { .. } | Step::Error { .. 
} => (), } } diff --git a/crates/parser/src/output.rs b/crates/parser/src/output.rs index 9587c8cb1ba62..41d4c68b2d748 100644 --- a/crates/parser/src/output.rs +++ b/crates/parser/src/output.rs @@ -25,7 +25,7 @@ pub struct Output { #[derive(Debug)] pub enum Step<'a> { Token { kind: SyntaxKind, n_input_tokens: u8 }, - FloatSplit { has_pseudo_dot: bool }, + FloatSplit { ends_in_dot: bool }, Enter { kind: SyntaxKind }, Exit, Error { msg: &'a str }, @@ -70,7 +70,7 @@ impl Output { } Self::EXIT_EVENT => Step::Exit, Self::SPLIT_EVENT => { - Step::FloatSplit { has_pseudo_dot: event & Self::N_INPUT_TOKEN_MASK != 0 } + Step::FloatSplit { ends_in_dot: event & Self::N_INPUT_TOKEN_MASK != 0 } } _ => unreachable!(), } @@ -84,9 +84,9 @@ impl Output { self.event.push(e) } - pub(crate) fn float_split_hack(&mut self, has_pseudo_dot: bool) { + pub(crate) fn float_split_hack(&mut self, ends_in_dot: bool) { let e = (Self::SPLIT_EVENT as u32) << Self::TAG_SHIFT - | ((has_pseudo_dot as u32) << Self::N_INPUT_TOKEN_SHIFT) + | ((ends_in_dot as u32) << Self::N_INPUT_TOKEN_SHIFT) | Self::EVENT_MASK; self.event.push(e); } diff --git a/crates/parser/src/parser.rs b/crates/parser/src/parser.rs index 0f4fa6022919c..280416ae7c994 100644 --- a/crates/parser/src/parser.rs +++ b/crates/parser/src/parser.rs @@ -182,7 +182,7 @@ impl<'t> Parser<'t> { } /// Advances the parser by one token - pub(crate) fn split_float(&mut self, marker: Marker) -> (bool, Marker) { + pub(crate) fn split_float(&mut self, mut marker: Marker) -> (bool, Marker) { assert!(self.at(SyntaxKind::FLOAT_NUMBER)); // we have parse `.` // ``.0.1 @@ -191,26 +191,23 @@ impl<'t> Parser<'t> { // ``. 0. 1; // here we need to change the follow up parse, the return value will cause us to emulate a dot // the actual splitting happens later - let has_pseudo_dot = !self.inp.is_joint(self.pos); - let marker = if !has_pseudo_dot { - let new_pos = self.start(); + let ends_in_dot = !self.inp.is_joint(self.pos); + if !ends_in_dot { + let new_marker = self.start(); let idx = marker.pos as usize; match &mut self.events[idx] { Event::Start { forward_parent, kind } => { *kind = SyntaxKind::FIELD_EXPR; - *forward_parent = Some(new_pos.pos - marker.pos); + *forward_parent = Some(new_marker.pos - marker.pos); } _ => unreachable!(), } - // NOTE: This brings the start / finish pairs out of balance! - std::mem::forget(marker); - new_pos - } else { - marker + marker.bomb.defuse(); + marker = new_marker; }; self.pos += 1 as usize; - self.push_event(Event::FloatSplitHack { has_pseudo_dot }); - (has_pseudo_dot, marker) + self.push_event(Event::FloatSplitHack { ends_in_dot }); + (ends_in_dot, marker) } /// Advances the parser by one token, remapping its kind. 
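Editor's illustration, not part of the patch: the kind of expression the comments fixed up here describe. `S` and `tuple_field_access` are made-up names for the sketch:

    struct S((u32, u32), u32);

    fn tuple_field_access(s: S) -> u32 {
        // `s.0.1` reaches the parser as IDENT `.` FLOAT_NUMBER("0.1") because the
        // lexer has no context; the FloatSplit step breaks that float back into
        // two tuple-index accesses. When the float token ends in a dot (as in the
        // spaced form `x .2. 3` from the tests added two patches up), only one
        // index is recovered from it, which is what the `ends_in_dot` flag records.
        s.0.1
    }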
diff --git a/crates/parser/src/shortcuts.rs b/crates/parser/src/shortcuts.rs index 21939c349435d..47e4adcbbe695 100644 --- a/crates/parser/src/shortcuts.rs +++ b/crates/parser/src/shortcuts.rs @@ -43,10 +43,11 @@ impl<'a> LexedStr<'a> { res.was_joint(); } res.push(kind); - // we set jointness for floating point numbers as a hack to inform the - // parser about whether we have a `0.` or `0.1` style float + // Tag the token as joint if it is float with a fractional part + // we use this jointness to inform the parser about what token split + // event to emit when we encounter a float literal in a field access if kind == SyntaxKind::FLOAT_NUMBER { - if !self.text(i).split_once('.').map_or(true, |(_, it)| it.is_empty()) { + if !self.text(i).ends_with('.') { res.was_joint(); } } @@ -71,7 +72,9 @@ impl<'a> LexedStr<'a> { Step::Token { kind, n_input_tokens: n_raw_tokens } => { builder.token(kind, n_raw_tokens) } - Step::FloatSplit { has_pseudo_dot } => builder.float_split(has_pseudo_dot), + Step::FloatSplit { ends_in_dot: has_pseudo_dot } => { + builder.float_split(has_pseudo_dot) + } Step::Enter { kind } => builder.enter(kind), Step::Exit => builder.exit(), Step::Error { msg } => { From 8535f2bb1b1f554e3ff1633b5428b426ce50892b Mon Sep 17 00:00:00 2001 From: Duong Quoc Khanh Date: Wed, 8 Feb 2023 03:37:20 +0900 Subject: [PATCH 315/501] Handle edge cases. Handle case when BlockExpr is child of IfExpr, WhileExpr, LoopExpr, ForExpr. An additional { } will be added when: - It is not a BlockExpr - It is a BlockExpr and a child of IfExpr, WhileExpr, LoopExpr, ForExpr. --- .../ide-completion/src/completions/postfix.rs | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index c68d106d8356b..83d457c215964 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -6,7 +6,7 @@ use hir::{Documentation, HasAttrs}; use ide_db::{imports::insert_use::ImportScope, ty_filter::TryEnum, SnippetCap}; use syntax::{ ast::{self, make, AstNode, AstToken}, - SyntaxKind::{EXPR_STMT, STMT_LIST}, + SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR}, TextRange, TextSize, }; use text_edit::TextEdit; @@ -123,9 +123,19 @@ pub(crate) fn complete_postfix( postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc); postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc); - let unsafe_completion_string = match dot_receiver { - ast::Expr::BlockExpr(_) => format!("unsafe {receiver_text}"), - _ => format!("unsafe {{ {receiver_text} }}"), + let mut unsafe_should_be_wrapped = true; + if dot_receiver.syntax().kind() == BLOCK_EXPR { + unsafe_should_be_wrapped = false; + if let Some(parent) = dot_receiver.syntax().parent() { + if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) { + unsafe_should_be_wrapped = true; + } + } + }; + let unsafe_completion_string = if unsafe_should_be_wrapped { + format!("unsafe {{ {receiver_text} }}") + } else { + format!("unsafe {receiver_text}") }; postfix_snippet("unsafe", "unsafe {}", &unsafe_completion_string).add_to(acc); From 2666e6eda8220af036973881a35bdc2d85d6d8a2 Mon Sep 17 00:00:00 2001 From: Duong Quoc Khanh Date: Wed, 8 Feb 2023 04:04:10 +0900 Subject: [PATCH 316/501] Add test for `LetElse` case. Postfix completion is not triggered. 
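Editor's summary for quick reference, taken from the test expectations in this and the neighbouring patches:

- `foo.unsafe` expands to `unsafe { foo }`
- `{ foo }.unsafe` expands to `unsafe { foo }` (the existing braces are reused)
- `if true {}.unsafe`, `while true {}.unsafe`, `loop { foo }.unsafe` and `for i in 0..10 {}.unsafe` wrap the whole control-flow expression, e.g. `unsafe { if true {} }`
- in the `let ... else` test below the completion does not fire, so the typed `.unsafe` text is left in place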
--- crates/ide-completion/src/completions/postfix.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/crates/ide-completion/src/completions/postfix.rs b/crates/ide-completion/src/completions/postfix.rs index 83d457c215964..90c523735da80 100644 --- a/crates/ide-completion/src/completions/postfix.rs +++ b/crates/ide-completion/src/completions/postfix.rs @@ -571,6 +571,13 @@ fn main() { r#"fn main() { let x = if true {1} else {2}.$0 }"#, r#"fn main() { let x = unsafe { if true {1} else {2} } }"#, ); + + // completion will not be triggered + check_edit( + "unsafe", + r#"fn main() { let x = true else {panic!()}.$0}"#, + r#"fn main() { let x = true else {panic!()}.unsafe}"#, + ); } #[test] From c7010eda1ba4e619952be9fe87487c19ec924eaa Mon Sep 17 00:00:00 2001 From: Pascal Kuthe Date: Tue, 7 Feb 2023 22:36:44 +0100 Subject: [PATCH 317/501] Support DidChangeWorkspaceFolders capability --- crates/rust-analyzer/src/bin/main.rs | 35 ++++++++----------- crates/rust-analyzer/src/caps.rs | 8 +++-- crates/rust-analyzer/src/config.rs | 20 ++++++++++- .../rust-analyzer/src/diagnostics/to_proto.rs | 2 +- crates/rust-analyzer/src/main_loop.rs | 26 +++++++++++++- .../rust-analyzer/tests/slow-tests/support.rs | 1 + 6 files changed, 66 insertions(+), 26 deletions(-) diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index a4902d63c6832..4de022b6ed607 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -10,7 +10,6 @@ mod rustc_wrapper; use std::{env, fs, path::Path, process}; use lsp_server::Connection; -use project_model::ProjectManifest; use rust_analyzer::{cli::flags, config::Config, from_json, Result}; use vfs::AbsPathBuf; @@ -168,7 +167,18 @@ fn run_server() -> Result<()> { } }; - let mut config = Config::new(root_path, initialize_params.capabilities); + let workspace_roots = initialize_params + .workspace_folders + .map(|workspaces| { + workspaces + .into_iter() + .filter_map(|it| it.uri.to_file_path().ok()) + .filter_map(|it| AbsPathBuf::try_from(it).ok()) + .collect::>() + }) + .filter(|workspaces| !workspaces.is_empty()) + .unwrap_or_else(|| vec![root_path.clone()]); + let mut config = Config::new(root_path, initialize_params.capabilities, workspace_roots); if let Some(json) = initialize_params.initialization_options { if let Err(e) = config.update(json) { use lsp_types::{ @@ -202,25 +212,8 @@ fn run_server() -> Result<()> { tracing::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default()); } - if config.linked_projects().is_empty() && config.detached_files().is_empty() { - let workspace_roots = initialize_params - .workspace_folders - .map(|workspaces| { - workspaces - .into_iter() - .filter_map(|it| it.uri.to_file_path().ok()) - .filter_map(|it| AbsPathBuf::try_from(it).ok()) - .collect::>() - }) - .filter(|workspaces| !workspaces.is_empty()) - .unwrap_or_else(|| vec![config.root_path().clone()]); - - let discovered = ProjectManifest::discover_all(&workspace_roots); - tracing::info!("discovered projects: {:?}", discovered); - if discovered.is_empty() { - tracing::error!("failed to find any projects in {:?}", workspace_roots); - } - config.discovered_projects = Some(discovered); + if !config.has_linked_projects() && config.detached_files().is_empty() { + config.rediscover_workspaces(); } rust_analyzer::main_loop(config, connection)?; diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs index 122d2e6ff1b76..841861635c6ee 100644 --- 
a/crates/rust-analyzer/src/caps.rs +++ b/crates/rust-analyzer/src/caps.rs @@ -10,7 +10,8 @@ use lsp_types::{ SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions, - WorkspaceFileOperationsServerCapabilities, WorkspaceServerCapabilities, + WorkspaceFileOperationsServerCapabilities, WorkspaceFoldersServerCapabilities, + WorkspaceServerCapabilities, }; use serde_json::json; @@ -80,7 +81,10 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities { color_provider: None, execute_command_provider: None, workspace: Some(WorkspaceServerCapabilities { - workspace_folders: None, + workspace_folders: Some(WorkspaceFoldersServerCapabilities { + supported: Some(true), + change_notifications: Some(OneOf::Left(true)), + }), file_operations: Some(WorkspaceFileOperationsServerCapabilities { did_create: None, will_create: None, diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index c8075aefbbeca..67091dc7f2281 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -524,6 +524,7 @@ impl Default for ConfigData { #[derive(Debug, Clone)] pub struct Config { pub discovered_projects: Option>, + pub workspace_roots: Vec, caps: lsp_types::ClientCapabilities, root_path: AbsPathBuf, data: ConfigData, @@ -720,7 +721,11 @@ impl fmt::Display for ConfigUpdateError { } impl Config { - pub fn new(root_path: AbsPathBuf, caps: ClientCapabilities) -> Self { + pub fn new( + root_path: AbsPathBuf, + caps: ClientCapabilities, + workspace_roots: Vec, + ) -> Self { Config { caps, data: ConfigData::default(), @@ -728,9 +733,19 @@ impl Config { discovered_projects: None, root_path, snippets: Default::default(), + workspace_roots, } } + pub fn rediscover_workspaces(&mut self) { + let discovered = ProjectManifest::discover_all(&self.workspace_roots); + tracing::info!("discovered projects: {:?}", discovered); + if discovered.is_empty() { + tracing::error!("failed to find any projects in {:?}", &self.workspace_roots); + } + self.discovered_projects = Some(discovered); + } + pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> { tracing::info!("updating config from JSON: {:#}", json); if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) { @@ -827,6 +842,9 @@ macro_rules! 
try_or_def { } impl Config { + pub fn has_linked_projects(&self) -> bool { + !self.data.linkedProjects.is_empty() + } pub fn linked_projects(&self) -> Vec { match self.data.linkedProjects.as_slice() { [] => match self.discovered_projects.as_ref() { diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index acb416a068932..55b89019b47a8 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs @@ -534,7 +534,7 @@ mod tests { let (sender, _) = crossbeam_channel::unbounded(); let state = GlobalState::new( sender, - Config::new(workspace_root.to_path_buf(), ClientCapabilities::default()), + Config::new(workspace_root.to_path_buf(), ClientCapabilities::default(), Vec::new()), ); let snap = state.snapshot(); let mut actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap); diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 4290b7760687e..346a74e270f99 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -14,7 +14,7 @@ use ide_db::base_db::{SourceDatabaseExt, VfsPath}; use itertools::Itertools; use lsp_server::{Connection, Notification, Request}; use lsp_types::notification::Notification as _; -use vfs::{ChangeKind, FileId}; +use vfs::{AbsPathBuf, ChangeKind, FileId}; use crate::{ config::Config, @@ -933,6 +933,30 @@ impl GlobalState { Ok(()) })? + .on::(|this, params| { + let config = Arc::make_mut(&mut this.config); + + for workspace in params.event.removed { + let Ok(path) = workspace.uri.to_file_path() else { continue }; + let Ok(path) = AbsPathBuf::try_from(path) else { continue }; + let Some(position) = config.workspace_roots.iter().position(|it| it == &path) else { continue }; + config.workspace_roots.remove(position); + } + + let added = params + .event + .added + .into_iter() + .filter_map(|it| it.uri.to_file_path().ok()) + .filter_map(|it| AbsPathBuf::try_from(it).ok()); + config.workspace_roots.extend(added); + if !config.has_linked_projects() && config.detached_files().is_empty() { + config.rediscover_workspaces(); + this.fetch_workspaces_queue.request_op("client workspaces changed".to_string()) + } + + Ok(()) + })? 
.on::(|this, params| { for change in params.changes { if let Ok(path) = from_proto::abs_path(&change.uri) { diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index 269212ebb99c1..b7275df0f4019 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -137,6 +137,7 @@ impl<'a> Project<'a> { })), ..Default::default() }, + Vec::new(), ); config.discovered_projects = Some(discovered_projects); config.update(self.config).expect("invalid config"); From 2b4a5374ee12e8c7094cc8cd261366756cf6d81c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 7 Feb 2023 22:42:03 +0100 Subject: [PATCH 318/501] Properly use location links for type hints of impl Future and its assoc type --- crates/hir-ty/src/display.rs | 28 ++++++++++++++-- crates/ide/src/inlay_hints.rs | 15 ++++++--- crates/ide/src/inlay_hints/bind_pat.rs | 30 +++++++++++++++-- crates/ide/src/inlay_hints/chaining.rs | 45 ++++++++++++++++++++++++-- 4 files changed, 106 insertions(+), 12 deletions(-) diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index ae2162dd7cd24..1d4bf9ecb7473 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -12,7 +12,7 @@ use hir_def::{ find_path, generics::{TypeOrConstParamData, TypeParamProvenance}, item_scope::ItemInNs, - lang_item::LangItem, + lang_item::{LangItem, LangItemTarget}, path::{Path, PathKind}, type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef}, visibility::Visibility, @@ -731,8 +731,30 @@ impl HirDisplay for Ty { )?; // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution } - ImplTraitId::AsyncBlockTypeImplTrait(..) 
=> { - write!(f, "impl Future { + let future_trait = db + .lang_item(body.module(db.upcast()).krate(), LangItem::Future) + .and_then(LangItemTarget::as_trait); + let output = future_trait.and_then(|t| { + db.trait_data(t).associated_type_by_name(&hir_expand::name!(Output)) + }); + write!(f, "impl ")?; + if let Some(t) = future_trait { + f.start_location_link(t.into()); + } + write!(f, "Future")?; + if let Some(_) = future_trait { + f.end_location_link(); + } + write!(f, "<")?; + if let Some(t) = output { + f.start_location_link(t.into()); + } + write!(f, "Output")?; + if let Some(_) = output { + f.end_location_link(); + } + write!(f, " = ")?; parameters.at(Interner, 0).hir_fmt(f)?; write!(f, ">")?; } diff --git a/crates/ide/src/inlay_hints.rs b/crates/ide/src/inlay_hints.rs index a9581db9ad493..ac477339ec233 100644 --- a/crates/ide/src/inlay_hints.rs +++ b/crates/ide/src/inlay_hints.rs @@ -294,10 +294,12 @@ fn label_of_ty( ) -> Result<(), HirDisplayError> { let iter_item_type = hint_iterator(sema, famous_defs, &ty); match iter_item_type { - Some((iter_trait, ty)) => { + Some((iter_trait, item, ty)) => { const LABEL_START: &str = "impl "; const LABEL_ITERATOR: &str = "Iterator"; - const LABEL_MIDDLE: &str = ", famous_defs: &FamousDefs<'_, '_>, ty: &hir::Type, -) -> Option<(hir::Trait, hir::Type)> { +) -> Option<(hir::Trait, hir::TypeAlias, hir::Type)> { let db = sema.db; let strukt = ty.strip_references().as_adt()?; let krate = strukt.module(db).krate(); @@ -460,7 +467,7 @@ fn hint_iterator( _ => None, })?; if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) { - return Some((iter_trait, ty)); + return Some((iter_trait, assoc_type_item, ty)); } } diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index da44d952970b3..f5b5c44737438 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -330,7 +330,20 @@ fn main(a: SliceIter<'_, Container>) { ), tooltip: "", }, - ") { ), tooltip: "", }, - ">", + "<", + InlayHintLabelPart { + text: "Item", + linked_location: Some( + FileRange { + file_id: FileId( + 1, + ), + range: 2643..2647, + }, + ), + tooltip: "", + }, + " = &&str>>", ], }, InlayHint { diff --git a/crates/ide/src/inlay_hints/chaining.rs b/crates/ide/src/inlay_hints/chaining.rs index 222ee59be8b48..0c54f084c19eb 100644 --- a/crates/ide/src/inlay_hints/chaining.rs +++ b/crates/ide/src/inlay_hints/chaining.rs @@ -440,7 +440,20 @@ fn main() { ), tooltip: "", }, - "", + "<", + InlayHintLabelPart { + text: "Item", + linked_location: Some( + FileRange { + file_id: FileId( + 1, + ), + range: 2643..2647, + }, + ), + tooltip: "", + }, + " = ()>", ], }, InlayHint { @@ -460,7 +473,20 @@ fn main() { ), tooltip: "", }, - "", + "<", + InlayHintLabelPart { + text: "Item", + linked_location: Some( + FileRange { + file_id: FileId( + 1, + ), + range: 2643..2647, + }, + ), + tooltip: "", + }, + " = ()>", ], }, InlayHint { @@ -480,7 +506,20 @@ fn main() { ), tooltip: "", }, - "", + "<", + InlayHintLabelPart { + text: "Item", + linked_location: Some( + FileRange { + file_id: FileId( + 1, + ), + range: 2643..2647, + }, + ), + tooltip: "", + }, + " = ()>", ], }, InlayHint { From 15d4728cda673e90b4db1ea2c60d18a6fae306d0 Mon Sep 17 00:00:00 2001 From: kadmin Date: Sun, 5 Feb 2023 22:14:40 +0000 Subject: [PATCH 319/501] Add de-init to destination place --- .../rustc_mir_transform/src/large_enums.rs | 17 ++++--- compiler/rustc_mir_transform/src/lib.rs | 2 +- .../enum_opt.cand.EnumSizeOpt.32bit.diff | 45 
+++++++++++++------ .../enum_opt.cand.EnumSizeOpt.64bit.diff | 45 +++++++++++++------ .../enum_opt.invalid.EnumSizeOpt.32bit.diff | 17 +++---- .../enum_opt.invalid.EnumSizeOpt.64bit.diff | 17 +++---- tests/mir-opt/enum_opt.rs | 28 +++++++----- .../enum_opt.trunc.EnumSizeOpt.32bit.diff | 21 ++++----- .../enum_opt.trunc.EnumSizeOpt.64bit.diff | 21 ++++----- .../enum_opt.unin.EnumSizeOpt.32bit.diff | 44 ++++++++++++------ .../enum_opt.unin.EnumSizeOpt.64bit.diff | 44 ++++++++++++------ 11 files changed, 181 insertions(+), 120 deletions(-) diff --git a/compiler/rustc_mir_transform/src/large_enums.rs b/compiler/rustc_mir_transform/src/large_enums.rs index 3f8662ad6971f..89f8de235835a 100644 --- a/compiler/rustc_mir_transform/src/large_enums.rs +++ b/compiler/rustc_mir_transform/src/large_enums.rs @@ -4,6 +4,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_middle::mir::interpret::AllocId; use rustc_middle::mir::*; use rustc_middle::ty::{self, AdtDef, Const, ParamEnv, Ty, TyCtxt}; +use rustc_session::Session; use rustc_target::abi::{HasDataLayout, Size, TagEncoding, Variants}; /// A pass that seeks to optimize unnecessary moves of large enum types, if there is a large @@ -28,14 +29,12 @@ pub struct EnumSizeOpt { } impl<'tcx> MirPass<'tcx> for EnumSizeOpt { + fn is_enabled(&self, sess: &Session) -> bool { + sess.opts.unstable_opts.unsound_mir_opts || sess.mir_opt_level() >= 3 + } fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - let sess = tcx.sess; - // FIXME(julianknodt): one thing noticed while testing this mir-opt is that there is a - // different layout of large enums on wasm. It's not clear what is causing this layout - // difference, as it should be identical to i686 (32 bit). - if (!sess.opts.unstable_opts.unsound_mir_opts) || sess.mir_opt_level() < 3 { - return; - } + // NOTE: This pass may produce different MIR based on the alignment of the target + // platform, but it will still be valid. self.optim(tcx, body); } } @@ -254,6 +253,9 @@ impl EnumSizeOpt { )), }; + let deinit_old = + Statement { source_info, kind: StatementKind::Deinit(box dst) }; + let copy_bytes = Statement { source_info, kind: StatementKind::Intrinsic( @@ -279,6 +281,7 @@ impl EnumSizeOpt { dst_cast, src_ptr, src_cast, + deinit_old, copy_bytes, store_dead, ] diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index be3652dd3e7bd..45cd4024c9f57 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -547,7 +547,6 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { tcx, body, &[ - &large_enums::EnumSizeOpt { discrepancy: 128 }, &reveal_all::RevealAll, // has to be done before inlining, since inlined code is in RevealAll mode. &lower_slice_len::LowerSliceLenCalls, // has to be done before inlining, otherwise actual call will be almost always inlined. Also simple, so can just do first &unreachable_prop::UnreachablePropagation, @@ -586,6 +585,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { &simplify::SimplifyLocals::new("final"), &multiple_return_terminators::MultipleReturnTerminators, &deduplicate_blocks::DeduplicateBlocks, + &large_enums::EnumSizeOpt { discrepancy: 128 }, // Some cleanup necessary at least for LLVM and potentially other codegen backends. &add_call_guards::CriticalCallEdges, // Dump the end result for testing and debugging purposes. 
diff --git a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff index e0ba46c15f4a9..b139deeee1fc9 100644 --- a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.32bit.diff @@ -1,11 +1,11 @@ - // MIR for `cand` before EnumSizeOpt + // MIR for `cand` after EnumSizeOpt - fn cand() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:15: +0:15 + fn cand() -> Candidate { + let mut _0: Candidate; // return place in scope 0 at $DIR/enum_opt.rs:+0:18: +0:27 let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:34 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:24: +2:33 + let mut _3: [u8; 8196]; // in scope 0 at $DIR/enum_opt.rs:+2:24: +2:33 + let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 + let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 + let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 @@ -14,25 +14,29 @@ + let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 + let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 + let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _12: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _13: isize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _14: usize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _15: usize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _16: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _17: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _18: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _19: *const u8; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 scope 1 { debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 - ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 + _1 = Candidate::Small(const 1_u8); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 - _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + _3 = [const 1_u8; 8196]; // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 + _2 = Candidate::Large(move _3); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 -+ _4 = const [2_usize, 8196_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _4 = const [2_usize, 8197_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + _6 = _5 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 @@ -40,12 +44,25 @@ + _9 = _8 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + _10 = &raw const _2; // scope 1 at 
$DIR/enum_opt.rs:+2:3: +2:34 + _11 = _10 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ Deinit(_8); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + copy_nonoverlapping(dst = _9, src = _11, count = _7); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:15: +3:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 - return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 +- _0 = move _1; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ StorageLive(_12); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _12 = const [2_usize, 8197_usize]; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _13 = discriminant(_1); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _14 = _13 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _15 = _12[_14]; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _16 = &raw mut _0; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _17 = _16 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _18 = &raw const _1; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _19 = _18 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ Deinit(_16); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ copy_nonoverlapping(dst = _17, src = _19, count = _15); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ StorageDead(_12); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+4:1: +4:2 + return; // scope 0 at $DIR/enum_opt.rs:+4:2: +4:2 } } diff --git a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff index 67439dba9c947..b139deeee1fc9 100644 --- a/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.cand.EnumSizeOpt.64bit.diff @@ -1,11 +1,11 @@ - // MIR for `cand` before EnumSizeOpt + // MIR for `cand` after EnumSizeOpt - fn cand() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:15: +0:15 + fn cand() -> Candidate { + let mut _0: Candidate; // return place in scope 0 at $DIR/enum_opt.rs:+0:18: +0:27 let mut _1: Candidate; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 let mut _2: Candidate; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:34 - let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:24: +2:33 + let mut _3: [u8; 8196]; // in scope 0 at $DIR/enum_opt.rs:+2:24: +2:33 + let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 + let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 + let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 @@ -14,25 +14,29 @@ + let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 + let mut _10: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 + let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:34 ++ let mut _12: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _13: isize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _14: usize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _15: usize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _16: *mut Candidate; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _17: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _18: *const Candidate; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _19: *const u8; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 scope 1 { debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } 
bb0: { StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 - ((_1 as Small).0: u8) = const 1_u8; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 + _1 = Candidate::Small(const 1_u8); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:34 StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 - _3 = [const 1_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + _3 = [const 1_u8; 8196]; // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 + _2 = Candidate::Large(move _3); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 -+ _4 = const [2_usize, 8200_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ _4 = const [2_usize, 8197_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + _6 = _5 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 @@ -40,12 +44,25 @@ + _9 = _8 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + _11 = _10 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 ++ Deinit(_8); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + copy_nonoverlapping(dst = _9, src = _11, count = _7); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 + StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:15: +3:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 - return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 +- _0 = move _1; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ StorageLive(_12); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _12 = const [2_usize, 8197_usize]; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _13 = discriminant(_1); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _14 = _13 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _15 = _12[_14]; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _16 = &raw mut _0; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _17 = _16 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _18 = &raw const _1; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _19 = _18 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ Deinit(_16); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ copy_nonoverlapping(dst = _17, src = _19, count = _15); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ StorageDead(_12); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+4:1: +4:2 + return; // scope 0 at $DIR/enum_opt.rs:+4:2: +4:2 } } diff --git a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff index db2efa195a369..a80001149ebb7 100644 --- a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.32bit.diff @@ -1,8 +1,8 @@ - // MIR for `invalid` before EnumSizeOpt + // MIR for `invalid` after 
EnumSizeOpt - fn invalid() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:18: +0:18 + fn invalid() -> InvalidIdxs { + let mut _0: InvalidIdxs; // return place in scope 0 at $DIR/enum_opt.rs:+0:21: +0:32 let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:36 let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:26: +2:35 @@ -12,20 +12,17 @@ bb0: { StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:29 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:29 + _1 = InvalidIdxs::A; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:29 StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:26: +2:35 _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:26: +2:35 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 + _2 = InvalidIdxs::Large(move _3); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:35: +2:36 _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:36 StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:35: +2:36 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:18: +3:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 - return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 + _0 = move _1; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+4:1: +4:2 + return; // scope 0 at $DIR/enum_opt.rs:+4:2: +4:2 } } diff --git a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff index db2efa195a369..a80001149ebb7 100644 --- a/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.invalid.EnumSizeOpt.64bit.diff @@ -1,8 +1,8 @@ - // MIR for `invalid` before EnumSizeOpt + // MIR for `invalid` after EnumSizeOpt - fn invalid() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:18: +0:18 + fn invalid() -> InvalidIdxs { + let mut _0: InvalidIdxs; // return place in scope 0 at $DIR/enum_opt.rs:+0:21: +0:32 let mut _1: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 let mut _2: InvalidIdxs; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:36 let mut _3: [u64; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:26: +2:35 @@ -12,20 +12,17 @@ bb0: { StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:29 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:29 + _1 = InvalidIdxs::A; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:29 StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:26: +2:35 _3 = [const 0_u64; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:26: +2:35 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 - ((_2 as Large).0: [u64; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 + _2 = InvalidIdxs::Large(move _3); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:36 StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:35: +2:36 _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:36 StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:35: +2:36 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:18: +3:2 - 
StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 - return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 + _0 = move _1; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+4:1: +4:2 + return; // scope 0 at $DIR/enum_opt.rs:+4:2: +4:2 } } diff --git a/tests/mir-opt/enum_opt.rs b/tests/mir-opt/enum_opt.rs index 6876c56cfa4ed..2768d70804926 100644 --- a/tests/mir-opt/enum_opt.rs +++ b/tests/mir-opt/enum_opt.rs @@ -1,20 +1,19 @@ +// unit-test: EnumSizeOpt // EMIT_MIR_FOR_EACH_BIT_WIDTH -// compile-flags: -Zunsound-mir-opts -Zmir-opt-level=3 -// ignore-wasm32 +// compile-flags: -Zunsound-mir-opts #![feature(arbitrary_enum_discriminant, repr128)] // Tests that an enum with a variant with no data gets correctly transformed. -#[repr(C)] pub enum NoData { - Large([u8; 8192]), + Large([u8; 8196]), None, } // Tests that an enum with a variant with data that is a valid candidate gets transformed. pub enum Candidate { Small(u8), - Large([u64; 1024]), + Large([u8; 8196]), } // Tests that an enum which has a discriminant much higher than the variant does not get @@ -43,34 +42,39 @@ pub enum RandOrderDiscr { } // EMIT_MIR enum_opt.unin.EnumSizeOpt.diff -pub fn unin() { +pub fn unin() -> NoData { let mut a = NoData::None; - a = NoData::Large([1; 8192]); + a = NoData::Large([1; 8196]); + a } // EMIT_MIR enum_opt.cand.EnumSizeOpt.diff -pub fn cand() { +pub fn cand() -> Candidate { let mut a = Candidate::Small(1); - a = Candidate::Large([1; 1024]); + a = Candidate::Large([1; 8196]); + a } // EMIT_MIR enum_opt.invalid.EnumSizeOpt.diff -pub fn invalid() { +pub fn invalid() -> InvalidIdxs { let mut a = InvalidIdxs::A; a = InvalidIdxs::Large([0; 1024]); + a } // EMIT_MIR enum_opt.trunc.EnumSizeOpt.diff -pub fn trunc() { +pub fn trunc() -> NotTrunctable { let mut a = NotTrunctable::A; a = NotTrunctable::B([0; 1024]); a = NotTrunctable::C([0; 4096]); + a } -pub fn rand_order() { +pub fn rand_order() -> RandOrderDiscr { let mut a = RandOrderDiscr::A; a = RandOrderDiscr::B([0; 1024]); a = RandOrderDiscr::C; + a } pub fn main() { diff --git a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff index b9d6765d8c1ca..1ef79044d4fc6 100644 --- a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.32bit.diff @@ -1,8 +1,8 @@ - // MIR for `trunc` before EnumSizeOpt + // MIR for `trunc` after EnumSizeOpt - fn trunc() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:16: +0:16 + fn trunc() -> NotTrunctable { + let mut _0: NotTrunctable; // return place in scope 0 at $DIR/enum_opt.rs:+0:19: +0:32 let mut _1: NotTrunctable; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 let mut _2: NotTrunctable; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:34 let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:24: +2:33 @@ -14,29 +14,24 @@ bb0: { StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:31 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:31 + _1 = NotTrunctable::A; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:31 StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 - ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 - discriminant(_2) = 1; // scope 1 at 
$DIR/enum_opt.rs:+2:7: +2:34 + _2 = NotTrunctable::B(move _3); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:+3:24: +3:33 _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:+3:24: +3:33 - Deinit(_4); // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 - ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 - discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 + _4 = NotTrunctable::C(move _5); // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 StorageDead(_5); // scope 1 at $DIR/enum_opt.rs:+3:33: +3:34 _1 = move _4; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:34 StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+3:33: +3:34 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:16: +4:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+4:1: +4:2 - return; // scope 0 at $DIR/enum_opt.rs:+4:2: +4:2 + _0 = move _1; // scope 1 at $DIR/enum_opt.rs:+4:3: +4:4 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+5:1: +5:2 + return; // scope 0 at $DIR/enum_opt.rs:+5:2: +5:2 } } diff --git a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff index b9d6765d8c1ca..1ef79044d4fc6 100644 --- a/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.trunc.EnumSizeOpt.64bit.diff @@ -1,8 +1,8 @@ - // MIR for `trunc` before EnumSizeOpt + // MIR for `trunc` after EnumSizeOpt - fn trunc() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:16: +0:16 + fn trunc() -> NotTrunctable { + let mut _0: NotTrunctable; // return place in scope 0 at $DIR/enum_opt.rs:+0:19: +0:32 let mut _1: NotTrunctable; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 let mut _2: NotTrunctable; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:34 let mut _3: [u8; 1024]; // in scope 0 at $DIR/enum_opt.rs:+2:24: +2:33 @@ -14,29 +14,24 @@ bb0: { StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:31 - discriminant(_1) = 0; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:31 + _1 = NotTrunctable::A; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:31 StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 _3 = [const 0_u8; 1024]; // scope 1 at $DIR/enum_opt.rs:+2:24: +2:33 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 - ((_2 as B).0: [u8; 1024]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 - discriminant(_2) = 1; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 + _2 = NotTrunctable::B(move _3); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:34 StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:34 StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:33: +2:34 StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 StorageLive(_5); // scope 1 at $DIR/enum_opt.rs:+3:24: +3:33 _5 = [const 0_u8; 4096]; // scope 1 at $DIR/enum_opt.rs:+3:24: +3:33 - Deinit(_4); // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 - ((_4 as C).0: [u8; 4096]) = move _5; // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 - discriminant(_4) = 2; // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 + _4 = NotTrunctable::C(move _5); // scope 1 at $DIR/enum_opt.rs:+3:7: +3:34 StorageDead(_5); // scope 1 at 
$DIR/enum_opt.rs:+3:33: +3:34 _1 = move _4; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:34 StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+3:33: +3:34 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:16: +4:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+4:1: +4:2 - return; // scope 0 at $DIR/enum_opt.rs:+4:2: +4:2 + _0 = move _1; // scope 1 at $DIR/enum_opt.rs:+4:3: +4:4 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+5:1: +5:2 + return; // scope 0 at $DIR/enum_opt.rs:+5:2: +5:2 } } diff --git a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff index 168677b173d53..ad9f12cf95953 100644 --- a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff +++ b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.32bit.diff @@ -1,11 +1,11 @@ - // MIR for `unin` before EnumSizeOpt + // MIR for `unin` after EnumSizeOpt - fn unin() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:15: +0:15 + fn unin() -> NoData { + let mut _0: NoData; // return place in scope 0 at $DIR/enum_opt.rs:+0:18: +0:24 let mut _1: NoData; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:31 - let mut _3: [u8; 8192]; // in scope 0 at $DIR/enum_opt.rs:+2:21: +2:30 + let mut _3: [u8; 8196]; // in scope 0 at $DIR/enum_opt.rs:+2:21: +2:30 + let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 + let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 + let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 @@ -14,24 +14,29 @@ + let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 + let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 + let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _12: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _13: isize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _14: usize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _15: usize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _16: *mut NoData; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _17: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _18: *const NoData; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _19: *const u8; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 scope 1 { debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:27 - discriminant(_1) = 1; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:27 + _1 = NoData::None; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:27 StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:21: +2:30 - _3 = [const 1_u8; 8192]; // scope 1 at $DIR/enum_opt.rs:+2:21: +2:30 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 - ((_2 as Large).0: [u8; 8192]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 - discriminant(_2) = 0; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 + _3 = [const 1_u8; 8196]; // scope 1 at $DIR/enum_opt.rs:+2:21: +2:30 + _2 = NoData::Large(move _3); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:30: +2:31 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 -+ _4 = const [8196_usize, 4_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _4 = const [8197_usize, 1_usize]; // scope 1 at 
$DIR/enum_opt.rs:+2:3: +2:31 + _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + _6 = _5 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 @@ -39,12 +44,25 @@ + _9 = _8 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + _11 = _10 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ Deinit(_8); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + copy_nonoverlapping(dst = _9, src = _11, count = _7); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:30: +2:31 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:15: +3:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 - return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 +- _0 = move _1; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ StorageLive(_12); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _12 = const [8197_usize, 1_usize]; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _13 = discriminant(_1); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _14 = _13 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _15 = _12[_14]; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _16 = &raw mut _0; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _17 = _16 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _18 = &raw const _1; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _19 = _18 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ Deinit(_16); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ copy_nonoverlapping(dst = _17, src = _19, count = _15); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ StorageDead(_12); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+4:1: +4:2 + return; // scope 0 at $DIR/enum_opt.rs:+4:2: +4:2 } } diff --git a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff index 168677b173d53..ad9f12cf95953 100644 --- a/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff +++ b/tests/mir-opt/enum_opt.unin.EnumSizeOpt.64bit.diff @@ -1,11 +1,11 @@ - // MIR for `unin` before EnumSizeOpt + // MIR for `unin` after EnumSizeOpt - fn unin() -> () { - let mut _0: (); // return place in scope 0 at $DIR/enum_opt.rs:+0:15: +0:15 + fn unin() -> NoData { + let mut _0: NoData; // return place in scope 0 at $DIR/enum_opt.rs:+0:18: +0:24 let mut _1: NoData; // in scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 let mut _2: NoData; // in scope 0 at $DIR/enum_opt.rs:+2:7: +2:31 - let mut _3: [u8; 8192]; // in scope 0 at $DIR/enum_opt.rs:+2:21: +2:30 + let mut _3: [u8; 8196]; // in scope 0 at $DIR/enum_opt.rs:+2:21: +2:30 + let mut _4: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 + let mut _5: isize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 + let mut _6: usize; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 @@ -14,24 +14,29 @@ + let mut _9: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 + let mut _10: *const NoData; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 + let mut _11: *const u8; // in scope 0 at $DIR/enum_opt.rs:+2:3: +2:31 ++ let mut _12: [usize; 2]; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _13: isize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _14: usize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _15: usize; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _16: *mut 
NoData; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _17: *mut u8; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _18: *const NoData; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 ++ let mut _19: *const u8; // in scope 0 at $DIR/enum_opt.rs:+3:3: +3:4 scope 1 { debug a => _1; // in scope 1 at $DIR/enum_opt.rs:+1:7: +1:12 } bb0: { StorageLive(_1); // scope 0 at $DIR/enum_opt.rs:+1:7: +1:12 - Deinit(_1); // scope 0 at $DIR/enum_opt.rs:+1:15: +1:27 - discriminant(_1) = 1; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:27 + _1 = NoData::None; // scope 0 at $DIR/enum_opt.rs:+1:15: +1:27 StorageLive(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 StorageLive(_3); // scope 1 at $DIR/enum_opt.rs:+2:21: +2:30 - _3 = [const 1_u8; 8192]; // scope 1 at $DIR/enum_opt.rs:+2:21: +2:30 - Deinit(_2); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 - ((_2 as Large).0: [u8; 8192]) = move _3; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 - discriminant(_2) = 0; // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 + _3 = [const 1_u8; 8196]; // scope 1 at $DIR/enum_opt.rs:+2:21: +2:30 + _2 = NoData::Large(move _3); // scope 1 at $DIR/enum_opt.rs:+2:7: +2:31 StorageDead(_3); // scope 1 at $DIR/enum_opt.rs:+2:30: +2:31 - _1 = move _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + StorageLive(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 -+ _4 = const [8196_usize, 4_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ _4 = const [8197_usize, 1_usize]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + _5 = discriminant(_2); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + _6 = _5 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + _7 = _4[_6]; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 @@ -39,12 +44,25 @@ + _9 = _8 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + _10 = &raw const _2; // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + _11 = _10 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 ++ Deinit(_8); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + copy_nonoverlapping(dst = _9, src = _11, count = _7); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 + StorageDead(_4); // scope 1 at $DIR/enum_opt.rs:+2:3: +2:31 StorageDead(_2); // scope 1 at $DIR/enum_opt.rs:+2:30: +2:31 - _0 = const (); // scope 0 at $DIR/enum_opt.rs:+0:15: +3:2 - StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+3:1: +3:2 - return; // scope 0 at $DIR/enum_opt.rs:+3:2: +3:2 +- _0 = move _1; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ StorageLive(_12); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _12 = const [8197_usize, 1_usize]; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _13 = discriminant(_1); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _14 = _13 as usize (IntToInt); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _15 = _12[_14]; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _16 = &raw mut _0; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _17 = _16 as *mut u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _18 = &raw const _1; // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ _19 = _18 as *const u8 (PtrToPtr); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ Deinit(_16); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ copy_nonoverlapping(dst = _17, src = _19, count = _15); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 ++ StorageDead(_12); // scope 1 at $DIR/enum_opt.rs:+3:3: +3:4 + StorageDead(_1); // scope 0 at $DIR/enum_opt.rs:+4:1: +4:2 + return; // scope 0 at $DIR/enum_opt.rs:+4:2: +4:2 } } From 064fcfa016e9adac35b07ae6c63e1472c31b1616 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maria=20Jos=C3=A9=20Solano?= Date: Tue, 7 Feb 
2023 18:41:28 -0800 Subject: [PATCH 320/501] Sort and truncate final response --- crates/rust-analyzer/src/to_proto.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index f5cee5f907a86..5bdc1bf8d9bb7 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs @@ -216,15 +216,10 @@ pub(crate) fn completion_items( let mut res = Vec::with_capacity(items.len()); for item in items { completion_item(&mut res, config, line_index, &tdpp, max_relevance, item); - - if let Some(limit) = config.completion().limit { - if res.len() >= limit { - break; - } - } } if let Some(limit) = config.completion().limit { + res.sort_by(|item1, item2| item1.sort_text.cmp(&item2.sort_text)); res.truncate(limit); } From f03fb262f7333a25ebf21f9f252e9505f9d9885b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 8 Feb 2023 11:05:34 +0100 Subject: [PATCH 321/501] Allow specifying what proc-macro server to run in rust_analyzer::load_cargo API --- .../rust-analyzer/src/cli/analysis_stats.rs | 12 +++---- crates/rust-analyzer/src/cli/diagnostics.rs | 4 +-- crates/rust-analyzer/src/cli/load_cargo.rs | 33 ++++++++++++------- crates/rust-analyzer/src/cli/lsif.rs | 3 +- crates/rust-analyzer/src/cli/scip.rs | 7 ++-- crates/rust-analyzer/src/cli/ssr.rs | 6 ++-- .../src/integrated_benchmarks.rs | 6 ++-- 7 files changed, 42 insertions(+), 29 deletions(-) diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 4b91433f63a68..93297faa664d1 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -33,7 +33,7 @@ use vfs::{AbsPathBuf, Vfs, VfsPath}; use crate::cli::{ flags::{self, OutputFormat}, - load_cargo::{load_workspace, LoadCargoConfig}, + load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}, print_memory_usage, progress_report::ProgressReport, report_metric, Result, Verbosity, @@ -59,11 +59,6 @@ impl flags::AnalysisStats { true => None, false => Some(RustcSource::Discover), }; - let load_cargo_config = LoadCargoConfig { - load_out_dirs_from_check: !self.disable_build_scripts, - with_proc_macro: !self.disable_proc_macros, - prefill_caches: false, - }; let no_progress = &|_| (); let mut db_load_sw = self.stop_watch(); @@ -73,6 +68,11 @@ impl flags::AnalysisStats { let mut workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?; let metadata_time = db_load_sw.elapsed(); + let load_cargo_config = LoadCargoConfig { + load_out_dirs_from_check: !self.disable_build_scripts, + with_proc_macro_server: ProcMacroServerChoice::Sysroot, + prefill_caches: false, + }; let build_scripts_time = if self.disable_build_scripts { None diff --git a/crates/rust-analyzer/src/cli/diagnostics.rs b/crates/rust-analyzer/src/cli/diagnostics.rs index fd5b3ce61f78d..ff821be53d83d 100644 --- a/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/crates/rust-analyzer/src/cli/diagnostics.rs @@ -9,7 +9,7 @@ use ide_db::base_db::SourceDatabaseExt; use crate::cli::{ flags, - load_cargo::{load_workspace_at, LoadCargoConfig}, + load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}, }; impl flags::Diagnostics { @@ -17,7 +17,7 @@ impl flags::Diagnostics { let cargo_config = Default::default(); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: !self.disable_build_scripts, - with_proc_macro: !self.disable_proc_macros, + with_proc_macro_server: 
ProcMacroServerChoice::Sysroot, prefill_caches: false, }; let (host, _vfs, _proc_macro) = diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs index 762d7d3a18e8b..5a958d963e4b5 100644 --- a/crates/rust-analyzer/src/cli/load_cargo.rs +++ b/crates/rust-analyzer/src/cli/load_cargo.rs @@ -1,6 +1,6 @@ //! Loads a Cargo project into a static instance of analysis, without support //! for incorporating changes. -use std::{path::Path, sync::Arc}; +use std::{convert::identity, path::Path, sync::Arc}; use anyhow::Result; use crossbeam_channel::{unbounded, Receiver}; @@ -17,10 +17,17 @@ use crate::reload::{load_proc_macro, ProjectFolders, SourceRootConfig}; // what otherwise would be `pub(crate)` has to be `pub` here instead. pub struct LoadCargoConfig { pub load_out_dirs_from_check: bool, - pub with_proc_macro: bool, + pub with_proc_macro_server: ProcMacroServerChoice, pub prefill_caches: bool, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ProcMacroServerChoice { + Sysroot, + Explicit(AbsPathBuf, Vec), + None, +} + // Note: Since this function is used by external tools that use rust-analyzer as a library // what otherwise would be `pub(crate)` has to be `pub` here instead. pub fn load_workspace_at( @@ -59,15 +66,17 @@ pub fn load_workspace( Box::new(loader) }; - let proc_macro_client = if load_config.with_proc_macro { - let (server_path, args): (_, &[_]) = match ws.find_sysroot_proc_macro_srv() { - Some(server_path) => (server_path, &[]), - None => (AbsPathBuf::assert(std::env::current_exe()?), &["proc-macro"]), - }; - - ProcMacroServer::spawn(server_path, args).map_err(|e| e.to_string()) - } else { - Err("proc macro server disabled".to_owned()) + let proc_macro_client = match &load_config.with_proc_macro_server { + ProcMacroServerChoice::Sysroot => ws + .find_sysroot_proc_macro_srv() + .ok_or_else(|| "failed to find sysroot proc-macro server".to_owned()) + .and_then(|it| { + ProcMacroServer::spawn(it, identity::<&[&str]>(&[])).map_err(|e| e.to_string()) + }), + ProcMacroServerChoice::Explicit(path, args) => { + ProcMacroServer::spawn(path.clone(), args).map_err(|e| e.to_string()) + } + ProcMacroServerChoice::None => Err("proc macro server disabled".to_owned()), }; let crate_graph = ws.to_crate_graph( @@ -157,7 +166,7 @@ mod tests { let cargo_config = CargoConfig::default(); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: false, - with_proc_macro: false, + with_proc_macro_server: ProcMacroServerChoice::None, prefill_caches: false, }; let (host, _vfs, _proc_macro) = diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs index af8356d041f83..60a7f99ccdb85 100644 --- a/crates/rust-analyzer/src/cli/lsif.rs +++ b/crates/rust-analyzer/src/cli/lsif.rs @@ -15,6 +15,7 @@ use lsp_types::{self, lsif}; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace}; use vfs::{AbsPathBuf, Vfs}; +use crate::cli::load_cargo::ProcMacroServerChoice; use crate::cli::{ flags, load_cargo::{load_workspace, LoadCargoConfig}, @@ -291,7 +292,7 @@ impl flags::Lsif { let no_progress = &|_| (); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, - with_proc_macro: true, + with_proc_macro_server: ProcMacroServerChoice::Sysroot, prefill_caches: false, }; let path = AbsPathBuf::assert(env::current_dir()?.join(&self.path)); diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index b050d1e95ac1f..9a04fbea7747b 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ 
b/crates/rust-analyzer/src/cli/scip.rs @@ -5,7 +5,10 @@ use std::{ time::Instant, }; -use crate::line_index::{LineEndings, LineIndex, PositionEncoding}; +use crate::{ + cli::load_cargo::ProcMacroServerChoice, + line_index::{LineEndings, LineIndex, PositionEncoding}, +}; use hir::Name; use ide::{ LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId, @@ -31,7 +34,7 @@ impl flags::Scip { let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}")); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, - with_proc_macro: true, + with_proc_macro_server: ProcMacroServerChoice::Sysroot, prefill_caches: true, }; let path = vfs::AbsPathBuf::assert(env::current_dir()?.join(&self.path)); diff --git a/crates/rust-analyzer/src/cli/ssr.rs b/crates/rust-analyzer/src/cli/ssr.rs index 84c48917167b6..3552f840a1b76 100644 --- a/crates/rust-analyzer/src/cli/ssr.rs +++ b/crates/rust-analyzer/src/cli/ssr.rs @@ -5,7 +5,7 @@ use project_model::CargoConfig; use crate::cli::{ flags, - load_cargo::{load_workspace_at, LoadCargoConfig}, + load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}, Result, }; @@ -15,7 +15,7 @@ impl flags::Ssr { let cargo_config = CargoConfig::default(); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, - with_proc_macro: true, + with_proc_macro_server: ProcMacroServerChoice::Sysroot, prefill_caches: false, }; let (host, vfs, _proc_macro) = load_workspace_at( @@ -51,7 +51,7 @@ impl flags::Search { let cargo_config = CargoConfig::default(); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, - with_proc_macro: true, + with_proc_macro_server: ProcMacroServerChoice::Sysroot, prefill_caches: false, }; let (host, _vfs, _proc_macro) = load_workspace_at( diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index 405d261db6fb4..a6810e61499c3 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -21,7 +21,7 @@ use project_model::CargoConfig; use test_utils::project_root; use vfs::{AbsPathBuf, VfsPath}; -use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig}; +use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; #[test] fn integrated_highlighting_benchmark() { @@ -36,7 +36,7 @@ fn integrated_highlighting_benchmark() { let cargo_config = CargoConfig::default(); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, - with_proc_macro: false, + with_proc_macro_server: ProcMacroServerChoice::None, prefill_caches: false, }; @@ -90,7 +90,7 @@ fn integrated_completion_benchmark() { let cargo_config = CargoConfig::default(); let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, - with_proc_macro: false, + with_proc_macro_server: ProcMacroServerChoice::None, prefill_caches: true, }; From 4788c7a0569107372d78e1a1619a644ae5ac72bc Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 8 Feb 2023 11:39:20 +0100 Subject: [PATCH 322/501] Bump `rustc-ap-rustc_lexer` --- Cargo.lock | 4 ++-- crates/parser/Cargo.toml | 2 +- crates/parser/src/lexed_str.rs | 5 +++++ crates/syntax/Cargo.toml | 2 +- 4 files changed, 9 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9cc34a876dc70..ef0316f30fb93 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1494,9 +1494,9 @@ dependencies = [ [[package]] name = "rustc-ap-rustc_lexer" -version = "725.0.0" +version = "727.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950742ef8a203aa7661aad3ab880438ddeb7f95d4b837c30d65db1a2c5df68e" +checksum = "8f40f26e7abdcd3b982f36c09a634cc6187988fbf6ec466c91f8d30a12ac0237" dependencies = [ "unicode-xid", ] diff --git a/crates/parser/Cargo.toml b/crates/parser/Cargo.toml index ab48b1392a968..08359133f1aa7 100644 --- a/crates/parser/Cargo.toml +++ b/crates/parser/Cargo.toml @@ -13,7 +13,7 @@ doctest = false [dependencies] drop_bomb = "0.1.5" -rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" } +rustc_lexer = { version = "727.0.0", package = "rustc-ap-rustc_lexer" } limit.workspace = true diff --git a/crates/parser/src/lexed_str.rs b/crates/parser/src/lexed_str.rs index b48921f19177f..100deff462d25 100644 --- a/crates/parser/src/lexed_str.rs +++ b/crates/parser/src/lexed_str.rs @@ -82,6 +82,7 @@ impl<'a> LexedStr<'a> { pub fn text(&self, i: usize) -> &str { self.range_text(i..i + 1) } + pub fn range_text(&self, r: ops::Range) -> &str { assert!(r.start < r.end && r.end <= self.len()); let lo = self.start[r.start] as usize; @@ -216,6 +217,10 @@ impl<'a> Converter<'a> { rustc_lexer::TokenKind::Caret => T![^], rustc_lexer::TokenKind::Percent => T![%], rustc_lexer::TokenKind::Unknown => ERROR, + rustc_lexer::TokenKind::UnknownPrefix => { + err = "unknown literal prefix"; + IDENT + } } }; diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 1a845a6ee713c..8fc493a23f5e7 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -16,7 +16,7 @@ doctest = false cov-mark = "2.0.0-pre.1" itertools = "0.10.5" rowan = "0.15.10" -rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" } +rustc_lexer = { version = "727.0.0", package = "rustc-ap-rustc_lexer" } rustc-hash = "1.1.0" once_cell = "1.17.0" indexmap = "1.9.1" From 5e6208b1dfdca994420f1d3b9b964ed2bc08b0b3 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 8 Feb 2023 12:03:11 +0100 Subject: [PATCH 323/501] fix: Don't insert a semicolon when typing = if parse errors are encountered --- crates/ide/src/typing.rs | 51 ++++++++++++++++++++++++---------------- 1 file changed, 31 insertions(+), 20 deletions(-) diff --git a/crates/ide/src/typing.rs b/crates/ide/src/typing.rs index c265487562508..c7e403f6b1ab9 100644 --- a/crates/ide/src/typing.rs +++ b/crates/ide/src/typing.rs @@ -253,6 +253,10 @@ fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option { if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') { return None; } + // Good indicator that we will insert into a bad spot, so bail out. + if expr.syntax().descendants().any(|it| it.kind() == SyntaxKind::ERROR) { + return None; + } let offset = let_stmt.syntax().text_range().end(); Some(TextEdit::insert(offset, ";".to_string())) } @@ -407,15 +411,14 @@ mod tests { #[test] fn test_semi_after_let() { - // do_check(r" - // fn foo() { - // let foo =$0 - // } - // ", r" - // fn foo() { - // let foo =; - // } - // "); + type_char_noop( + '=', + r" +fn foo() { + let foo =$0 +} +", + ); type_char( '=', r#" @@ -429,17 +432,25 @@ fn foo() { } "#, ); - // do_check(r" - // fn foo() { - // let foo =$0 - // let bar = 1; - // } - // ", r" - // fn foo() { - // let foo =; - // let bar = 1; - // } - // "); + type_char_noop( + '=', + r#" +fn foo() { + let difference $0(counts: &HashMap<(char, char), u64>, last: char) -> u64 { + // ... 
+ } +} +"#, + ); + type_char_noop( + '=', + r" +fn foo() { + let foo =$0 + let bar = 1; +} +", + ); } #[test] From f8f1cb93e0afefb5613494c95834f5cf0b9a4357 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 8 Feb 2023 12:57:08 +0100 Subject: [PATCH 324/501] fix: Implement Expactation::Castable and add a test case for it --- crates/hir-ty/src/infer.rs | 14 ++++++++------ crates/hir-ty/src/infer/expr.rs | 4 ++-- crates/hir-ty/src/tests/simple.rs | 31 +++++++++++++++++++++++++++++++ 3 files changed, 41 insertions(+), 8 deletions(-) diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index a76d33c0cded3..43a24c7136dac 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -1024,7 +1024,7 @@ impl<'a> InferenceContext<'a> { pub(crate) enum Expectation { None, HasType(Ty), - // Castable(Ty), // rustc has this, we currently just don't propagate an expectation for casts + Castable(Ty), RValueLikeUnsized(Ty), } @@ -1077,6 +1077,7 @@ impl Expectation { match self { Expectation::None => Expectation::None, Expectation::HasType(t) => Expectation::HasType(table.resolve_ty_shallow(t)), + Expectation::Castable(t) => Expectation::Castable(table.resolve_ty_shallow(t)), Expectation::RValueLikeUnsized(t) => { Expectation::RValueLikeUnsized(table.resolve_ty_shallow(t)) } @@ -1086,17 +1087,18 @@ impl Expectation { fn to_option(&self, table: &mut unify::InferenceTable<'_>) -> Option { match self.resolve(table) { Expectation::None => None, - Expectation::HasType(t) | - // Expectation::Castable(t) | - Expectation::RValueLikeUnsized(t) => Some(t), + Expectation::HasType(t) + | Expectation::Castable(t) + | Expectation::RValueLikeUnsized(t) => Some(t), } } fn only_has_type(&self, table: &mut unify::InferenceTable<'_>) -> Option { match self { Expectation::HasType(t) => Some(table.resolve_ty_shallow(t)), - // Expectation::Castable(_) | - Expectation::RValueLikeUnsized(_) | Expectation::None => None, + Expectation::Castable(_) | Expectation::RValueLikeUnsized(_) | Expectation::None => { + None + } } } diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 7b9bf0c5cf3de..f0655291b8bba 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -610,9 +610,9 @@ impl<'a> InferenceContext<'a> { } } Expr::Cast { expr, type_ref } => { - // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary) - let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); let cast_ty = self.make_ty(type_ref); + let _inner_ty = + self.infer_expr_inner(*expr, &Expectation::Castable(cast_ty.clone())); // FIXME check the cast... cast_ty } diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 146145523b242..6f6b334c9476a 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -3200,3 +3200,34 @@ fn func() { "#, ); } +#[test] +fn castable_to() { + check_infer( + r#" +//- minicore: sized +#[lang = "owned_box"] +pub struct Box { + inner: *mut T, +} +impl Box { + fn new(t: T) -> Self { loop {} } +} + +fn func() { + let x = Box::new([]) as Box<[i32; 0]>; +} +"#, + expect![[r#" + 99..100 't': T + 113..124 '{ loop {} }': Box + 115..122 'loop {}': ! 
+ 120..122 '{}': () + 138..184 '{ ...0]>; }': () + 148..149 'x': Box<[i32; 0]> + 152..160 'Box::new': fn new<[i32; 0]>([i32; 0]) -> Box<[i32; 0]> + 152..164 'Box::new([])': Box<[i32; 0]> + 152..181 'Box::n...2; 0]>': Box<[i32; 0]> + 161..163 '[]': [i32; 0] + "#]], + ); +} From 62edacf738ca4bf9ad0e56874e460f00ca065103 Mon Sep 17 00:00:00 2001 From: KaDiWa Date: Wed, 8 Feb 2023 19:19:23 +0100 Subject: [PATCH 325/501] bootstrap.py: fix build-failure message --- src/bootstrap/bootstrap.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 5b19a658fb543..0c896733a26d1 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -935,6 +935,7 @@ def main(): ) exit_code = 0 + success_word = "successfully" try: bootstrap(args) except (SystemExit, KeyboardInterrupt) as error: @@ -943,9 +944,10 @@ def main(): else: exit_code = 1 print(error) + success_word = "unsuccessfully" if not help_triggered: - print("Build completed successfully in", format_build_time(time() - start_time)) + print("Build completed", success_word, "in", format_build_time(time() - start_time)) sys.exit(exit_code) From 38ec810c37439a6a3742d85b950978d904b4f826 Mon Sep 17 00:00:00 2001 From: Alan Egerton Date: Wed, 8 Feb 2023 19:55:50 +0000 Subject: [PATCH 326/501] Do not assemble supertraits for trait aliases --- compiler/rustc_hir_typeck/src/method/probe.rs | 48 ++++++++++++------- ...ssue-107747-do-not-assemble-supertraits.rs | 21 ++++++++ 2 files changed, 52 insertions(+), 17 deletions(-) create mode 100644 tests/ui/traits/alias/issue-107747-do-not-assemble-supertraits.rs diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs index 9ab29a6778fc9..d6b054d9626c3 100644 --- a/compiler/rustc_hir_typeck/src/method/probe.rs +++ b/compiler/rustc_hir_typeck/src/method/probe.rs @@ -951,24 +951,38 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { let trait_ref = self.tcx.mk_trait_ref(trait_def_id, trait_substs); if self.tcx.is_trait_alias(trait_def_id) { - // For trait aliases, assume all supertraits are relevant. 
- let bounds = iter::once(ty::Binder::dummy(trait_ref)); - self.elaborate_bounds(bounds, |this, new_trait_ref, item| { - let new_trait_ref = this.erase_late_bound_regions(new_trait_ref); + // For trait aliases, recursively assume all explicitly named traits are relevant + for expansion in traits::expand_trait_aliases( + self.tcx, + iter::once((ty::Binder::dummy(trait_ref), self.span)), + ) { + let bound_trait_ref = expansion.trait_ref(); + for item in self.impl_or_trait_item(bound_trait_ref.def_id()) { + if !self.has_applicable_self(&item) { + self.record_static_candidate(CandidateSource::Trait( + bound_trait_ref.def_id(), + )); + } else { + let new_trait_ref = self.erase_late_bound_regions(bound_trait_ref); - let (xform_self_ty, xform_ret_ty) = - this.xform_self_ty(&item, new_trait_ref.self_ty(), new_trait_ref.substs); - this.push_candidate( - Candidate { - xform_self_ty, - xform_ret_ty, - item, - import_ids: import_ids.clone(), - kind: TraitCandidate(new_trait_ref), - }, - false, - ); - }); + let (xform_self_ty, xform_ret_ty) = self.xform_self_ty( + &item, + new_trait_ref.self_ty(), + new_trait_ref.substs, + ); + self.push_candidate( + Candidate { + xform_self_ty, + xform_ret_ty, + item, + import_ids: import_ids.clone(), + kind: TraitCandidate(new_trait_ref), + }, + false, + ); + } + } + } } else { debug_assert!(self.tcx.is_trait(trait_def_id)); if self.tcx.trait_is_auto(trait_def_id) { diff --git a/tests/ui/traits/alias/issue-107747-do-not-assemble-supertraits.rs b/tests/ui/traits/alias/issue-107747-do-not-assemble-supertraits.rs new file mode 100644 index 0000000000000..9b41a8096c4e5 --- /dev/null +++ b/tests/ui/traits/alias/issue-107747-do-not-assemble-supertraits.rs @@ -0,0 +1,21 @@ +// Regression test for #107747: methods from trait alias supertraits were brought into scope +// +// check-pass + +#![feature(trait_alias)] + +use std::fmt; + +trait Foo: fmt::Debug {} +trait Bar = Foo; + +#[derive(Debug)] +struct Qux(bool); + +impl fmt::Display for Qux { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +fn main() {} From 8f736a40426bf0182b92637e9c221da12745a180 Mon Sep 17 00:00:00 2001 From: yukang Date: Mon, 28 Nov 2022 06:49:18 +0800 Subject: [PATCH 327/501] fix #104961, Add parentheses properly for borrowing suggestion --- .../src/traits/error_reporting/suggestions.rs | 42 +++++++++++++++---- tests/ui/suggestions/issue-104961.fixed | 16 +++++++ tests/ui/suggestions/issue-104961.rs | 16 +++++++ tests/ui/suggestions/issue-104961.stderr | 37 ++++++++++++++++ 4 files changed, 104 insertions(+), 7 deletions(-) create mode 100644 tests/ui/suggestions/issue-104961.fixed create mode 100644 tests/ui/suggestions/issue-104961.rs create mode 100644 tests/ui/suggestions/issue-104961.stderr diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs index 91da690a00056..8ece53dd05cba 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs @@ -19,6 +19,7 @@ use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::Visitor; +use rustc_hir::is_range_literal; use rustc_hir::lang_items::LangItem; use rustc_hir::{AsyncGeneratorKind, GeneratorKind, Node}; use rustc_hir::{Expr, HirId}; @@ -1350,14 +1351,41 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { Applicability::MaybeIncorrect, ); } else { + 
// Issue #104961, we need to add parentheses properly for compond expressions + // for example, `x.starts_with("hi".to_string() + "you")` + // should be `x.starts_with(&("hi".to_string() + "you"))` + let Some(body_id) = self.tcx.hir().maybe_body_owned_by(obligation.cause.body_id) else { return false; }; + let body = self.tcx.hir().body(body_id); + let mut expr_finder = FindExprBySpan::new(span); + expr_finder.visit_expr(body.value); + let Some(expr) = expr_finder.result else { return false; }; + let needs_parens = match expr.kind { + // parenthesize if needed (Issue #46756) + hir::ExprKind::Cast(_, _) | hir::ExprKind::Binary(_, _, _) => true, + // parenthesize borrows of range literals (Issue #54505) + _ if is_range_literal(expr) => true, + _ => false, + }; + let is_mut = mut_ref_self_ty_satisfies_pred || ref_inner_ty_mut; - err.span_suggestion_verbose( - span.shrink_to_lo(), - &format!( - "consider{} borrowing here", - if is_mut { " mutably" } else { "" } - ), - format!("&{}", if is_mut { "mut " } else { "" }), + let span = if needs_parens { span } else { span.shrink_to_lo() }; + let sugg_prefix = format!("&{}", if is_mut { "mut " } else { "" }); + let sugg_msg = &format!( + "consider{} borrowing here", + if is_mut { " mutably" } else { "" } + ); + + let suggestions = if !needs_parens { + vec![(span.shrink_to_lo(), format!("{}", sugg_prefix))] + } else { + vec![ + (span.shrink_to_lo(), format!("{}(", sugg_prefix)), + (span.shrink_to_hi(), ")".to_string()), + ] + }; + err.multipart_suggestion_verbose( + sugg_msg, + suggestions, Applicability::MaybeIncorrect, ); } diff --git a/tests/ui/suggestions/issue-104961.fixed b/tests/ui/suggestions/issue-104961.fixed new file mode 100644 index 0000000000000..520d638b1748f --- /dev/null +++ b/tests/ui/suggestions/issue-104961.fixed @@ -0,0 +1,16 @@ +// run-rustfix + +fn foo(x: &str) -> bool { + x.starts_with(&("hi".to_string() + " you")) + //~^ ERROR expected a `FnMut<(char,)>` closure, found `String` +} + +fn foo2(x: &str) -> bool { + x.starts_with(&"hi".to_string()) + //~^ ERROR expected a `FnMut<(char,)>` closure, found `String` +} + +fn main() { + foo("hi you"); + foo2("hi"); +} diff --git a/tests/ui/suggestions/issue-104961.rs b/tests/ui/suggestions/issue-104961.rs new file mode 100644 index 0000000000000..aeb787abb6fc8 --- /dev/null +++ b/tests/ui/suggestions/issue-104961.rs @@ -0,0 +1,16 @@ +// run-rustfix + +fn foo(x: &str) -> bool { + x.starts_with("hi".to_string() + " you") + //~^ ERROR expected a `FnMut<(char,)>` closure, found `String` +} + +fn foo2(x: &str) -> bool { + x.starts_with("hi".to_string()) + //~^ ERROR expected a `FnMut<(char,)>` closure, found `String` +} + +fn main() { + foo("hi you"); + foo2("hi"); +} diff --git a/tests/ui/suggestions/issue-104961.stderr b/tests/ui/suggestions/issue-104961.stderr new file mode 100644 index 0000000000000..8cec6a3f8270a --- /dev/null +++ b/tests/ui/suggestions/issue-104961.stderr @@ -0,0 +1,37 @@ +error[E0277]: expected a `FnMut<(char,)>` closure, found `String` + --> $DIR/issue-104961.rs:4:19 + | +LL | x.starts_with("hi".to_string() + " you") + | ----------- ^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Pattern<'_>` is not implemented for `String` + | | + | required by a bound introduced by this call + | + = note: the trait bound `String: Pattern<'_>` is not satisfied + = note: required for `String` to implement `Pattern<'_>` +note: required by a bound in `core::str::::starts_with` + --> $SRC_DIR/core/src/str/mod.rs:LL:COL +help: consider borrowing here + | +LL | x.starts_with(&("hi".to_string() + " 
you")) + | ++ + + +error[E0277]: expected a `FnMut<(char,)>` closure, found `String` + --> $DIR/issue-104961.rs:9:19 + | +LL | x.starts_with("hi".to_string()) + | ----------- ^^^^^^^^^^^^^^^^ the trait `Pattern<'_>` is not implemented for `String` + | | + | required by a bound introduced by this call + | + = note: the trait bound `String: Pattern<'_>` is not satisfied + = note: required for `String` to implement `Pattern<'_>` +note: required by a bound in `core::str::::starts_with` + --> $SRC_DIR/core/src/str/mod.rs:LL:COL +help: consider borrowing here + | +LL | x.starts_with(&"hi".to_string()) + | + + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0277`. From 0e60df9ed1439cb9d7bcc1a09bf2fc87d03393b1 Mon Sep 17 00:00:00 2001 From: Oleksii Lozovskyi Date: Sat, 24 Sep 2022 20:02:44 +0900 Subject: [PATCH 328/501] Parse "-Z instrument-xray" codegen option Recognize all bells and whistles that LLVM's XRay pass is capable of. The always/never settings are a bit dumb without attributes but they're still there. The default instruction count is chosen by the compiler, not LLVM pass. We'll do it later. --- compiler/rustc_interface/src/tests.rs | 2 + compiler/rustc_session/src/config.rs | 26 ++++++- compiler/rustc_session/src/options.rs | 73 +++++++++++++++++++ .../src/compiler-flags/instrument-xray.md | 39 ++++++++++ tests/rustdoc-ui/z-help.stdout | 9 +++ 5 files changed, 146 insertions(+), 3 deletions(-) create mode 100644 src/doc/unstable-book/src/compiler-flags/instrument-xray.md diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index 52a4e0e74181f..5daefadabba02 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -5,6 +5,7 @@ use rustc_data_structures::fx::FxHashSet; use rustc_errors::{emitter::HumanReadableErrorType, registry, ColorConfig}; use rustc_session::config::rustc_optgroups; use rustc_session::config::Input; +use rustc_session::config::InstrumentXRay; use rustc_session::config::TraitSolver; use rustc_session::config::{build_configuration, build_session_options, to_crate_config}; use rustc_session::config::{ @@ -755,6 +756,7 @@ fn test_unstable_options_tracking_hash() { tracked!(inline_mir_threshold, Some(123)); tracked!(instrument_coverage, Some(InstrumentCoverage::All)); tracked!(instrument_mcount, true); + tracked!(instrument_xray, Some(InstrumentXRay::default())); tracked!(link_only, true); tracked!(llvm_plugins, vec![String::from("plugin_name")]); tracked!(location_detail, LocationDetail { file: true, line: false, column: false }); diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 973d860118ef1..7d2fdf94baa36 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -174,6 +174,25 @@ pub enum InstrumentCoverage { Off, } +/// Settings for `-Z instrument-xray` flag. 
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash)] +pub struct InstrumentXRay { + /// `-Z instrument-xray=always`, force instrumentation + pub always: bool, + /// `-Z instrument-xray=never`, disable instrumentation + pub never: bool, + /// `-Z instrument-xray=ignore-loops`, ignore presence of loops, + /// instrument functions based only on instruction count + pub ignore_loops: bool, + /// `-Z instrument-xray=instruction-threshold=N`, explicitly set instruction threshold + /// for instrumentation, or `None` to use compiler's default + pub instruction_threshold: Option, + /// `-Z instrument-xray=skip-entry`, do not instrument function entry + pub skip_entry: bool, + /// `-Z instrument-xray=skip-exit`, do not instrument function exit + pub skip_exit: bool, +} + #[derive(Clone, PartialEq, Hash, Debug)] pub enum LinkerPluginLto { LinkerPlugin(PathBuf), @@ -2805,9 +2824,9 @@ impl PpMode { pub(crate) mod dep_tracking { use super::{ BranchProtection, CFGuard, CFProtection, CrateType, DebugInfo, ErrorOutputType, - InstrumentCoverage, LdImpl, LinkerPluginLto, LocationDetail, LtoCli, OomStrategy, OptLevel, - OutputType, OutputTypes, Passes, SourceFileHashAlgorithm, SplitDwarfKind, - SwitchWithOptPath, SymbolManglingVersion, TraitSolver, TrimmedDefPaths, + InstrumentCoverage, InstrumentXRay, LdImpl, LinkerPluginLto, LocationDetail, LtoCli, + OomStrategy, OptLevel, OutputType, OutputTypes, Passes, SourceFileHashAlgorithm, + SplitDwarfKind, SwitchWithOptPath, SymbolManglingVersion, TraitSolver, TrimmedDefPaths, }; use crate::lint; use crate::options::WasiExecModel; @@ -2876,6 +2895,7 @@ pub(crate) mod dep_tracking { CodeModel, TlsModel, InstrumentCoverage, + InstrumentXRay, CrateType, MergeFunctions, PanicStrategy, diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 0db4d85ff4b67..a0a8a42575ef3 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -380,6 +380,7 @@ mod desc { pub const parse_dump_mono_stats: &str = "`markdown` (default) or `json`"; pub const parse_instrument_coverage: &str = "`all` (default), `except-unused-generics`, `except-unused-functions`, or `off`"; + pub const parse_instrument_xray: &str = "either a boolean (`yes`, `no`, `on`, `off`, etc), or a comma separated list of settings: `always` or `never` (mutually exclusive), `ignore-loops`, `instruction-threshold=N`, `skip-entry`, `skip-exit`"; pub const parse_unpretty: &str = "`string` or `string=string`"; pub const parse_treat_err_as_bug: &str = "either no value or a number bigger than 0"; pub const parse_trait_solver: &str = @@ -869,6 +870,68 @@ mod parse { true } + pub(crate) fn parse_instrument_xray( + slot: &mut Option, + v: Option<&str>, + ) -> bool { + if v.is_some() { + let mut bool_arg = None; + if parse_opt_bool(&mut bool_arg, v) { + *slot = if bool_arg.unwrap() { Some(InstrumentXRay::default()) } else { None }; + return true; + } + } + + let mut options = slot.get_or_insert_default(); + let mut seen_always = false; + let mut seen_never = false; + let mut seen_ignore_loops = false; + let mut seen_instruction_threshold = false; + let mut seen_skip_entry = false; + let mut seen_skip_exit = false; + for option in v.into_iter().map(|v| v.split(',')).flatten() { + match option { + "always" if !seen_always && !seen_never => { + options.always = true; + options.never = false; + seen_always = true; + } + "never" if !seen_never && !seen_always => { + options.never = true; + options.always = false; + seen_never = true; + } + 
"ignore-loops" if !seen_ignore_loops => { + options.ignore_loops = true; + seen_ignore_loops = true; + } + option + if option.starts_with("instruction-threshold") + && !seen_instruction_threshold => + { + let Some(("instruction-threshold", n)) = option.split_once('=') else { + return false; + }; + match n.parse() { + Ok(n) => options.instruction_threshold = Some(n), + Err(_) => return false, + } + seen_instruction_threshold = true; + } + "skip-entry" if !seen_skip_entry => { + options.skip_entry = true; + seen_skip_entry = true; + } + "skip-exit" if !seen_skip_exit => { + options.skip_exit = true; + seen_skip_exit = true; + } + _ => return false, + } + } + true + } + pub(crate) fn parse_treat_err_as_bug(slot: &mut Option, v: Option<&str>) -> bool { match v { Some(s) => { @@ -1397,6 +1460,16 @@ options! { `=off` (default)"), instrument_mcount: bool = (false, parse_bool, [TRACKED], "insert function instrument code for mcount-based tracing (default: no)"), + instrument_xray: Option = (None, parse_instrument_xray, [TRACKED], + "insert function instrument code for XRay-based tracing (default: no) + Optional extra settings: + `=always` + `=never` + `=ignore-loops` + `=instruction-threshold=N` + `=skip-entry` + `=skip-exit` + Multiple options can be combined with commas."), keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED], "keep hygiene data after analysis (default: no)"), layout_seed: Option = (None, parse_opt_number, [TRACKED], diff --git a/src/doc/unstable-book/src/compiler-flags/instrument-xray.md b/src/doc/unstable-book/src/compiler-flags/instrument-xray.md new file mode 100644 index 0000000000000..7fb33cd68b4a3 --- /dev/null +++ b/src/doc/unstable-book/src/compiler-flags/instrument-xray.md @@ -0,0 +1,39 @@ +# `instrument-xray` + +The tracking issue for this feature is: [#102921](https://github.com/rust-lang/rust/issues/102921). + +------------------------ + +Enable generation of NOP sleds for XRay function tracing instrumentation. +For more information on XRay, +read [LLVM documentation](https://llvm.org/docs/XRay.html), +and/or the [XRay whitepaper](http://research.google.com/pubs/pub45287.html). + +Set the `-Z instrument-xray` compiler flag in order to enable XRay instrumentation. + + - `-Z instrument-xray` – use the default settings + - `-Z instrument-xray=skip-exit` – configure a custom setting + - `-Z instrument-xray=ignore-loops,instruction-threshold=300` – + multiple settings separated by commas + +Supported options: + + - `always` – force instrumentation of all functions + - `never` – do no instrument any functions + - `ignore-loops` – ignore presence of loops, + instrument functions based only on instruction count + - `instruction-threshold=10` – set a different instruction threshold for instrumentation + - `skip-entry` – do no instrument function entry + - `skip-exit` – do no instrument function exit + +The default settings are: + + - instrument both entry & exit from functions + - instrument functions with at least 200 instructions, + or containing a non-trivial loop + +Note that `-Z instrument-xray` only enables generation of NOP sleds +which on their own don't do anything useful. +In order to actually trace the functions, +you will need to link a separate runtime library of your choice, +such as Clang's [XRay Runtime Library](https://www.llvm.org/docs/XRay.html#xray-runtime-library). 
diff --git a/tests/rustdoc-ui/z-help.stdout b/tests/rustdoc-ui/z-help.stdout index 4f07fca82d1eb..2169b89c9291e 100644 --- a/tests/rustdoc-ui/z-help.stdout +++ b/tests/rustdoc-ui/z-help.stdout @@ -70,6 +70,15 @@ `=except-unused-functions` `=off` (default) -Z instrument-mcount=val -- insert function instrument code for mcount-based tracing (default: no) + -Z instrument-xray=val -- insert function instrument code for XRay-based tracing (default: no) + Optional extra settings: + `=always` + `=never` + `=ignore-loops` + `=instruction-threshold=N` + `=skip-entry` + `=skip-exit` + Multiple options can be combined with commas. -Z keep-hygiene-data=val -- keep hygiene data after analysis (default: no) -Z layout-seed=val -- seed layout randomization -Z link-native-libraries=val -- link native libraries in the linker invocation (default: yes) From d748f085473a12fc6fdde3874a8f0f276c1657c6 Mon Sep 17 00:00:00 2001 From: Oleksii Lozovskyi Date: Sat, 24 Sep 2022 20:02:44 +0900 Subject: [PATCH 329/501] UI tests for -Z instrument-xray I'm tired of testing it manually, just codify my expectations in tests. They're pretty low-maintenance. --- src/tools/tidy/src/ui_tests.rs | 2 +- tests/ui/instrument-xray/flags-always-never-1.rs | 6 ++++++ tests/ui/instrument-xray/flags-always-never-1.stderr | 2 ++ tests/ui/instrument-xray/flags-always-never-2.rs | 8 ++++++++ tests/ui/instrument-xray/flags-basic.rs | 8 ++++++++ tests/ui/instrument-xray/flags-dupe-always.rs | 6 ++++++ tests/ui/instrument-xray/flags-dupe-always.stderr | 2 ++ tests/ui/instrument-xray/flags-dupe-ignore-loops.rs | 6 ++++++ tests/ui/instrument-xray/flags-dupe-ignore-loops.stderr | 2 ++ 9 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 tests/ui/instrument-xray/flags-always-never-1.rs create mode 100644 tests/ui/instrument-xray/flags-always-never-1.stderr create mode 100644 tests/ui/instrument-xray/flags-always-never-2.rs create mode 100644 tests/ui/instrument-xray/flags-basic.rs create mode 100644 tests/ui/instrument-xray/flags-dupe-always.rs create mode 100644 tests/ui/instrument-xray/flags-dupe-always.stderr create mode 100644 tests/ui/instrument-xray/flags-dupe-ignore-loops.rs create mode 100644 tests/ui/instrument-xray/flags-dupe-ignore-loops.stderr diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index 83551a1d820ab..ef3abb9514f24 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -9,7 +9,7 @@ use std::path::Path; const ENTRY_LIMIT: usize = 1000; // FIXME: The following limits should be reduced eventually. -const ROOT_ENTRY_LIMIT: usize = 939; +const ROOT_ENTRY_LIMIT: usize = 940; const ISSUES_ENTRY_LIMIT: usize = 2001; fn check_entries(path: &Path, bad: &mut bool) { diff --git a/tests/ui/instrument-xray/flags-always-never-1.rs b/tests/ui/instrument-xray/flags-always-never-1.rs new file mode 100644 index 0000000000000..03274dedd0830 --- /dev/null +++ b/tests/ui/instrument-xray/flags-always-never-1.rs @@ -0,0 +1,6 @@ +// Checks that `-Z instrument-xray` does not allow `always` and `never` simultaneously. 
+// +// compile-flags: -Z instrument-xray=always,never +// error-pattern: incorrect value `always,never` for unstable option `instrument-xray` + +fn main() {} diff --git a/tests/ui/instrument-xray/flags-always-never-1.stderr b/tests/ui/instrument-xray/flags-always-never-1.stderr new file mode 100644 index 0000000000000..e211c6f602546 --- /dev/null +++ b/tests/ui/instrument-xray/flags-always-never-1.stderr @@ -0,0 +1,2 @@ +error: incorrect value `always,never` for unstable option `instrument-xray` - either a boolean (`yes`, `no`, `on`, `off`, etc), or a comma separated list of settings: `always` or `never` (mutually exclusive), `ignore-loops`, `instruction-threshold=N`, `skip-entry`, `skip-exit` was expected + diff --git a/tests/ui/instrument-xray/flags-always-never-2.rs b/tests/ui/instrument-xray/flags-always-never-2.rs new file mode 100644 index 0000000000000..e752890b47ad0 --- /dev/null +++ b/tests/ui/instrument-xray/flags-always-never-2.rs @@ -0,0 +1,8 @@ +// Checks that `-Z instrument-xray` allows `always` and `never` sequentially. +// (The last specified setting wins, like `-Z instrument-xray=no` as well.) +// +// compile-flags: -Z instrument-xray=always +// compile-flags: -Z instrument-xray=never +// check-pass + +fn main() {} diff --git a/tests/ui/instrument-xray/flags-basic.rs b/tests/ui/instrument-xray/flags-basic.rs new file mode 100644 index 0000000000000..5889a20f670c8 --- /dev/null +++ b/tests/ui/instrument-xray/flags-basic.rs @@ -0,0 +1,8 @@ +// Verifies basic `-Z instrument-xray` flags. +// +// compile-flags: -Z instrument-xray +// compile-flags: -Z instrument-xray=skip-exit +// compile-flags: -Z instrument-xray=ignore-loops,instruction-threshold=300 +// check-pass + +fn main() {} diff --git a/tests/ui/instrument-xray/flags-dupe-always.rs b/tests/ui/instrument-xray/flags-dupe-always.rs new file mode 100644 index 0000000000000..36dda4bbd03f4 --- /dev/null +++ b/tests/ui/instrument-xray/flags-dupe-always.rs @@ -0,0 +1,6 @@ +// Checks that `-Z instrument-xray` does not allow duplicates. +// +// compile-flags: -Z instrument-xray=always,always +// error-pattern: incorrect value `always,always` for unstable option `instrument-xray` + +fn main() {} diff --git a/tests/ui/instrument-xray/flags-dupe-always.stderr b/tests/ui/instrument-xray/flags-dupe-always.stderr new file mode 100644 index 0000000000000..d1ac113fa4384 --- /dev/null +++ b/tests/ui/instrument-xray/flags-dupe-always.stderr @@ -0,0 +1,2 @@ +error: incorrect value `always,always` for unstable option `instrument-xray` - either a boolean (`yes`, `no`, `on`, `off`, etc), or a comma separated list of settings: `always` or `never` (mutually exclusive), `ignore-loops`, `instruction-threshold=N`, `skip-entry`, `skip-exit` was expected + diff --git a/tests/ui/instrument-xray/flags-dupe-ignore-loops.rs b/tests/ui/instrument-xray/flags-dupe-ignore-loops.rs new file mode 100644 index 0000000000000..227f8557f42ad --- /dev/null +++ b/tests/ui/instrument-xray/flags-dupe-ignore-loops.rs @@ -0,0 +1,6 @@ +// Checks that `-Z instrument-xray` does not allow duplicates. 
+// +// compile-flags: -Z instrument-xray=ignore-loops,ignore-loops +// error-pattern: incorrect value `ignore-loops,ignore-loops` for unstable option `instrument-xray` + +fn main() {} diff --git a/tests/ui/instrument-xray/flags-dupe-ignore-loops.stderr b/tests/ui/instrument-xray/flags-dupe-ignore-loops.stderr new file mode 100644 index 0000000000000..52f6b33075bc1 --- /dev/null +++ b/tests/ui/instrument-xray/flags-dupe-ignore-loops.stderr @@ -0,0 +1,2 @@ +error: incorrect value `ignore-loops,ignore-loops` for unstable option `instrument-xray` - either a boolean (`yes`, `no`, `on`, `off`, etc), or a comma separated list of settings: `always` or `never` (mutually exclusive), `ignore-loops`, `instruction-threshold=N`, `skip-entry`, `skip-exit` was expected + From b3cadd2dcfe37836cdbfd6c5871bd8398748ac3f Mon Sep 17 00:00:00 2001 From: Oleksii Lozovskyi Date: Sat, 24 Sep 2022 21:54:47 +0900 Subject: [PATCH 330/501] Allow multiple instrumentation attributes Four because that's the new reasonable maximum for XRay instrumentation attributes in the following commit. --- compiler/rustc_codegen_llvm/src/attributes.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs index 54ac7a46cf2f3..0494bd7a19be9 100644 --- a/compiler/rustc_codegen_llvm/src/attributes.rs +++ b/compiler/rustc_codegen_llvm/src/attributes.rs @@ -118,7 +118,8 @@ pub fn frame_pointer_type_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> Option<&'ll Attr /// Tell LLVM what instrument function to insert. #[inline] -fn instrument_function_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> Option<&'ll Attribute> { +fn instrument_function_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attribute; 4]> { + let mut attrs = SmallVec::new(); if cx.sess().opts.unstable_opts.instrument_mcount { // Similar to `clang -pg` behavior. Handled by the // `post-inline-ee-instrument` LLVM pass. @@ -127,14 +128,13 @@ fn instrument_function_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> Option<&'ll Attribu // See test/CodeGen/mcount.c in clang. let mcount_name = cx.sess().target.mcount.as_ref(); - Some(llvm::CreateAttrStringValue( + attrs.push(llvm::CreateAttrStringValue( cx.llcx, "instrument-function-entry-inlined", &mcount_name, - )) - } else { - None + )); } + attrs } fn nojumptables_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> Option<&'ll Attribute> { From bac15db1d0f3ccdb9b9f61ba808cd25fbf400e88 Mon Sep 17 00:00:00 2001 From: Oleksii Lozovskyi Date: Sat, 24 Sep 2022 22:05:25 +0900 Subject: [PATCH 331/501] Emit basic XRay instrumentation attributes Add the attributes to functions according to the settings. "xray-always" overrides "xray-never", and they both override "xray-ignore-loops" and "xray-instruction-threshold", but we'll let lints deal with warnings about silly attribute combinations. --- compiler/rustc_codegen_llvm/src/attributes.rs | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs index 0494bd7a19be9..7a4ec494c8e7f 100644 --- a/compiler/rustc_codegen_llvm/src/attributes.rs +++ b/compiler/rustc_codegen_llvm/src/attributes.rs @@ -134,6 +134,34 @@ fn instrument_function_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attr &mcount_name, )); } + if let Some(options) = &cx.sess().opts.unstable_opts.instrument_xray { + // XRay instrumentation is similar to __cyg_profile_func_{enter,exit}. 
+ // Function prologue and epilogue are instrumented with NOP sleds, + // a runtime library later replaces them with detours into tracing code. + if options.always { + attrs.push(llvm::CreateAttrStringValue(cx.llcx, "function-instrument", "xray-always")); + } + if options.never { + attrs.push(llvm::CreateAttrStringValue(cx.llcx, "function-instrument", "xray-never")); + } + if options.ignore_loops { + attrs.push(llvm::CreateAttrString(cx.llcx, "xray-ignore-loops")); + } + // LLVM will not choose the default for us, but rather requires specific + // threshold in absence of "xray-always". Use the same default as Clang. + let threshold = options.instruction_threshold.unwrap_or(200); + attrs.push(llvm::CreateAttrStringValue( + cx.llcx, + "xray-instruction-threshold", + &threshold.to_string(), + )); + if options.skip_entry { + attrs.push(llvm::CreateAttrString(cx.llcx, "xray-skip-entry")); + } + if options.skip_exit { + attrs.push(llvm::CreateAttrString(cx.llcx, "xray-skip-exit")); + } + } attrs } From 0fef658ffeb0fdc858d6c111a992b570ed73c951 Mon Sep 17 00:00:00 2001 From: Oleksii Lozovskyi Date: Sun, 2 Oct 2022 10:25:49 +0900 Subject: [PATCH 332/501] Codegen tests for -Z instrument-xray Let's add at least some tests to verify that this option is accepted and produces expected LLVM attributes. More tests can be added later with attribute support. --- tests/codegen/instrument-xray/basic.rs | 8 ++++++++ tests/codegen/instrument-xray/options-combine.rs | 11 +++++++++++ tests/codegen/instrument-xray/options-override.rs | 10 ++++++++++ 3 files changed, 29 insertions(+) create mode 100644 tests/codegen/instrument-xray/basic.rs create mode 100644 tests/codegen/instrument-xray/options-combine.rs create mode 100644 tests/codegen/instrument-xray/options-override.rs diff --git a/tests/codegen/instrument-xray/basic.rs b/tests/codegen/instrument-xray/basic.rs new file mode 100644 index 0000000000000..9c128767747e7 --- /dev/null +++ b/tests/codegen/instrument-xray/basic.rs @@ -0,0 +1,8 @@ +// Checks that `-Z instrument-xray` produces expected instrumentation. +// +// compile-flags: -Z instrument-xray=always + +#![crate_type = "lib"] + +// CHECK: attributes #{{.*}} "function-instrument"="xray-always" +pub fn function() {} diff --git a/tests/codegen/instrument-xray/options-combine.rs b/tests/codegen/instrument-xray/options-combine.rs new file mode 100644 index 0000000000000..0c1992318f576 --- /dev/null +++ b/tests/codegen/instrument-xray/options-combine.rs @@ -0,0 +1,11 @@ +// Checks that `-Z instrument-xray` options can be specified multiple times. +// +// compile-flags: -Z instrument-xray=skip-exit +// compile-flags: -Z instrument-xray=instruction-threshold=123 +// compile-flags: -Z instrument-xray=instruction-threshold=456 + +#![crate_type = "lib"] + +// CHECK: attributes #{{.*}} "xray-instruction-threshold"="456" "xray-skip-exit" +// CHECK-NOT: attributes #{{.*}} "xray-instruction-threshold"="123" +pub fn function() {} diff --git a/tests/codegen/instrument-xray/options-override.rs b/tests/codegen/instrument-xray/options-override.rs new file mode 100644 index 0000000000000..3a7c37f9006be --- /dev/null +++ b/tests/codegen/instrument-xray/options-override.rs @@ -0,0 +1,10 @@ +// Checks that the last `-Z instrument-xray` option wins. 
+// +// compile-flags: -Z instrument-xray=always +// compile-flags: -Z instrument-xray=never + +#![crate_type = "lib"] + +// CHECK: attributes #{{.*}} "function-instrument"="xray-never" +// CHECK-NOT: attributes #{{.*}} "function-instrument"="xray-always" +pub fn function() {} From 8e49c847400b0ec47d501a72e3c0ad95f81b5fe9 Mon Sep 17 00:00:00 2001 From: Oleksii Lozovskyi Date: Sun, 2 Oct 2022 10:45:54 +0900 Subject: [PATCH 333/501] XRay support flag in TargetOptions Specify where XRay is supported. I only test ARM64 and x86_64, but hey those others should work too, right? LLVM documentation says that MIPS and PPC are also supported, but I don't have the hardware, so I won't pretend. Naturally, more targets can be added later with more testing. --- compiler/rustc_target/src/spec/aarch64_linux_android.rs | 1 + compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs | 1 + .../rustc_target/src/spec/aarch64_unknown_linux_musl.rs | 1 + compiler/rustc_target/src/spec/mod.rs | 6 ++++++ compiler/rustc_target/src/spec/x86_64_linux_android.rs | 1 + compiler/rustc_target/src/spec/x86_64_unknown_freebsd.rs | 1 + compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs | 1 + compiler/rustc_target/src/spec/x86_64_unknown_linux_musl.rs | 1 + compiler/rustc_target/src/spec/x86_64_unknown_netbsd.rs | 1 + compiler/rustc_target/src/spec/x86_64_unknown_openbsd.rs | 1 + 10 files changed, 15 insertions(+) diff --git a/compiler/rustc_target/src/spec/aarch64_linux_android.rs b/compiler/rustc_target/src/spec/aarch64_linux_android.rs index c85f7f62a4239..daa946ccd519c 100644 --- a/compiler/rustc_target/src/spec/aarch64_linux_android.rs +++ b/compiler/rustc_target/src/spec/aarch64_linux_android.rs @@ -19,6 +19,7 @@ pub fn target() -> Target { | SanitizerSet::MEMTAG | SanitizerSet::SHADOWCALLSTACK | SanitizerSet::ADDRESS, + supports_xray: true, ..super::android_base::opts() }, } diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs b/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs index 3006044d54a6e..36d54f1d7cc5c 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs @@ -17,6 +17,7 @@ pub fn target() -> Target { | SanitizerSet::MEMTAG | SanitizerSet::THREAD | SanitizerSet::HWADDRESS, + supports_xray: true, ..super::linux_gnu_base::opts() }, } diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs b/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs index 002d0dac2a668..9c299fed6be16 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs @@ -3,6 +3,7 @@ use crate::spec::{Target, TargetOptions}; pub fn target() -> Target { let mut base = super::linux_musl_base::opts(); base.max_atomic_width = Some(128); + base.supports_xray = true; Target { llvm_target: "aarch64-unknown-linux-musl".into(), diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index a094c2c545269..bc1920e34249a 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -1718,6 +1718,9 @@ pub struct TargetOptions { /// The ABI of entry function. /// Default value is `Conv::C`, i.e. C call convention pub entry_abi: Conv, + + /// Whether the target supports XRay instrumentation. 
+ pub supports_xray: bool, } /// Add arguments for the given flavor and also for its "twin" flavors @@ -1937,6 +1940,7 @@ impl Default for TargetOptions { supports_stack_protector: true, entry_name: "main".into(), entry_abi: Conv::C, + supports_xray: false, } } } @@ -2592,6 +2596,7 @@ impl Target { key!(supports_stack_protector, bool); key!(entry_name); key!(entry_abi, Conv)?; + key!(supports_xray, bool); if base.is_builtin { // This can cause unfortunate ICEs later down the line. @@ -2845,6 +2850,7 @@ impl ToJson for Target { target_option_val!(supports_stack_protector); target_option_val!(entry_name); target_option_val!(entry_abi); + target_option_val!(supports_xray); if let Some(abi) = self.default_adjusted_cabi { d.insert("default-adjusted-cabi".into(), Abi::name(abi).to_json()); diff --git a/compiler/rustc_target/src/spec/x86_64_linux_android.rs b/compiler/rustc_target/src/spec/x86_64_linux_android.rs index 9c9137848550f..a3bdb5f5465b0 100644 --- a/compiler/rustc_target/src/spec/x86_64_linux_android.rs +++ b/compiler/rustc_target/src/spec/x86_64_linux_android.rs @@ -8,6 +8,7 @@ pub fn target() -> Target { base.max_atomic_width = Some(64); base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); base.stack_probes = StackProbeType::X86; + base.supports_xray = true; Target { llvm_target: "x86_64-linux-android".into(), diff --git a/compiler/rustc_target/src/spec/x86_64_unknown_freebsd.rs b/compiler/rustc_target/src/spec/x86_64_unknown_freebsd.rs index 98988ab359542..b41e5842aad13 100644 --- a/compiler/rustc_target/src/spec/x86_64_unknown_freebsd.rs +++ b/compiler/rustc_target/src/spec/x86_64_unknown_freebsd.rs @@ -8,6 +8,7 @@ pub fn target() -> Target { base.stack_probes = StackProbeType::X86; base.supported_sanitizers = SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::MEMORY | SanitizerSet::THREAD; + base.supports_xray = true; Target { llvm_target: "x86_64-unknown-freebsd".into(), diff --git a/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs b/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs index a91ab365b668a..9af1049b87026 100644 --- a/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs +++ b/compiler/rustc_target/src/spec/x86_64_unknown_linux_gnu.rs @@ -12,6 +12,7 @@ pub fn target() -> Target { | SanitizerSet::LEAK | SanitizerSet::MEMORY | SanitizerSet::THREAD; + base.supports_xray = true; Target { llvm_target: "x86_64-unknown-linux-gnu".into(), diff --git a/compiler/rustc_target/src/spec/x86_64_unknown_linux_musl.rs b/compiler/rustc_target/src/spec/x86_64_unknown_linux_musl.rs index 9087dc3df6007..bf4cf7d7becad 100644 --- a/compiler/rustc_target/src/spec/x86_64_unknown_linux_musl.rs +++ b/compiler/rustc_target/src/spec/x86_64_unknown_linux_musl.rs @@ -12,6 +12,7 @@ pub fn target() -> Target { | SanitizerSet::LEAK | SanitizerSet::MEMORY | SanitizerSet::THREAD; + base.supports_xray = true; Target { llvm_target: "x86_64-unknown-linux-musl".into(), diff --git a/compiler/rustc_target/src/spec/x86_64_unknown_netbsd.rs b/compiler/rustc_target/src/spec/x86_64_unknown_netbsd.rs index 64ae425d8c0b7..74c434935ba88 100644 --- a/compiler/rustc_target/src/spec/x86_64_unknown_netbsd.rs +++ b/compiler/rustc_target/src/spec/x86_64_unknown_netbsd.rs @@ -11,6 +11,7 @@ pub fn target() -> Target { | SanitizerSet::LEAK | SanitizerSet::MEMORY | SanitizerSet::THREAD; + base.supports_xray = true; Target { llvm_target: "x86_64-unknown-netbsd".into(), diff --git a/compiler/rustc_target/src/spec/x86_64_unknown_openbsd.rs 
b/compiler/rustc_target/src/spec/x86_64_unknown_openbsd.rs index 66b8e20226f19..8e4d42a0acaf3 100644 --- a/compiler/rustc_target/src/spec/x86_64_unknown_openbsd.rs +++ b/compiler/rustc_target/src/spec/x86_64_unknown_openbsd.rs @@ -6,6 +6,7 @@ pub fn target() -> Target { base.max_atomic_width = Some(64); base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); base.stack_probes = StackProbeType::X86; + base.supports_xray = true; Target { llvm_target: "x86_64-unknown-openbsd".into(), From 3561dc948c1a2a24c2b49992099954a96be3c8ee Mon Sep 17 00:00:00 2001 From: Oleksii Lozovskyi Date: Sun, 2 Oct 2022 10:55:35 +0900 Subject: [PATCH 334/501] Emit an error if -Z instrument-xray is not supported This is somewhat important because LLVM enables the pass based on target architecture, but support by the target OS also matters. For example, XRay attributes are processed by codegen for macOS targets, but Apple linker fails to process relocations in XRay data sections, so the feature as a whole is not supported there for the time being. --- compiler/rustc_error_messages/locales/en-US/session.ftl | 2 ++ compiler/rustc_session/src/errors.rs | 6 ++++++ compiler/rustc_session/src/session.rs | 4 ++++ tests/ui/instrument-xray/target-not-supported.rs | 9 +++++++++ tests/ui/instrument-xray/target-not-supported.stderr | 4 ++++ 5 files changed, 25 insertions(+) create mode 100644 tests/ui/instrument-xray/target-not-supported.rs create mode 100644 tests/ui/instrument-xray/target-not-supported.stderr diff --git a/compiler/rustc_error_messages/locales/en-US/session.ftl b/compiler/rustc_error_messages/locales/en-US/session.ftl index 5984c201af0d0..fe553edab4276 100644 --- a/compiler/rustc_error_messages/locales/en-US/session.ftl +++ b/compiler/rustc_error_messages/locales/en-US/session.ftl @@ -25,6 +25,8 @@ session_profile_sample_use_file_does_not_exist = file `{$path}` passed to `-C pr session_target_requires_unwind_tables = target requires unwind tables, they cannot be disabled with `-C force-unwind-tables=no` +session_instrumentation_not_supported = {$us} instrumentation is not supported for this target + session_sanitizer_not_supported = {$us} sanitizer is not supported for this target session_sanitizers_not_supported = {$us} sanitizers are not supported for this target diff --git a/compiler/rustc_session/src/errors.rs b/compiler/rustc_session/src/errors.rs index 8e8fba5e236f5..c851145440b86 100644 --- a/compiler/rustc_session/src/errors.rs +++ b/compiler/rustc_session/src/errors.rs @@ -71,6 +71,12 @@ pub struct ProfileSampleUseFileDoesNotExist<'a> { #[diag(session_target_requires_unwind_tables)] pub struct TargetRequiresUnwindTables; +#[derive(Diagnostic)] +#[diag(session_instrumentation_not_supported)] +pub struct InstrumentationNotSupported { + pub us: String, +} + #[derive(Diagnostic)] #[diag(session_sanitizer_not_supported)] pub struct SanitizerNotSupported { diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index 8a0176f639174..fe6ac80fde6eb 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -1589,6 +1589,10 @@ fn validate_commandline_args_with_session_available(sess: &Session) { { sess.emit_err(errors::SplitDebugInfoUnstablePlatform { debuginfo: sess.split_debuginfo() }); } + + if sess.opts.unstable_opts.instrument_xray.is_some() && !sess.target.options.supports_xray { + sess.emit_err(errors::InstrumentationNotSupported { us: "XRay".to_string() }); + } } /// Holds data on the current incremental 
compilation session, if there is one. diff --git a/tests/ui/instrument-xray/target-not-supported.rs b/tests/ui/instrument-xray/target-not-supported.rs new file mode 100644 index 0000000000000..e6bdd23e8fc3a --- /dev/null +++ b/tests/ui/instrument-xray/target-not-supported.rs @@ -0,0 +1,9 @@ +// Verifies that `-Z instrument-xray` cannot be used with unsupported targets, +// +// needs-llvm-components: x86 +// compile-flags: -Z instrument-xray --target x86_64-apple-darwin +// error-pattern: error: XRay instrumentation is not supported for this target + +#![feature(no_core)] +#![no_core] +#![no_main] diff --git a/tests/ui/instrument-xray/target-not-supported.stderr b/tests/ui/instrument-xray/target-not-supported.stderr new file mode 100644 index 0000000000000..6e3b0c8a380b8 --- /dev/null +++ b/tests/ui/instrument-xray/target-not-supported.stderr @@ -0,0 +1,4 @@ +error: XRay instrumentation is not supported for this target + +error: aborting due to previous error + From 54b26f49e6d30aefcbf206ee5cfcf6122503553c Mon Sep 17 00:00:00 2001 From: Oleksii Lozovskyi Date: Sun, 2 Oct 2022 12:58:05 +0900 Subject: [PATCH 335/501] Test XRay only for supported targets Now that the compiler accepts "-Z instrument-xray" option only when targeting one of the supported targets, make sure to not run the codegen tests where the compiler will fail. Like with other compiletests, we don't have access to internals, so simply hardcode a list of supported architectures here. --- src/tools/compiletest/src/header.rs | 2 ++ src/tools/compiletest/src/util.rs | 13 +++++++++++++ tests/codegen/instrument-xray/basic.rs | 1 + tests/codegen/instrument-xray/options-combine.rs | 1 + tests/codegen/instrument-xray/options-override.rs | 1 + tests/ui/instrument-xray/flags-always-never-1.rs | 1 + tests/ui/instrument-xray/flags-always-never-2.rs | 1 + tests/ui/instrument-xray/flags-basic.rs | 1 + tests/ui/instrument-xray/flags-dupe-always.rs | 1 + tests/ui/instrument-xray/flags-dupe-ignore-loops.rs | 1 + 10 files changed, 23 insertions(+) diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs index 45fd87bea9bb5..e11ebca6ea9af 100644 --- a/src/tools/compiletest/src/header.rs +++ b/src/tools/compiletest/src/header.rs @@ -941,6 +941,7 @@ pub fn make_test_description( let has_hwasan = util::HWASAN_SUPPORTED_TARGETS.contains(&&*config.target); let has_memtag = util::MEMTAG_SUPPORTED_TARGETS.contains(&&*config.target); let has_shadow_call_stack = util::SHADOWCALLSTACK_SUPPORTED_TARGETS.contains(&&*config.target); + let has_xray = util::XRAY_SUPPORTED_TARGETS.contains(&&*config.target); // For tests using the `needs-rust-lld` directive (e.g. for `-Zgcc-ld=lld`), we need to find // whether `rust-lld` is present in the compiler under test. 
@@ -1019,6 +1020,7 @@ pub fn make_test_description( && config.parse_name_directive(ln, "needs-sanitizer-shadow-call-stack") ); reason!(!config.can_unwind() && config.parse_name_directive(ln, "needs-unwind")); + reason!(!has_xray && config.parse_name_directive(ln, "needs-xray")); reason!( config.target == "wasm32-unknown-unknown" && config.parse_name_directive(ln, directives::CHECK_RUN_RESULTS) diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs index ff7e8df987816..67f49bb6397c2 100644 --- a/src/tools/compiletest/src/util.rs +++ b/src/tools/compiletest/src/util.rs @@ -78,6 +78,19 @@ pub const MEMTAG_SUPPORTED_TARGETS: &[&str] = pub const SHADOWCALLSTACK_SUPPORTED_TARGETS: &[&str] = &["aarch64-linux-android"]; +pub const XRAY_SUPPORTED_TARGETS: &[&str] = &[ + "aarch64-linux-android", + "aarch64-unknown-linux-gnu", + "aarch64-unknown-linux-musl", + "x86_64-linux-android", + "x86_64-unknown-freebsd", + "x86_64-unknown-linux-gnu", + "x86_64-unknown-linux-musl", + "x86_64-unknown-netbsd", + "x86_64-unknown-none-linuxkernel", + "x86_64-unknown-openbsd", +]; + pub fn make_new_path(path: &str) -> String { assert!(cfg!(windows)); // Windows just uses PATH as the library search path, so we have to diff --git a/tests/codegen/instrument-xray/basic.rs b/tests/codegen/instrument-xray/basic.rs index 9c128767747e7..d3e49d5317442 100644 --- a/tests/codegen/instrument-xray/basic.rs +++ b/tests/codegen/instrument-xray/basic.rs @@ -1,5 +1,6 @@ // Checks that `-Z instrument-xray` produces expected instrumentation. // +// needs-xray // compile-flags: -Z instrument-xray=always #![crate_type = "lib"] diff --git a/tests/codegen/instrument-xray/options-combine.rs b/tests/codegen/instrument-xray/options-combine.rs index 0c1992318f576..f7e500b65f62d 100644 --- a/tests/codegen/instrument-xray/options-combine.rs +++ b/tests/codegen/instrument-xray/options-combine.rs @@ -1,5 +1,6 @@ // Checks that `-Z instrument-xray` options can be specified multiple times. // +// needs-xray // compile-flags: -Z instrument-xray=skip-exit // compile-flags: -Z instrument-xray=instruction-threshold=123 // compile-flags: -Z instrument-xray=instruction-threshold=456 diff --git a/tests/codegen/instrument-xray/options-override.rs b/tests/codegen/instrument-xray/options-override.rs index 3a7c37f9006be..00f81837902dd 100644 --- a/tests/codegen/instrument-xray/options-override.rs +++ b/tests/codegen/instrument-xray/options-override.rs @@ -1,5 +1,6 @@ // Checks that the last `-Z instrument-xray` option wins. // +// needs-xray // compile-flags: -Z instrument-xray=always // compile-flags: -Z instrument-xray=never diff --git a/tests/ui/instrument-xray/flags-always-never-1.rs b/tests/ui/instrument-xray/flags-always-never-1.rs index 03274dedd0830..4dd43439eb7c2 100644 --- a/tests/ui/instrument-xray/flags-always-never-1.rs +++ b/tests/ui/instrument-xray/flags-always-never-1.rs @@ -1,5 +1,6 @@ // Checks that `-Z instrument-xray` does not allow `always` and `never` simultaneously. // +// needs-xray // compile-flags: -Z instrument-xray=always,never // error-pattern: incorrect value `always,never` for unstable option `instrument-xray` diff --git a/tests/ui/instrument-xray/flags-always-never-2.rs b/tests/ui/instrument-xray/flags-always-never-2.rs index e752890b47ad0..7310aa0a0d288 100644 --- a/tests/ui/instrument-xray/flags-always-never-2.rs +++ b/tests/ui/instrument-xray/flags-always-never-2.rs @@ -1,6 +1,7 @@ // Checks that `-Z instrument-xray` allows `always` and `never` sequentially. 
// (The last specified setting wins, like `-Z instrument-xray=no` as well.) // +// needs-xray // compile-flags: -Z instrument-xray=always // compile-flags: -Z instrument-xray=never // check-pass diff --git a/tests/ui/instrument-xray/flags-basic.rs b/tests/ui/instrument-xray/flags-basic.rs index 5889a20f670c8..b97f0dd8a072c 100644 --- a/tests/ui/instrument-xray/flags-basic.rs +++ b/tests/ui/instrument-xray/flags-basic.rs @@ -1,5 +1,6 @@ // Verifies basic `-Z instrument-xray` flags. // +// needs-xray // compile-flags: -Z instrument-xray // compile-flags: -Z instrument-xray=skip-exit // compile-flags: -Z instrument-xray=ignore-loops,instruction-threshold=300 diff --git a/tests/ui/instrument-xray/flags-dupe-always.rs b/tests/ui/instrument-xray/flags-dupe-always.rs index 36dda4bbd03f4..407f3e2aa5da8 100644 --- a/tests/ui/instrument-xray/flags-dupe-always.rs +++ b/tests/ui/instrument-xray/flags-dupe-always.rs @@ -1,5 +1,6 @@ // Checks that `-Z instrument-xray` does not allow duplicates. // +// needs-xray // compile-flags: -Z instrument-xray=always,always // error-pattern: incorrect value `always,always` for unstable option `instrument-xray` diff --git a/tests/ui/instrument-xray/flags-dupe-ignore-loops.rs b/tests/ui/instrument-xray/flags-dupe-ignore-loops.rs index 227f8557f42ad..75b210a6547ec 100644 --- a/tests/ui/instrument-xray/flags-dupe-ignore-loops.rs +++ b/tests/ui/instrument-xray/flags-dupe-ignore-loops.rs @@ -1,5 +1,6 @@ // Checks that `-Z instrument-xray` does not allow duplicates. // +// needs-xray // compile-flags: -Z instrument-xray=ignore-loops,ignore-loops // error-pattern: incorrect value `ignore-loops,ignore-loops` for unstable option `instrument-xray` From f7b3e39502783c82d4a8d9e02f59aa4268d15dbf Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 7 Feb 2023 16:11:40 +1100 Subject: [PATCH 336/501] Simplify `tls::enter_context`. --- compiler/rustc_interface/src/callbacks.rs | 2 +- compiler/rustc_interface/src/passes.rs | 2 +- compiler/rustc_middle/src/dep_graph/mod.rs | 2 +- compiler/rustc_middle/src/ty/context/tls.rs | 4 ++-- compiler/rustc_query_impl/src/plumbing.rs | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/compiler/rustc_interface/src/callbacks.rs b/compiler/rustc_interface/src/callbacks.rs index ee0552d77ceee..bc6d7c209971c 100644 --- a/compiler/rustc_interface/src/callbacks.rs +++ b/compiler/rustc_interface/src/callbacks.rs @@ -38,7 +38,7 @@ fn track_diagnostic(diagnostic: &mut Diagnostic, f: &mut dyn FnMut(&mut Diagnost // Diagnostics are tracked, we can ignore the dependency. let icx = tls::ImplicitCtxt { task_deps: TaskDepsRef::Ignore, ..icx.clone() }; - return tls::enter_context(&icx, move |_| (*f)(diagnostic)); + return tls::enter_context(&icx, move || (*f)(diagnostic)); } // In any other case, invoke diagnostics anyway. 
diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 2a373ebc1324d..304c32574566f 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -748,7 +748,7 @@ impl<'tcx> QueryContext<'tcx> { F: FnOnce(TyCtxt<'tcx>) -> R, { let icx = ty::tls::ImplicitCtxt::new(self.gcx); - ty::tls::enter_context(&icx, |_| f(icx.tcx)) + ty::tls::enter_context(&icx, || f(icx.tcx)) } } diff --git a/compiler/rustc_middle/src/dep_graph/mod.rs b/compiler/rustc_middle/src/dep_graph/mod.rs index 2e62bebc8525b..2e82efba1924e 100644 --- a/compiler/rustc_middle/src/dep_graph/mod.rs +++ b/compiler/rustc_middle/src/dep_graph/mod.rs @@ -55,7 +55,7 @@ impl rustc_query_system::dep_graph::DepKind for DepKind { ty::tls::with_context(|icx| { let icx = ty::tls::ImplicitCtxt { task_deps, ..icx.clone() }; - ty::tls::enter_context(&icx, |_| op()) + ty::tls::enter_context(&icx, op) }) } diff --git a/compiler/rustc_middle/src/ty/context/tls.rs b/compiler/rustc_middle/src/ty/context/tls.rs index 71b025dc1be4b..4d1ddf0c7f163 100644 --- a/compiler/rustc_middle/src/ty/context/tls.rs +++ b/compiler/rustc_middle/src/ty/context/tls.rs @@ -110,9 +110,9 @@ unsafe fn downcast<'a, 'tcx>(context: *const ()) -> &'a ImplicitCtxt<'a, 'tcx> { #[inline] pub fn enter_context<'a, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'tcx>, f: F) -> R where - F: FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R, + F: FnOnce() -> R, { - tlv::with_tlv(erase(context), || f(&context)) + tlv::with_tlv(erase(context), f) } /// Allows access to the current `ImplicitCtxt` in a closure if one is available. diff --git a/compiler/rustc_query_impl/src/plumbing.rs b/compiler/rustc_query_impl/src/plumbing.rs index 4dea03c1ef6a2..49309db564ea8 100644 --- a/compiler/rustc_query_impl/src/plumbing.rs +++ b/compiler/rustc_query_impl/src/plumbing.rs @@ -124,7 +124,7 @@ impl QueryContext for QueryCtxt<'_> { }; // Use the `ImplicitCtxt` while we execute the query. - tls::enter_context(&new_icx, |_| { + tls::enter_context(&new_icx, || { rustc_data_structures::stack::ensure_sufficient_stack(compute) }) }) From 18f751df6adc6342ee0814dd6bc36bf867ff0029 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Wed, 8 Feb 2023 16:24:57 +1100 Subject: [PATCH 337/501] Simplify `with_tlv`. --- compiler/rustc_middle/src/lib.rs | 1 + compiler/rustc_middle/src/ty/context/tls.rs | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index 95148de251824..56df1a66f9d29 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -34,6 +34,7 @@ #![feature(get_mut_unchecked)] #![feature(if_let_guard)] #![feature(iter_from_generator)] +#![feature(local_key_cell_methods)] #![feature(negative_impls)] #![feature(never_type)] #![feature(extern_types)] diff --git a/compiler/rustc_middle/src/ty/context/tls.rs b/compiler/rustc_middle/src/ty/context/tls.rs index 4d1ddf0c7f163..5426ac8d73992 100644 --- a/compiler/rustc_middle/src/ty/context/tls.rs +++ b/compiler/rustc_middle/src/ty/context/tls.rs @@ -89,9 +89,8 @@ mod tlv { /// This is used to set the pointer to the new `ImplicitCtxt`. 
#[inline] pub(super) fn with_tlv R, R>(value: *const (), f: F) -> R { - let old = get_tlv(); - let _reset = rustc_data_structures::OnDrop(move || TLV.with(|tlv| tlv.set(old))); - TLV.with(|tlv| tlv.set(value)); + let old = TLV.replace(value); + let _reset = rustc_data_structures::OnDrop(move || TLV.set(old)); f() } } From afbe167fbb683fc1ed1c7577ab2eaa12cc44a6bf Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Thu, 9 Feb 2023 10:51:29 +1100 Subject: [PATCH 338/501] Avoid some `tls::with` calls. These are in places where a `tcx` is easily obtained. --- .../rustc_const_eval/src/const_eval/eval_queries.rs | 2 +- .../rustc_infer/src/infer/canonical/canonicalizer.rs | 10 ++++------ .../src/traits/error_reporting/suggestions.rs | 7 +++---- 3 files changed, 8 insertions(+), 11 deletions(-) diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs index 18e01567ca35e..b4a49e1df610c 100644 --- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs @@ -54,7 +54,7 @@ fn eval_body_using_ecx<'mir, 'tcx>( trace!( "eval_body_using_ecx: pushing stack frame for global: {}{}", - with_no_trimmed_paths!(ty::tls::with(|tcx| tcx.def_path_str(cid.instance.def_id()))), + with_no_trimmed_paths!(ecx.tcx.def_path_str(cid.instance.def_id())), cid.promoted.map_or_else(String::new, |p| format!("::promoted[{:?}]", p)) ); diff --git a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs index 87c6dfad5fa2b..1e2441d984ad6 100644 --- a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs +++ b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs @@ -203,12 +203,10 @@ impl CanonicalizeMode for CanonicalizeQueryResponse { // rust-lang/rust#57464: `impl Trait` can leak local // scopes (in manner violating typeck). Therefore, use // `delay_span_bug` to allow type error over an ICE. 
- ty::tls::with(|tcx| { - tcx.sess.delay_span_bug( - rustc_span::DUMMY_SP, - &format!("unexpected region in query response: `{:?}`", r), - ); - }); + canonicalizer.tcx.sess.delay_span_bug( + rustc_span::DUMMY_SP, + &format!("unexpected region in query response: `{:?}`", r), + ); r } } diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs index 59aef52910ee3..2b543520198fb 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs @@ -98,6 +98,7 @@ impl<'tcx, 'a> GeneratorData<'tcx, 'a> { // obligation fn get_from_await_ty( &self, + tcx: TyCtxt<'tcx>, visitor: AwaitsVisitor, hir: map::Map<'tcx>, ty_matches: F, @@ -134,9 +135,7 @@ impl<'tcx, 'a> GeneratorData<'tcx, 'a> { .unwrap_or_else(|| { bug!( "node_type: no type for node {}", - ty::tls::with(|tcx| tcx - .hir() - .node_to_string(await_expr.hir_id)) + tcx.hir().node_to_string(await_expr.hir_id) ) }) }, @@ -2351,7 +2350,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { let mut interior_or_upvar_span = None; - let from_awaited_ty = generator_data.get_from_await_ty(visitor, hir, ty_matches); + let from_awaited_ty = generator_data.get_from_await_ty(self.tcx, visitor, hir, ty_matches); debug!(?from_awaited_ty); // The generator interior types share the same binders From 243944c6535867f2d4e3bc44f4a8b0e300dc83b9 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 7 Feb 2023 16:59:50 +1100 Subject: [PATCH 339/501] Remove `QueryContext`. There is a type `QueryCtxt`, which impls the trait `QueryContext`. Confusingly, there is another type `QueryContext`. The latter is (like `TyCtxt`) just a pointer to a `GlobalContext`. It's not used much, e.g. its `impl` block has a single method. This commit removes `QueryContext`, replacing its use with direct `GlobalCtxt` use. --- compiler/rustc_interface/src/passes.rs | 26 +++++------------------- compiler/rustc_interface/src/queries.rs | 27 +++++++++++++------------ compiler/rustc_middle/src/ty/context.rs | 12 +++++++++++ src/librustdoc/lib.rs | 4 ++-- 4 files changed, 33 insertions(+), 36 deletions(-) diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 304c32574566f..33ebbb411ceb6 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -738,30 +738,16 @@ pub static DEFAULT_EXTERN_QUERY_PROVIDERS: LazyLock = LazyLock: extern_providers }); -pub struct QueryContext<'tcx> { - gcx: &'tcx GlobalCtxt<'tcx>, -} - -impl<'tcx> QueryContext<'tcx> { - pub fn enter(&mut self, f: F) -> R - where - F: FnOnce(TyCtxt<'tcx>) -> R, - { - let icx = ty::tls::ImplicitCtxt::new(self.gcx); - ty::tls::enter_context(&icx, || f(icx.tcx)) - } -} - pub fn create_global_ctxt<'tcx>( compiler: &'tcx Compiler, lint_store: Lrc, dep_graph: DepGraph, untracked: Untracked, queries: &'tcx OnceCell>, - global_ctxt: &'tcx OnceCell>, + gcx_cell: &'tcx OnceCell>, arena: &'tcx WorkerLocal>, hir_arena: &'tcx WorkerLocal>, -) -> QueryContext<'tcx> { +) -> &'tcx GlobalCtxt<'tcx> { // We're constructing the HIR here; we don't care what we will // read, since we haven't even constructed the *input* to // incr. comp. yet. 
@@ -785,8 +771,8 @@ pub fn create_global_ctxt<'tcx>( TcxQueries::new(local_providers, extern_providers, query_result_on_disk_cache) }); - let gcx = sess.time("setup_global_ctxt", || { - global_ctxt.get_or_init(move || { + sess.time("setup_global_ctxt", || { + gcx_cell.get_or_init(move || { TyCtxt::create_global_ctxt( sess, lint_store, @@ -799,9 +785,7 @@ pub fn create_global_ctxt<'tcx>( rustc_query_impl::query_callbacks(arena), ) }) - }); - - QueryContext { gcx } + }) } /// Runs the resolution, type-checking, region checking and other diff --git a/compiler/rustc_interface/src/queries.rs b/compiler/rustc_interface/src/queries.rs index 4b0180741c19d..6512695873ef9 100644 --- a/compiler/rustc_interface/src/queries.rs +++ b/compiler/rustc_interface/src/queries.rs @@ -1,6 +1,6 @@ use crate::errors::{FailedWritingFile, RustcErrorFatal, RustcErrorUnexpectedAnnotation}; use crate::interface::{Compiler, Result}; -use crate::passes::{self, BoxedResolver, QueryContext}; +use crate::passes::{self, BoxedResolver}; use rustc_ast as ast; use rustc_codegen_ssa::traits::CodegenBackend; @@ -64,7 +64,7 @@ impl<'a, T> std::ops::DerefMut for QueryResult<'a, T> { } } -impl<'a, 'tcx> QueryResult<'a, QueryContext<'tcx>> { +impl<'a, 'tcx> QueryResult<'a, &'tcx GlobalCtxt<'tcx>> { pub fn enter(&mut self, f: impl FnOnce(TyCtxt<'tcx>) -> T) -> T { (*self.0).get_mut().enter(f) } @@ -78,7 +78,7 @@ impl Default for Query { pub struct Queries<'tcx> { compiler: &'tcx Compiler, - gcx: OnceCell>, + gcx_cell: OnceCell>, queries: OnceCell>, arena: WorkerLocal>, @@ -90,7 +90,8 @@ pub struct Queries<'tcx> { register_plugins: Query<(ast::Crate, Lrc)>, expansion: Query<(Lrc, Rc>, Lrc)>, dep_graph: Query, - global_ctxt: Query>, + // This just points to what's in `gcx_cell`. + gcx: Query<&'tcx GlobalCtxt<'tcx>>, ongoing_codegen: Query>, } @@ -98,7 +99,7 @@ impl<'tcx> Queries<'tcx> { pub fn new(compiler: &'tcx Compiler) -> Queries<'tcx> { Queries { compiler, - gcx: OnceCell::new(), + gcx_cell: OnceCell::new(), queries: OnceCell::new(), arena: WorkerLocal::new(|_| Arena::default()), hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()), @@ -108,7 +109,7 @@ impl<'tcx> Queries<'tcx> { register_plugins: Default::default(), expansion: Default::default(), dep_graph: Default::default(), - global_ctxt: Default::default(), + gcx: Default::default(), ongoing_codegen: Default::default(), } } @@ -207,8 +208,8 @@ impl<'tcx> Queries<'tcx> { }) } - pub fn global_ctxt(&'tcx self) -> Result>> { - self.global_ctxt.compute(|| { + pub fn global_ctxt(&'tcx self) -> Result>> { + self.gcx.compute(|| { let crate_name = *self.crate_name()?.borrow(); let (krate, resolver, lint_store) = self.expansion()?.steal(); @@ -218,18 +219,18 @@ impl<'tcx> Queries<'tcx> { ast_lowering: untracked_resolver_for_lowering, } = BoxedResolver::to_resolver_outputs(resolver); - let mut qcx = passes::create_global_ctxt( + let gcx = passes::create_global_ctxt( self.compiler, lint_store, self.dep_graph()?.steal(), untracked, &self.queries, - &self.gcx, + &self.gcx_cell, &self.arena, &self.hir_arena, ); - qcx.enter(|tcx| { + gcx.enter(|tcx| { let feed = tcx.feed_unit_query(); feed.resolver_for_lowering( tcx.arena.alloc(Steal::new((untracked_resolver_for_lowering, krate))), @@ -239,7 +240,7 @@ impl<'tcx> Queries<'tcx> { let feed = tcx.feed_local_crate(); feed.crate_name(crate_name); }); - Ok(qcx) + Ok(gcx) }) } @@ -387,7 +388,7 @@ impl Compiler { // NOTE: intentionally does not compute the global context if it hasn't been built yet, // since that likely means there was a parse 
error. - if let Some(Ok(gcx)) = &mut *queries.global_ctxt.result.borrow_mut() { + if let Some(Ok(gcx)) = &mut *queries.gcx.result.borrow_mut() { let gcx = gcx.get_mut(); // We assume that no queries are run past here. If there are new queries // after this point, they'll show up as "" in self-profiling data. diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 9205a8a0ffed8..d07d9190e011e 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -468,6 +468,18 @@ pub struct GlobalCtxt<'tcx> { pub(crate) alloc_map: Lock>, } +impl<'tcx> GlobalCtxt<'tcx> { + /// Installs `self` in a `TyCtxt` and `ImplicitCtxt` for the duration of + /// `f`. + pub fn enter<'a: 'tcx, F, R>(&'a self, f: F) -> R + where + F: FnOnce(TyCtxt<'tcx>) -> R, + { + let icx = tls::ImplicitCtxt::new(self); + tls::enter_context(&icx, || f(icx.tcx)) + } +} + impl<'tcx> TyCtxt<'tcx> { /// Expects a body and returns its codegen attributes. /// diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 64108c8828518..90d6388b70cd8 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -814,9 +814,9 @@ fn main_args(at_args: &[String]) -> MainResult { sess.fatal("Compilation failed, aborting rustdoc"); } - let mut global_ctxt = abort_on_err(queries.global_ctxt(), sess); + let mut gcx = abort_on_err(queries.global_ctxt(), sess); - global_ctxt.enter(|tcx| { + gcx.enter(|tcx| { let (krate, render_opts, mut cache) = sess.time("run_global_ctxt", || { core::run_global_ctxt( tcx, From b58347a9c607e493ff947ff470492c38f7819c72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=AE=B8=E6=9D=B0=E5=8F=8B=20Jieyou=20Xu=20=28Joe=29?= Date: Thu, 9 Feb 2023 00:49:43 +0800 Subject: [PATCH 340/501] Don't expose type parameters and implementation details from macro expansion --- .../infer/error_reporting/need_type_info.rs | 23 +++++++++++-------- ...-107745-avoid-expr-from-macro-expansion.rs | 19 +++++++++++++++ ...745-avoid-expr-from-macro-expansion.stderr | 11 +++++++++ tests/ui/issues/issue-16966.stderr | 6 ++--- ...ing-closing-angle-bracket-eq-constraint.rs | 2 +- ...closing-angle-bracket-eq-constraint.stderr | 12 +++++----- .../cannot_infer_local_or_vec.stderr | 6 ++--- ...cannot_infer_local_or_vec_in_tuples.stderr | 6 ++--- 8 files changed, 59 insertions(+), 26 deletions(-) create mode 100644 tests/ui/inference/need_type_info/issue-107745-avoid-expr-from-macro-expansion.rs create mode 100644 tests/ui/inference/need_type_info/issue-107745-avoid-expr-from-macro-expansion.stderr diff --git a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs index b8c843a8a5a22..c092efbb557cf 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs @@ -158,8 +158,12 @@ fn fmt_printer<'a, 'tcx>(infcx: &'a InferCtxt<'tcx>, ns: Namespace) -> FmtPrinte if infcx.probe_ty_var(ty_vid).is_ok() { warn!("resolved ty var in error message"); } - if let TypeVariableOriginKind::TypeParameterDefinition(name, _) = - infcx.inner.borrow_mut().type_variables().var_origin(ty_vid).kind + + let mut infcx_inner = infcx.inner.borrow_mut(); + let ty_vars = infcx_inner.type_variables(); + let var_origin = ty_vars.var_origin(ty_vid); + if let TypeVariableOriginKind::TypeParameterDefinition(name, _) = var_origin.kind + && !var_origin.span.from_expansion() { Some(name) } else { @@ -254,7 +258,7 @@ impl<'tcx> 
InferCtxt<'tcx> { if let TypeVariableOriginKind::TypeParameterDefinition(name, def_id) = var_origin.kind { - if name != kw::SelfUpper { + if name != kw::SelfUpper && !var_origin.span.from_expansion() { return InferenceDiagnosticsData { name: name.to_string(), span: Some(var_origin.span), @@ -780,7 +784,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { // The sources are listed in order of preference here. let tcx = self.infcx.tcx; let ctx = CostCtxt { tcx }; - let base_cost = match source.kind { + match source.kind { InferSourceKind::LetBinding { ty, .. } => ctx.ty_cost(ty), InferSourceKind::ClosureArg { ty, .. } => ctx.ty_cost(ty), InferSourceKind::GenericArg { def_id, generic_args, .. } => { @@ -797,17 +801,17 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { InferSourceKind::ClosureReturn { ty, should_wrap_expr, .. } => { 30 + ctx.ty_cost(ty) + if should_wrap_expr.is_some() { 10 } else { 0 } } - }; - - let suggestion_may_apply = if source.from_expansion() { 10000 } else { 0 }; - - base_cost + suggestion_may_apply + } } /// Uses `fn source_cost` to determine whether this inference source is preferable to /// previous sources. We generally prefer earlier sources. #[instrument(level = "debug", skip(self))] fn update_infer_source(&mut self, mut new_source: InferSource<'tcx>) { + if new_source.from_expansion() { + return; + } + let cost = self.source_cost(&new_source) + self.attempt; debug!(?cost); self.attempt += 1; @@ -819,6 +823,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { // `let x: _ = iter.collect();`, as this is a very common case. *def_id = Some(did); } + if cost < self.infer_source_cost { self.infer_source_cost = cost; self.infer_source = Some(new_source); diff --git a/tests/ui/inference/need_type_info/issue-107745-avoid-expr-from-macro-expansion.rs b/tests/ui/inference/need_type_info/issue-107745-avoid-expr-from-macro-expansion.rs new file mode 100644 index 0000000000000..7f6758f47f8fe --- /dev/null +++ b/tests/ui/inference/need_type_info/issue-107745-avoid-expr-from-macro-expansion.rs @@ -0,0 +1,19 @@ +// ignore-tidy-linelength + +// Regression test for #107745. +// Previously need_type_info::update_infer_source will consider expressions originating from +// macro expressions as candiate "previous sources". This unfortunately can mean that +// for macros expansions such as `format!()` internal implementation details can leak, such as: +// +// ``` +// error[E0282]: type annotations needed +// --> src/main.rs:2:22 +// | +//2 | println!("{:?}", []); +// | ^^ cannot infer type of the type parameter `T` declared on the associated function `new_debug` +// ``` + +fn main() { + println!("{:?}", []); + //~^ ERROR type annotations needed +} diff --git a/tests/ui/inference/need_type_info/issue-107745-avoid-expr-from-macro-expansion.stderr b/tests/ui/inference/need_type_info/issue-107745-avoid-expr-from-macro-expansion.stderr new file mode 100644 index 0000000000000..464655bbcf451 --- /dev/null +++ b/tests/ui/inference/need_type_info/issue-107745-avoid-expr-from-macro-expansion.stderr @@ -0,0 +1,11 @@ +error[E0282]: type annotations needed + --> $DIR/issue-107745-avoid-expr-from-macro-expansion.rs:17:22 + | +LL | println!("{:?}", []); + | ^^ cannot infer type + | + = note: this error originates in the macro `$crate::format_args_nl` which comes from the expansion of the macro `println` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0282`. 
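As an aside on the diagnostic above: the E0282 comes from the unconstrained element type of the empty array literal itself, so the fix on the user's side is simply to pin that type down. A small, self-contained illustration (not taken from the patch):

```rust
fn main() {
    // With this change the error points at the user-written `[]` rather than
    // at `format_args!` internals such as the `T` parameter on `new_debug`:
    // println!("{:?}", []); // error[E0282]: type annotations needed

    // Either name the element type in the expression itself...
    println!("{:?}", [0i32; 0]);
    // ...or annotate a binding and print that.
    let empty: [u8; 0] = [];
    println!("{:?}", empty);
}
```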
diff --git a/tests/ui/issues/issue-16966.stderr b/tests/ui/issues/issue-16966.stderr index 60f5190dbd0d6..8c92505b5eb28 100644 --- a/tests/ui/issues/issue-16966.stderr +++ b/tests/ui/issues/issue-16966.stderr @@ -1,10 +1,8 @@ error[E0282]: type annotations needed - --> $DIR/issue-16966.rs:2:5 + --> $DIR/issue-16966.rs:2:12 | LL | panic!(std::default::Default::default()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type of the type parameter `M` declared on the function `begin_panic` - | - = note: this error originates in the macro `$crate::panic::panic_2015` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ cannot infer type error: aborting due to previous error diff --git a/tests/ui/parser/missing-closing-angle-bracket-eq-constraint.rs b/tests/ui/parser/missing-closing-angle-bracket-eq-constraint.rs index da95c1bfa2709..a56cd17773dc4 100644 --- a/tests/ui/parser/missing-closing-angle-bracket-eq-constraint.rs +++ b/tests/ui/parser/missing-closing-angle-bracket-eq-constraint.rs @@ -17,7 +17,7 @@ fn test2(arg1 : T1, arg2 : T2) { fn test3<'a>(arg : &'a u32) { let v : Vec<'a = vec![]; //~^ ERROR: expected one of - //~| ERROR: type annotations needed for `Vec` + //~| ERROR: type annotations needed for `Vec<_>` } fn main() {} diff --git a/tests/ui/parser/missing-closing-angle-bracket-eq-constraint.stderr b/tests/ui/parser/missing-closing-angle-bracket-eq-constraint.stderr index bad241634cbe6..b2448774ae9d3 100644 --- a/tests/ui/parser/missing-closing-angle-bracket-eq-constraint.stderr +++ b/tests/ui/parser/missing-closing-angle-bracket-eq-constraint.stderr @@ -39,26 +39,26 @@ help: you might have meant to end the type parameters here LL | let v : Vec<'a> = vec![]; | + -error[E0282]: type annotations needed for `Vec` +error[E0282]: type annotations needed for `Vec<_>` --> $DIR/missing-closing-angle-bracket-eq-constraint.rs:7:7 | LL | let v : Vec<(u32,_) = vec![]; | ^ | -help: consider giving `v` an explicit type, where the type for type parameter `T` is specified +help: consider giving `v` an explicit type, where the placeholders `_` are specified | -LL | let v: Vec : Vec<(u32,_) = vec![]; +LL | let v: Vec<_> : Vec<(u32,_) = vec![]; | ++++++++ -error[E0282]: type annotations needed for `Vec` +error[E0282]: type annotations needed for `Vec<_>` --> $DIR/missing-closing-angle-bracket-eq-constraint.rs:18:7 | LL | let v : Vec<'a = vec![]; | ^ | -help: consider giving `v` an explicit type, where the type for type parameter `T` is specified +help: consider giving `v` an explicit type, where the placeholders `_` are specified | -LL | let v: Vec : Vec<'a = vec![]; +LL | let v: Vec<_> : Vec<'a = vec![]; | ++++++++ error: aborting due to 5 previous errors diff --git a/tests/ui/type/type-check/cannot_infer_local_or_vec.stderr b/tests/ui/type/type-check/cannot_infer_local_or_vec.stderr index b63d2a3b61c24..09c4b2053b27e 100644 --- a/tests/ui/type/type-check/cannot_infer_local_or_vec.stderr +++ b/tests/ui/type/type-check/cannot_infer_local_or_vec.stderr @@ -1,12 +1,12 @@ -error[E0282]: type annotations needed for `Vec` +error[E0282]: type annotations needed for `Vec<_>` --> $DIR/cannot_infer_local_or_vec.rs:2:9 | LL | let x = vec![]; | ^ | -help: consider giving `x` an explicit type, where the type for type parameter `T` is specified +help: consider giving `x` an explicit type, where the placeholders `_` are specified | -LL | let x: Vec = vec![]; +LL | let x: Vec<_> = vec![]; | ++++++++ error: 
aborting due to previous error diff --git a/tests/ui/type/type-check/cannot_infer_local_or_vec_in_tuples.stderr b/tests/ui/type/type-check/cannot_infer_local_or_vec_in_tuples.stderr index e544b36951548..1fa253052e649 100644 --- a/tests/ui/type/type-check/cannot_infer_local_or_vec_in_tuples.stderr +++ b/tests/ui/type/type-check/cannot_infer_local_or_vec_in_tuples.stderr @@ -1,12 +1,12 @@ -error[E0282]: type annotations needed for `(Vec,)` +error[E0282]: type annotations needed for `(Vec<_>,)` --> $DIR/cannot_infer_local_or_vec_in_tuples.rs:2:9 | LL | let (x, ) = (vec![], ); | ^^^^^ ---------- type must be known at this point | -help: consider giving this pattern a type, where the type for type parameter `T` is specified +help: consider giving this pattern a type, where the placeholders `_` are specified | -LL | let (x, ): (Vec,) = (vec![], ); +LL | let (x, ): (Vec<_>,) = (vec![], ); | +++++++++++ error: aborting due to previous error From 41c6c5d4996728b5a635319ef9b077a3d0ccc480 Mon Sep 17 00:00:00 2001 From: Zephaniah Ong Date: Thu, 9 Feb 2023 16:01:29 +0800 Subject: [PATCH 341/501] port over symlink_file function from Build to Config and create symlink for legacy rustfmt path --- src/bootstrap/download.rs | 16 +++++++++++++++- src/bootstrap/lib.rs | 11 +---------- src/bootstrap/native.rs | 2 +- 3 files changed, 17 insertions(+), 12 deletions(-) diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs index bd67978a7662e..5c863015adb27 100644 --- a/src/bootstrap/download.rs +++ b/src/bootstrap/download.rs @@ -2,7 +2,7 @@ use std::{ env, ffi::{OsStr, OsString}, fs::{self, File}, - io::{BufRead, BufReader, ErrorKind}, + io::{self, BufRead, BufReader, ErrorKind}, path::{Path, PathBuf}, process::{Command, Stdio}, }; @@ -26,6 +26,14 @@ impl Config { self.verbose > 0 } + pub fn symlink_file, Q: AsRef>(&self, src: P, link: Q) -> io::Result<()> { + #[cfg(unix)] + use std::os::unix::fs::symlink as symlink_file; + #[cfg(windows)] + use std::os::windows::fs::symlink_file; + if !self.dry_run() { symlink_file(src.as_ref(), link.as_ref()) } else { Ok(()) } + } + pub(crate) fn create(&self, path: &Path, s: &str) { if self.dry_run() { return; @@ -331,6 +339,12 @@ impl Config { let bin_root = self.out.join(host.triple).join("rustfmt"); let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); let rustfmt_stamp = bin_root.join(".rustfmt-stamp"); + + let legacy_rustfmt = self.initial_rustc.with_file_name(exe("rustfmt", host)); + if !legacy_rustfmt.exists() { + t!(self.symlink_file(&rustfmt_path, &legacy_rustfmt)); + } + if rustfmt_path.exists() && !program_out_of_date(&rustfmt_stamp, &channel) { return Some(rustfmt_path); } diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index f4abdf1cc5758..f753720b35306 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -20,7 +20,6 @@ use std::cell::{Cell, RefCell}; use std::collections::{HashMap, HashSet}; use std::env; use std::fs::{self, File}; -use std::io; use std::io::ErrorKind; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; @@ -1407,7 +1406,7 @@ impl Build { src = t!(fs::canonicalize(src)); } else { let link = t!(fs::read_link(src)); - t!(self.symlink_file(link, dst)); + t!(self.config.symlink_file(link, dst)); return; } } @@ -1525,14 +1524,6 @@ impl Build { iter.map(|e| t!(e)).collect::>().into_iter() } - fn symlink_file, Q: AsRef>(&self, src: P, link: Q) -> io::Result<()> { - #[cfg(unix)] - use std::os::unix::fs::symlink as symlink_file; - #[cfg(windows)] - use std::os::windows::fs::symlink_file; - if 
!self.config.dry_run() { symlink_file(src.as_ref(), link.as_ref()) } else { Ok(()) } - } - /// Returns if config.ninja is enabled, and checks for ninja existence, /// exiting with a nicer error message if not. fn ninja(&self) -> bool { diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 3acc2d4b5c4b1..07d339c067c86 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -516,7 +516,7 @@ impl Step for Llvm { let lib_llvm = out_dir.join("build").join("lib").join(lib_name); if !lib_llvm.exists() { - t!(builder.symlink_file("libLLVM.dylib", &lib_llvm)); + t!(builder.build.config.symlink_file("libLLVM.dylib", &lib_llvm)); } } From 58136b01ba0cc0c2f598c300db3cc864fcb09f1f Mon Sep 17 00:00:00 2001 From: Duong Quoc Khanh Date: Thu, 9 Feb 2023 17:32:55 +0900 Subject: [PATCH 342/501] Add more tests for completion without body. Add tests for Fn, Const, TypeAlias without body inside Trait. --- crates/ide-completion/src/tests/item_list.rs | 51 ++++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/crates/ide-completion/src/tests/item_list.rs b/crates/ide-completion/src/tests/item_list.rs index b62b988885d08..9fc731bb11d57 100644 --- a/crates/ide-completion/src/tests/item_list.rs +++ b/crates/ide-completion/src/tests/item_list.rs @@ -214,6 +214,57 @@ fn in_trait_assoc_item_list() { ); } +#[test] +fn in_trait_assoc_fn_missing_body() { + check( + r#"trait Foo { fn function(); $0 }"#, + expect![[r#" + ma makro!(…) macro_rules! makro + md module + kw const + kw crate:: + kw fn + kw self:: + kw type + kw unsafe + "#]], + ); +} + +#[test] +fn in_trait_assoc_const_missing_body() { + check( + r#"trait Foo { const CONST: (); $0 }"#, + expect![[r#" + ma makro!(…) macro_rules! makro + md module + kw const + kw crate:: + kw fn + kw self:: + kw type + kw unsafe + "#]], + ); +} + +#[test] +fn in_trait_assoc_type_aliases_missing_ty() { + check( + r#"trait Foo { type Type; $0 }"#, + expect![[r#" + ma makro!(…) macro_rules! makro + md module + kw const + kw crate:: + kw fn + kw self:: + kw type + kw unsafe + "#]], + ); +} + #[test] fn in_trait_impl_assoc_item_list() { check( From 74cd8ecc7ebe2782044f6b91ea94f0ac95b9fefa Mon Sep 17 00:00:00 2001 From: Duong Quoc Khanh Date: Thu, 9 Feb 2023 17:37:11 +0900 Subject: [PATCH 343/501] Add completion without body. Add completion for Fn, Const, TypeAlias without body. 
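One clarifying note before the diff: the distinction being encoded is between an associated item that is merely bodiless and one that is still unfinished. The plain-Rust illustration below (not taken from the patch) shows the bodiless forms involved; each is a complete item once the trailing `;` is present, so the position right after it counts as being between items, where the tests added above expect item-list keywords to be offered.

```rust
// Complete but bodiless associated items: each ends with `;`.
trait Example {
    fn function();   // fn without a body
    const CONST: (); // const without a value
    type Type;       // associated type without a default
}

fn main() {}
```

Without the `semicolon_token().is_none()` checks added below, a bodiless `fn function();` was classified the same way as a genuinely unfinished `fn function()`, throwing off item-list completion right after it.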
--- crates/ide-completion/src/context/analysis.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index f606d79ad2040..4bff665ab1d77 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -675,10 +675,10 @@ fn classify_name_ref( { if let Some(item) = ast::Item::cast(n) { let is_inbetween = match &item { - ast::Item::Const(it) => it.body().is_none(), + ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(), ast::Item::Enum(it) => it.variant_list().is_none(), ast::Item::ExternBlock(it) => it.extern_item_list().is_none(), - ast::Item::Fn(it) => it.body().is_none(), + ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(), ast::Item::Impl(it) => it.assoc_item_list().is_none(), ast::Item::Module(it) => { it.item_list().is_none() && it.semicolon_token().is_none() @@ -688,7 +688,7 @@ fn classify_name_ref( it.field_list().is_none() && it.semicolon_token().is_none() } ast::Item::Trait(it) => it.assoc_item_list().is_none(), - ast::Item::TypeAlias(it) => it.ty().is_none(), + ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(), ast::Item::Union(it) => it.record_field_list().is_none(), _ => false, }; From 8828f3494e307e715bea9c0d4d993ad4e550314b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Thu, 9 Feb 2023 10:46:36 +0200 Subject: [PATCH 344/501] Hide proc macro server version detection errors --- crates/proc-macro-api/src/process.rs | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index e70b3850d667d..1ccbd780fdda3 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -27,13 +27,13 @@ impl ProcMacroProcessSrv { process_path: AbsPathBuf, args: impl IntoIterator> + Clone, ) -> io::Result { - let create_srv = || { - let mut process = Process::run(process_path.clone(), args.clone())?; + let create_srv = |null_stderr| { + let mut process = Process::run(process_path.clone(), args.clone(), null_stderr)?; let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); io::Result::Ok(ProcMacroProcessSrv { _process: process, stdin, stdout, version: 0 }) }; - let mut srv = create_srv()?; + let mut srv = create_srv(true)?; tracing::info!("sending version check"); match srv.version_check() { Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new( @@ -45,12 +45,13 @@ impl ProcMacroProcessSrv { )), Ok(v) => { tracing::info!("got version {v}"); + srv = create_srv(false)?; srv.version = v; Ok(srv) } Err(e) => { tracing::info!(%e, "proc-macro version check failed, restarting and assuming version 0"); - create_srv() + create_srv(false) } } } @@ -98,9 +99,10 @@ impl Process { fn run( path: AbsPathBuf, args: impl IntoIterator>, + null_stderr: bool, ) -> io::Result { let args: Vec = args.into_iter().map(|s| s.as_ref().into()).collect(); - let child = JodChild(mk_child(&path, args)?); + let child = JodChild(mk_child(&path, args, null_stderr)?); Ok(Process { child }) } @@ -116,13 +118,14 @@ impl Process { fn mk_child( path: &AbsPath, args: impl IntoIterator>, + null_stderr: bool, ) -> io::Result { Command::new(path.as_os_str()) .args(args) .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") .stdin(Stdio::piped()) .stdout(Stdio::piped()) - .stderr(Stdio::inherit()) + .stderr(if 
null_stderr { Stdio::null() } else { Stdio::inherit() }) .spawn() } From b651679d90bf3ed6f22c4fed53d330cf416eaf09 Mon Sep 17 00:00:00 2001 From: Tshepang Mbambo Date: Thu, 9 Feb 2023 10:57:52 +0200 Subject: [PATCH 345/501] use idiomatic formatting Also, remove needless else --- .../src/fn_ctxt/adjust_fulfillment_errors.rs | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs index db1acb5992716..b220a87bc658a 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs @@ -802,18 +802,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let mut walk = ty.walk(); while let Some(arg) = walk.next() { if arg == param_to_point_at { - return true; - } else if let ty::GenericArgKind::Type(ty) = arg.unpack() - && let ty::Alias(ty::Projection, ..) = ty.kind() - { - // This logic may seem a bit strange, but typically when - // we have a projection type in a function signature, the - // argument that's being passed into that signature is - // not actually constraining that projection's substs in - // a meaningful way. So we skip it, and see improvements - // in some UI tests. - walk.skip_current_subtree(); - } + return true; + } + if let ty::GenericArgKind::Type(ty) = arg.unpack() + && let ty::Alias(ty::Projection, ..) = ty.kind() + { + // This logic may seem a bit strange, but typically when + // we have a projection type in a function signature, the + // argument that's being passed into that signature is + // not actually constraining that projection's substs in + // a meaningful way. So we skip it, and see improvements + // in some UI tests. 
+ walk.skip_current_subtree(); + } } false } From b940f5088e725026820a9f9a47bdd40c01786233 Mon Sep 17 00:00:00 2001 From: yukang Date: Thu, 9 Feb 2023 09:11:25 +0000 Subject: [PATCH 346/501] fix #107822, handle properly when there is no crate attrs --- compiler/rustc_driver_impl/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index a392d70f100a4..5e403d846e1ef 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -628,7 +628,7 @@ fn print_crate_info( println!("{}", serde_json::to_string_pretty(&sess.target.to_json()).unwrap()); } FileNames | CrateName => { - let attrs = attrs.as_ref().unwrap(); + let Some(attrs) = attrs.as_ref() else { return Compilation::Continue; }; let t_outputs = rustc_interface::util::build_output_filenames(attrs, sess); let id = rustc_session::output::find_crate_name(sess, attrs); if *req == PrintRequest::CrateName { From 569c2fe54ba676e6aaeece218e5564c4508d7c73 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Thu, 9 Feb 2023 11:18:21 +0100 Subject: [PATCH 347/501] avoid duplicating the RUSTC_LOG env var name --- compiler/rustc_log/src/lib.rs | 6 ------ 1 file changed, 6 deletions(-) diff --git a/compiler/rustc_log/src/lib.rs b/compiler/rustc_log/src/lib.rs index 019fdc30dcec5..e3d850e6a3b02 100644 --- a/compiler/rustc_log/src/lib.rs +++ b/compiler/rustc_log/src/lib.rs @@ -53,12 +53,6 @@ use tracing_subscriber::fmt::{ }; use tracing_subscriber::layer::SubscriberExt; -pub fn init_rustc_env_logger() -> Result<(), Error> { - init_env_logger("RUSTC_LOG") -} - -/// In contrast to `init_rustc_env_logger` this allows you to choose an env var -/// other than `RUSTC_LOG`. pub fn init_env_logger(env: &str) -> Result<(), Error> { let filter = match env::var(env) { Ok(env) => EnvFilter::new(env), From 858a4aa70c74d64d897b27d034c367b7ccc7d679 Mon Sep 17 00:00:00 2001 From: Tharun Suresh Date: Thu, 9 Feb 2023 15:53:20 +0530 Subject: [PATCH 348/501] Handled snap curl issue inside Rust #107722 --- src/bootstrap/download.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs index bd67978a7662e..6b77737cefb27 100644 --- a/src/bootstrap/download.rs +++ b/src/bootstrap/download.rs @@ -221,10 +221,10 @@ impl Config { "--retry", "3", "-Sf", - "-o", ]); - curl.arg(tempfile); curl.arg(url); + let f = File::create(tempfile).unwrap(); + curl.stdout(Stdio::from(f)); if !self.check_run(&mut curl) { if self.build.contains("windows-msvc") { println!("Fallback to PowerShell"); From 30cf7a3f51d6a25006077d3e9ec3222de3104b8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Fri, 3 Feb 2023 23:21:56 +0000 Subject: [PATCH 349/501] Introduce `ReError` CC #69314 --- .../src/diagnostics/region_name.rs | 10 +- .../src/region_infer/opaque_types.rs | 2 +- .../rustc_borrowck/src/universal_regions.rs | 2 + .../rustc_hir_analysis/src/astconv/mod.rs | 12 +- .../src/check/compare_impl_item.rs | 2 +- .../rustc_hir_analysis/src/collect/type_of.rs | 2 +- .../rustc_hir_analysis/src/outlives/utils.rs | 2 + .../src/variance/constraints.rs | 2 + .../src/errors/note_and_explain.rs | 2 + .../src/infer/canonical/canonicalizer.rs | 1 + compiler/rustc_infer/src/infer/combine.rs | 6 +- .../src/infer/error_reporting/mod.rs | 17 ++- .../src/infer/error_reporting/note.rs | 6 +- compiler/rustc_infer/src/infer/freshen.rs | 1 + .../src/infer/lexical_region_resolve/mod.rs | 12 +- 
.../src/infer/region_constraints/mod.rs | 2 +- compiler/rustc_middle/src/ty/context.rs | 9 +- compiler/rustc_middle/src/ty/generics.rs | 2 +- compiler/rustc_middle/src/ty/opaque_types.rs | 6 +- compiler/rustc_middle/src/ty/print/pretty.rs | 3 +- compiler/rustc_middle/src/ty/sty.rs | 7 ++ compiler/rustc_resolve/src/late.rs | 1 - .../src/typeid/typeid_itanium_cxx_abi.rs | 1 + compiler/rustc_traits/src/chalk/lowering.rs | 3 + compiler/rustc_type_ir/src/sty.rs | 12 +- src/librustdoc/clean/mod.rs | 1 + .../ui/associated-type-bounds/elision.stderr | 4 +- .../async-await/async-fn-path-elision.stderr | 1 - .../const-param-elided-lifetime.min.stderr | 10 +- .../const-param-elided-lifetime.rs | 10 +- .../issues/issue-56445-1.min.stderr | 2 +- .../ui/const-generics/issues/issue-56445-1.rs | 2 +- ...incorrect-explicit-lifetime-name-needed.rs | 1 - ...rrect-explicit-lifetime-name-needed.stderr | 18 +-- .../path-elided.stderr | 1 - .../trait-elided.stderr | 1 - .../in-trait/signature-mismatch.stderr | 4 +- tests/ui/inference/issue-107090.rs | 10 +- tests/ui/inference/issue-107090.stderr | 115 ++---------------- tests/ui/issues/issue-10412.stderr | 1 - tests/ui/lifetimes/issue-26638.stderr | 4 +- tests/ui/lifetimes/issue-69314.fixed | 22 ++++ tests/ui/lifetimes/issue-69314.rs | 22 ++++ tests/ui/lifetimes/issue-69314.stderr | 26 ++++ .../ui/lifetimes/unusual-rib-combinations.rs | 2 +- .../lifetimes/unusual-rib-combinations.stderr | 2 +- .../issue-74918-missing-lifetime.stderr | 4 +- .../generic_type_does_not_live_long_enough.rs | 2 +- ...eric_type_does_not_live_long_enough.stderr | 2 +- .../wf/wf-in-foreign-fn-decls-issue-80468.rs | 2 +- .../wf-in-foreign-fn-decls-issue-80468.stderr | 25 +--- 51 files changed, 208 insertions(+), 211 deletions(-) create mode 100644 tests/ui/lifetimes/issue-69314.fixed create mode 100644 tests/ui/lifetimes/issue-69314.rs create mode 100644 tests/ui/lifetimes/issue-69314.stderr diff --git a/compiler/rustc_borrowck/src/diagnostics/region_name.rs b/compiler/rustc_borrowck/src/diagnostics/region_name.rs index 2440f20502ab1..d56ca1981ae2f 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_name.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_name.rs @@ -343,11 +343,11 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { let note = match closure_kind_ty.to_opt_closure_kind() { Some(ty::ClosureKind::Fn) => { "closure implements `Fn`, so references to captured variables \ - can't escape the closure" + can't escape the closure" } Some(ty::ClosureKind::FnMut) => { "closure implements `FnMut`, so references to captured variables \ - can't escape the closure" + can't escape the closure" } Some(ty::ClosureKind::FnOnce) => { bug!("BrEnv in a `FnOnce` closure"); @@ -364,7 +364,11 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { ty::BoundRegionKind::BrAnon(..) => None, }, - ty::ReLateBound(..) | ty::ReVar(..) | ty::RePlaceholder(..) | ty::ReErased => None, + ty::ReLateBound(..) + | ty::ReVar(..) + | ty::RePlaceholder(..) 
+ | ty::ReErased + | ty::ReError => None, } } diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs index e0e814cfc0ac8..e598b70d12ae7 100644 --- a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs +++ b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs @@ -95,7 +95,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { concrete_type.span, "opaque type with non-universal region substs", ); - infcx.tcx.lifetimes.re_static + infcx.tcx.lifetimes.re_error } } }; diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs index 5380913f5c86a..6680c57b0c96e 100644 --- a/compiler/rustc_borrowck/src/universal_regions.rs +++ b/compiler/rustc_borrowck/src/universal_regions.rs @@ -821,6 +821,8 @@ impl<'tcx> UniversalRegionIndices<'tcx> { pub fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid { if let ty::ReVar(..) = *r { r.to_region_vid() + } else if let ty::ReError = *r { + RegionVid::new(0) } else { *self .indices diff --git a/compiler/rustc_hir_analysis/src/astconv/mod.rs b/compiler/rustc_hir_analysis/src/astconv/mod.rs index 3d5f189e233bb..a1f5782fbd564 100644 --- a/compiler/rustc_hir_analysis/src/astconv/mod.rs +++ b/compiler/rustc_hir_analysis/src/astconv/mod.rs @@ -264,10 +264,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // reported an error in this case -- but if // not, let's error out. tcx.sess.delay_span_bug(lifetime.ident.span, "unelided lifetime in signature"); - - // Supply some dummy value. We don't have an - // `re_error`, annoyingly, so use `'static`. - tcx.lifetimes.re_static + tcx.lifetimes.re_error }) } } @@ -482,10 +479,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // This indicates an illegal lifetime in a non-assoc-trait position tcx.sess.delay_span_bug(self.span, "unelided lifetime in signature"); - - // Supply some dummy value. We don't have an - // `re_error`, annoyingly, so use `'static`. - tcx.lifetimes.re_static + tcx.lifetimes.re_error }) .into(), GenericParamDefKind::Type { has_default, .. } => { @@ -1629,7 +1623,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } else { err.emit(); } - tcx.lifetimes.re_static + tcx.lifetimes.re_error }) } }) diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs index 236e36f28ca47..facfc4313f427 100644 --- a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs +++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs @@ -792,7 +792,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>( return_span, "expected ReFree to map to ReEarlyBound" ); - return tcx.lifetimes.re_static; + return tcx.lifetimes.re_error; }; tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { def_id: e.def_id, diff --git a/compiler/rustc_hir_analysis/src/collect/type_of.rs b/compiler/rustc_hir_analysis/src/collect/type_of.rs index c5522c94874dd..e80ff89623a35 100644 --- a/compiler/rustc_hir_analysis/src/collect/type_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/type_of.rs @@ -928,7 +928,7 @@ fn infer_placeholder_type<'a>( // Typeck doesn't expect erased regions to be returned from `type_of`. 
tcx.fold_regions(ty, |r, _| match *r { - ty::ReErased => tcx.lifetimes.re_static, + ty::ReErased | ty::ReError => tcx.lifetimes.re_static, _ => r, }) } diff --git a/compiler/rustc_hir_analysis/src/outlives/utils.rs b/compiler/rustc_hir_analysis/src/outlives/utils.rs index 9459c5f54abbf..bca385f6bdca7 100644 --- a/compiler/rustc_hir_analysis/src/outlives/utils.rs +++ b/compiler/rustc_hir_analysis/src/outlives/utils.rs @@ -170,6 +170,8 @@ fn is_free_region(region: Region<'_>) -> bool { // ignore it. We can't put it on the struct header anyway. ty::ReLateBound(..) => false, + ty::ReError => false, + // These regions don't appear in types from type declarations: ty::ReErased | ty::ReVar(..) | ty::RePlaceholder(..) | ty::ReFree(..) => { bug!("unexpected region in outlives inference: {:?}", region); diff --git a/compiler/rustc_hir_analysis/src/variance/constraints.rs b/compiler/rustc_hir_analysis/src/variance/constraints.rs index 165782f209a0c..06a4a5d366f6d 100644 --- a/compiler/rustc_hir_analysis/src/variance/constraints.rs +++ b/compiler/rustc_hir_analysis/src/variance/constraints.rs @@ -409,6 +409,8 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { // way early-bound regions do, so we skip them here. } + ty::ReError => {} + ty::ReFree(..) | ty::ReVar(..) | ty::RePlaceholder(..) | ty::ReErased => { // We don't expect to see anything but 'static or bound // regions when visiting member types or method types. diff --git a/compiler/rustc_infer/src/errors/note_and_explain.rs b/compiler/rustc_infer/src/errors/note_and_explain.rs index 7aaa5ce2f4242..b51a85977e717 100644 --- a/compiler/rustc_infer/src/errors/note_and_explain.rs +++ b/compiler/rustc_infer/src/errors/note_and_explain.rs @@ -31,6 +31,8 @@ impl<'a> DescriptionCtx<'a> { ty::RePlaceholder(_) => return None, + ty::ReError => return None, + // FIXME(#13998) RePlaceholder should probably print like // ReFree rather than dumping Debug output on the user. // diff --git a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs index 87c6dfad5fa2b..78a86b4e267aa 100644 --- a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs +++ b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs @@ -371,6 +371,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> { ty::ReStatic | ty::ReEarlyBound(..) + | ty::ReError | ty::ReFree(_) | ty::RePlaceholder(..) | ty::ReErased => self.canonicalize_mode.canonicalize_free_region(self, r), diff --git a/compiler/rustc_infer/src/infer/combine.rs b/compiler/rustc_infer/src/infer/combine.rs index a567b6acdbeeb..a7684ea1ff6d5 100644 --- a/compiler/rustc_infer/src/infer/combine.rs +++ b/compiler/rustc_infer/src/infer/combine.rs @@ -705,6 +705,10 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> { return Ok(r); } + ty::ReError => { + return Ok(r); + } + ty::RePlaceholder(..) | ty::ReVar(..) | ty::ReStatic @@ -861,7 +865,7 @@ impl<'tcx> FallibleTypeFolder<'tcx> for ConstInferUnifier<'_, 'tcx> { match *r { // Never make variables for regions bound within the type itself, // nor for erased regions. - ty::ReLateBound(..) | ty::ReErased => { + ty::ReLateBound(..) 
| ty::ReErased | ty::ReError => { return Ok(r); } diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs index 86f3174b7b2bb..ceaa58257bbf0 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs @@ -134,6 +134,8 @@ pub(super) fn note_and_explain_region<'tcx>( ty::RePlaceholder(_) => return, + ty::ReError => return, + // FIXME(#13998) RePlaceholder should probably print like // ReFree rather than dumping Debug output on the user. // @@ -313,6 +315,9 @@ pub fn unexpected_hidden_region_diagnostic<'tcx>( ) } } + ty::ReError => { + err.delay_as_bug(); + } _ => { // Ugh. This is a painful case: the hidden region is not one // that we can easily summarize or explain. This can happen @@ -2546,7 +2551,11 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { ); err.note_expected_found(&"", sup_expected, &"", sup_found); - err.emit(); + if sub_region.is_error() | sup_region.is_error() { + err.delay_as_bug(); + } else { + err.emit(); + } return; } @@ -2562,7 +2571,11 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { ); self.note_region_origin(&mut err, &sub_origin); - err.emit(); + if sub_region.is_error() | sup_region.is_error() { + err.delay_as_bug(); + } else { + err.emit(); + } } /// Determine whether an error associated with the given span and definition diff --git a/compiler/rustc_infer/src/infer/error_reporting/note.rs b/compiler/rustc_infer/src/infer/error_reporting/note.rs index b18cbd404d47f..bdd09a995dc23 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/note.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/note.rs @@ -78,7 +78,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { sub: Region<'tcx>, sup: Region<'tcx>, ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> { - match origin { + let mut err = match origin { infer::Subtype(box trace) => { let terr = TypeError::RegionsDoesNotOutlive(sup, sub); let mut err = self.report_and_explain_type_error(trace, terr); @@ -299,7 +299,11 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { ); err } + }; + if sub.is_error() || sup.is_error() { + err.delay_as_bug(); } + err } pub fn suggest_copy_trait_method_bounds( diff --git a/compiler/rustc_infer/src/infer/freshen.rs b/compiler/rustc_infer/src/infer/freshen.rs index 2355234637c40..33cd29d26fe31 100644 --- a/compiler/rustc_infer/src/infer/freshen.rs +++ b/compiler/rustc_infer/src/infer/freshen.rs @@ -126,6 +126,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for TypeFreshener<'a, 'tcx> { | ty::ReFree(_) | ty::ReVar(_) | ty::RePlaceholder(..) 
+ | ty::ReError | ty::ReErased => { // replace all free regions with 'erased self.tcx().lifetimes.re_erased diff --git a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs index ce8aec8044bae..c79ef9802825d 100644 --- a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs +++ b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs @@ -17,7 +17,7 @@ use rustc_index::vec::{Idx, IndexVec}; use rustc_middle::ty::fold::TypeFoldable; use rustc_middle::ty::PlaceholderRegion; use rustc_middle::ty::{self, Ty, TyCtxt}; -use rustc_middle::ty::{ReEarlyBound, ReErased, ReFree, ReStatic}; +use rustc_middle::ty::{ReEarlyBound, ReErased, ReError, ReFree, ReStatic}; use rustc_middle::ty::{ReLateBound, RePlaceholder, ReVar}; use rustc_middle::ty::{Region, RegionVid}; use rustc_span::Span; @@ -211,7 +211,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { ); } - ReStatic => { + ReStatic | ReError => { // nothing lives longer than `'static` Ok(self.tcx().lifetimes.re_static) } @@ -436,7 +436,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { } (VarValue::Value(a), VarValue::Empty(_)) => { match *a { - ReLateBound(..) | ReErased => { + ReLateBound(..) | ReErased | ReError => { bug!("cannot relate region: {:?}", a); } @@ -465,7 +465,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { } (VarValue::Empty(a_ui), VarValue::Value(b)) => { match *b { - ReLateBound(..) | ReErased => { + ReLateBound(..) | ReErased | ReError => { bug!("cannot relate region: {:?}", b); } @@ -546,6 +546,8 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { ); } + (ReError, _) | (_, ReError) => self.tcx().lifetimes.re_error, + (ReStatic, _) | (_, ReStatic) => { // nothing lives longer than `'static` self.tcx().lifetimes.re_static @@ -1040,7 +1042,7 @@ impl<'tcx> LexicalRegionResolutions<'tcx> { ty::ReVar(rid) => match self.values[rid] { VarValue::Empty(_) => r, VarValue::Value(r) => r, - VarValue::ErrorValue => tcx.lifetimes.re_static, + VarValue::ErrorValue => tcx.lifetimes.re_error, }, _ => r, }; diff --git a/compiler/rustc_infer/src/infer/region_constraints/mod.rs b/compiler/rustc_infer/src/infer/region_constraints/mod.rs index 0428481b7ff02..f7b5129b49f03 100644 --- a/compiler/rustc_infer/src/infer/region_constraints/mod.rs +++ b/compiler/rustc_infer/src/infer/region_constraints/mod.rs @@ -696,7 +696,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { pub fn universe(&self, region: Region<'tcx>) -> ty::UniverseIndex { match *region { - ty::ReStatic | ty::ReErased | ty::ReFree(..) | ty::ReEarlyBound(..) => { + ty::ReStatic | ty::ReErased | ty::ReFree(..) | ty::ReEarlyBound(..) | ty::ReError => { ty::UniverseIndex::ROOT } ty::RePlaceholder(placeholder) => placeholder.universe, diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 9205a8a0ffed8..948675ebc37fb 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -275,6 +275,9 @@ pub struct CommonLifetimes<'tcx> { /// Erased region, used outside of type inference. pub re_erased: Region<'tcx>, + + /// Error region, used only for error reporting. 
+ pub re_error: Region<'tcx>, } pub struct CommonConsts<'tcx> { @@ -324,7 +327,11 @@ impl<'tcx> CommonLifetimes<'tcx> { )) }; - CommonLifetimes { re_static: mk(ty::ReStatic), re_erased: mk(ty::ReErased) } + CommonLifetimes { + re_static: mk(ty::ReStatic), + re_erased: mk(ty::ReErased), + re_error: mk(ty::ReError), + } } } diff --git a/compiler/rustc_middle/src/ty/generics.rs b/compiler/rustc_middle/src/ty/generics.rs index 801ca60044568..7cbf7994be00f 100644 --- a/compiler/rustc_middle/src/ty/generics.rs +++ b/compiler/rustc_middle/src/ty/generics.rs @@ -100,7 +100,7 @@ impl GenericParamDef { preceding_substs: &[ty::GenericArg<'tcx>], ) -> ty::GenericArg<'tcx> { match &self.kind { - ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_static.into(), + ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_error.into(), ty::GenericParamDefKind::Type { .. } => tcx.ty_error().into(), ty::GenericParamDefKind::Const { .. } => { tcx.const_error(tcx.bound_type_of(self.def_id).subst(tcx, preceding_substs)).into() diff --git a/compiler/rustc_middle/src/ty/opaque_types.rs b/compiler/rustc_middle/src/ty/opaque_types.rs index 7ff58f02623dc..576cccc09d579 100644 --- a/compiler/rustc_middle/src/ty/opaque_types.rs +++ b/compiler/rustc_middle/src/ty/opaque_types.rs @@ -109,6 +109,8 @@ impl<'tcx> TypeFolder<'tcx> for ReverseMapper<'tcx> { // them. ty::ReErased => return r, + ty::ReError => return r, + // The regions that we expect from borrow checking. ty::ReEarlyBound(_) | ty::ReFree(_) => {} @@ -132,13 +134,13 @@ impl<'tcx> TypeFolder<'tcx> for ReverseMapper<'tcx> { self.span, format!( "lifetime `{}` is part of concrete type but not used in \ - parameter list of the `impl Trait` type alias", + parameter list of the `impl Trait` type alias", r ), ) .emit(); - self.tcx().lifetimes.re_static + self.tcx().lifetimes.re_error } } } diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index bbb4fd999bc76..228827bcbf48f 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -2114,7 +2114,7 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { ty::ReVar(_) if identify_regions => true, - ty::ReVar(_) | ty::ReErased => false, + ty::ReVar(_) | ty::ReErased | ty::ReError => false, ty::ReStatic => true, } @@ -2194,6 +2194,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { } ty::ReVar(_) => {} ty::ReErased => {} + ty::ReError => {} ty::ReStatic => { p!("'static"); return Ok(self); diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 98d6b68356368..beadb9a2f0f14 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -1623,9 +1623,15 @@ impl<'tcx> Region<'tcx> { ty::ReVar(..) 
=> false, ty::RePlaceholder(placeholder) => placeholder.name.is_named(), ty::ReErased => false, + ty::ReError => false, } } + #[inline] + pub fn is_error(self) -> bool { + matches!(*self, ty::ReError) + } + #[inline] pub fn is_static(self) -> bool { matches!(*self, ty::ReStatic) @@ -1686,6 +1692,7 @@ impl<'tcx> Region<'tcx> { ty::ReErased => { flags = flags | TypeFlags::HAS_RE_ERASED; } + ty::ReError => {} } debug!("type_flags({:?}) = {:?}", self, flags); diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index 3ca10ac50baa6..b28aee40b16c2 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -1725,7 +1725,6 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { !segment.has_generic_args, elided_lifetime_span, ); - err.note("assuming a `'static` lifetime..."); err.emit(); should_lint = false; diff --git a/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs b/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs index c9b4ab0a38d6e..4d58ff921e382 100644 --- a/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs +++ b/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs @@ -299,6 +299,7 @@ fn encode_region<'tcx>( RegionKind::ReEarlyBound(..) | RegionKind::ReFree(..) | RegionKind::ReStatic + | RegionKind::ReError | RegionKind::ReVar(..) | RegionKind::RePlaceholder(..) => { bug!("encode_region: unexpected `{:?}`", region.kind()); diff --git a/compiler/rustc_traits/src/chalk/lowering.rs b/compiler/rustc_traits/src/chalk/lowering.rs index 9c5db3314c5cd..05b201b9f9932 100644 --- a/compiler/rustc_traits/src/chalk/lowering.rs +++ b/compiler/rustc_traits/src/chalk/lowering.rs @@ -493,6 +493,9 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Lifetime>> for Region<'t ty::ReEarlyBound(_) => { panic!("Should have already been substituted."); } + ty::ReError => { + panic!("Error lifetime should not have already been lowered."); + } ty::ReLateBound(db, br) => chalk_ir::LifetimeData::BoundVar(chalk_ir::BoundVar::new( chalk_ir::DebruijnIndex::new(db.as_u32()), br.var.as_usize(), diff --git a/compiler/rustc_type_ir/src/sty.rs b/compiler/rustc_type_ir/src/sty.rs index 3ede95e84313d..0b573f96bf7a2 100644 --- a/compiler/rustc_type_ir/src/sty.rs +++ b/compiler/rustc_type_ir/src/sty.rs @@ -960,6 +960,9 @@ pub enum RegionKind { /// Erased region, used by trait selection, in MIR and during codegen. ReErased, + + /// A region that resulted from some other error. Used exclusively for diagnostics. 
+ ReError, } // This is manually implemented for `RegionKind` because `std::mem::discriminant` @@ -974,6 +977,7 @@ const fn regionkind_discriminant(value: &RegionKind) -> usize { ReVar(_) => 4, RePlaceholder(_) => 5, ReErased => 6, + ReError => 7, } } @@ -999,6 +1003,7 @@ impl Clone for RegionKind { ReVar(r) => ReVar(r.clone()), RePlaceholder(r) => RePlaceholder(r.clone()), ReErased => ReErased, + ReError => ReError, } } } @@ -1077,6 +1082,7 @@ impl hash::Hash for RegionKind { ReVar(r) => r.hash(state), RePlaceholder(r) => r.hash(state), ReErased => (), + ReError => (), } } } @@ -1100,6 +1106,8 @@ impl fmt::Debug for RegionKind { RePlaceholder(placeholder) => write!(f, "RePlaceholder({placeholder:?})"), ReErased => f.write_str("ReErased"), + + ReError => f.write_str("ReError"), } } } @@ -1134,6 +1142,7 @@ where a.encode(e); }), ReErased => e.emit_enum_variant(disc, |_| {}), + ReError => e.emit_enum_variant(disc, |_| {}), } } } @@ -1156,6 +1165,7 @@ where 4 => ReVar(Decodable::decode(d)), 5 => RePlaceholder(Decodable::decode(d)), 6 => ReErased, + 7 => ReError, _ => panic!( "{}", format!( @@ -1184,7 +1194,7 @@ where ) { std::mem::discriminant(self).hash_stable(hcx, hasher); match self { - ReErased | ReStatic => { + ReErased | ReStatic | ReError => { // No variant fields to hash for these ... } ReLateBound(d, r) => { diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 80493b100bb45..8a493478e9cb3 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -242,6 +242,7 @@ pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option { debug!("cannot clean region {:?}", region); diff --git a/tests/ui/associated-type-bounds/elision.stderr b/tests/ui/associated-type-bounds/elision.stderr index b64a4dab2065d..cc10bbcc0b500 100644 --- a/tests/ui/associated-type-bounds/elision.stderr +++ b/tests/ui/associated-type-bounds/elision.stderr @@ -16,10 +16,10 @@ error[E0308]: mismatched types LL | fn f(x: &mut dyn Iterator>) -> Option<&'_ ()> { x.next() } | ----------------------------- -------------- ^^^^^^^^ expected `Option<&()>`, found `Option>` | | | - | | expected `Option<&'static ()>` because of return type + | | expected `Option<&()>` because of return type | this type parameter | - = note: expected enum `Option<&'static ()>` + = note: expected enum `Option<&()>` found enum `Option>` error: aborting due to 2 previous errors diff --git a/tests/ui/async-await/async-fn-path-elision.stderr b/tests/ui/async-await/async-fn-path-elision.stderr index 5e0c8c2998910..224198653dc57 100644 --- a/tests/ui/async-await/async-fn-path-elision.stderr +++ b/tests/ui/async-await/async-fn-path-elision.stderr @@ -4,7 +4,6 @@ error[E0726]: implicit elided lifetime not allowed here LL | async fn error(lt: HasLifetime) { | ^^^^^^^^^^^ expected lifetime parameter | - = note: assuming a `'static` lifetime... 
help: indicate the anonymous lifetime | LL | async fn error(lt: HasLifetime<'_>) { diff --git a/tests/ui/const-generics/const-param-elided-lifetime.min.stderr b/tests/ui/const-generics/const-param-elided-lifetime.min.stderr index 4bba42c7782e3..656bc29466f23 100644 --- a/tests/ui/const-generics/const-param-elided-lifetime.min.stderr +++ b/tests/ui/const-generics/const-param-elided-lifetime.min.stderr @@ -28,7 +28,7 @@ error[E0637]: `&` without an explicit lifetime name cannot be used here LL | fn bar() {} | ^ explicit lifetime name needed here -error: `&'static u8` is forbidden as the type of a const generic parameter +error: `&u8` is forbidden as the type of a const generic parameter --> $DIR/const-param-elided-lifetime.rs:9:19 | LL | struct A; @@ -37,7 +37,7 @@ LL | struct A; = note: the only supported types are integers, `bool` and `char` = help: more complex types are supported with `#![feature(adt_const_params)]` -error: `&'static u8` is forbidden as the type of a const generic parameter +error: `&u8` is forbidden as the type of a const generic parameter --> $DIR/const-param-elided-lifetime.rs:14:15 | LL | impl A { @@ -46,7 +46,7 @@ LL | impl A { = note: the only supported types are integers, `bool` and `char` = help: more complex types are supported with `#![feature(adt_const_params)]` -error: `&'static u8` is forbidden as the type of a const generic parameter +error: `&u8` is forbidden as the type of a const generic parameter --> $DIR/const-param-elided-lifetime.rs:22:15 | LL | impl B for A {} @@ -55,7 +55,7 @@ LL | impl B for A {} = note: the only supported types are integers, `bool` and `char` = help: more complex types are supported with `#![feature(adt_const_params)]` -error: `&'static u8` is forbidden as the type of a const generic parameter +error: `&u8` is forbidden as the type of a const generic parameter --> $DIR/const-param-elided-lifetime.rs:26:17 | LL | fn bar() {} @@ -64,7 +64,7 @@ LL | fn bar() {} = note: the only supported types are integers, `bool` and `char` = help: more complex types are supported with `#![feature(adt_const_params)]` -error: `&'static u8` is forbidden as the type of a const generic parameter +error: `&u8` is forbidden as the type of a const generic parameter --> $DIR/const-param-elided-lifetime.rs:17:21 | LL | fn foo(&self) {} diff --git a/tests/ui/const-generics/const-param-elided-lifetime.rs b/tests/ui/const-generics/const-param-elided-lifetime.rs index 487b82dbf4ac4..45611d6bf5f3d 100644 --- a/tests/ui/const-generics/const-param-elided-lifetime.rs +++ b/tests/ui/const-generics/const-param-elided-lifetime.rs @@ -8,23 +8,23 @@ struct A; //~^ ERROR `&` without an explicit lifetime name cannot be used here -//[min]~^^ ERROR `&'static u8` is forbidden +//[min]~^^ ERROR `&u8` is forbidden trait B {} impl A { //~^ ERROR `&` without an explicit lifetime name cannot be used here -//[min]~^^ ERROR `&'static u8` is forbidden +//[min]~^^ ERROR `&u8` is forbidden fn foo(&self) {} //~^ ERROR `&` without an explicit lifetime name cannot be used here - //[min]~^^ ERROR `&'static u8` is forbidden + //[min]~^^ ERROR `&u8` is forbidden } impl B for A {} //~^ ERROR `&` without an explicit lifetime name cannot be used here -//[min]~^^ ERROR `&'static u8` is forbidden +//[min]~^^ ERROR `&u8` is forbidden fn bar() {} //~^ ERROR `&` without an explicit lifetime name cannot be used here -//[min]~^^ ERROR `&'static u8` is forbidden +//[min]~^^ ERROR `&u8` is forbidden fn main() {} diff --git a/tests/ui/const-generics/issues/issue-56445-1.min.stderr 
b/tests/ui/const-generics/issues/issue-56445-1.min.stderr index 43a5df117fdc7..9f88013416244 100644 --- a/tests/ui/const-generics/issues/issue-56445-1.min.stderr +++ b/tests/ui/const-generics/issues/issue-56445-1.min.stderr @@ -6,7 +6,7 @@ LL | struct Bug<'a, const S: &'a str>(PhantomData<&'a ()>); | = note: for more information, see issue #74052 -error: `&'static str` is forbidden as the type of a const generic parameter +error: `&str` is forbidden as the type of a const generic parameter --> $DIR/issue-56445-1.rs:9:25 | LL | struct Bug<'a, const S: &'a str>(PhantomData<&'a ()>); diff --git a/tests/ui/const-generics/issues/issue-56445-1.rs b/tests/ui/const-generics/issues/issue-56445-1.rs index 13eb2ea9f69d5..0741c3796ada4 100644 --- a/tests/ui/const-generics/issues/issue-56445-1.rs +++ b/tests/ui/const-generics/issues/issue-56445-1.rs @@ -8,6 +8,6 @@ use std::marker::PhantomData; struct Bug<'a, const S: &'a str>(PhantomData<&'a ()>); //~^ ERROR: use of non-static lifetime `'a` in const generic -//[min]~| ERROR: `&'static str` is forbidden as the type of a const generic parameter +//[min]~| ERROR: `&str` is forbidden as the type of a const generic parameter impl Bug<'_, ""> {} diff --git a/tests/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.rs b/tests/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.rs index 9ea9fc71b557f..54b483f53d4cb 100644 --- a/tests/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.rs +++ b/tests/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.rs @@ -8,7 +8,6 @@ fn should_error() where T : Into<&u32> {} trait X<'a, K: 'a> { fn foo<'b, L: X<&'b Nested>>(); //~^ ERROR missing lifetime specifier [E0106] - //~| ERROR the type `&'b Nested` does not fulfill the required lifetime } fn bar<'b, L: X<&'b Nested>>(){} diff --git a/tests/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.stderr b/tests/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.stderr index 9d859fddf56b4..faf4c9eb87275 100644 --- a/tests/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.stderr +++ b/tests/ui/generics/issue-65285-incorrect-explicit-lifetime-name-needed.stderr @@ -29,7 +29,7 @@ LL | fn foo<'b, L: X<'lifetime, &'b Nested>>(); | ++++++++++ error[E0106]: missing lifetime specifier - --> $DIR/issue-65285-incorrect-explicit-lifetime-name-needed.rs:14:16 + --> $DIR/issue-65285-incorrect-explicit-lifetime-name-needed.rs:13:16 | LL | fn bar<'b, L: X<&'b Nested>>(){} | ^ expected named lifetime parameter @@ -39,19 +39,7 @@ help: consider using the `'b` lifetime LL | fn bar<'b, L: X<'b, &'b Nested>>(){} | +++ -error[E0477]: the type `&'b Nested` does not fulfill the required lifetime - --> $DIR/issue-65285-incorrect-explicit-lifetime-name-needed.rs:9:19 - | -LL | fn foo<'b, L: X<&'b Nested>>(); - | ^^^^^^^^^^^^^^^^ - | -note: type must satisfy the static lifetime as required by this binding - --> $DIR/issue-65285-incorrect-explicit-lifetime-name-needed.rs:8:16 - | -LL | trait X<'a, K: 'a> { - | ^^ - -error: aborting due to 4 previous errors +error: aborting due to 3 previous errors -Some errors have detailed explanations: E0106, E0477, E0637. +Some errors have detailed explanations: E0106, E0637. For more information about an error, try `rustc --explain E0106`. 
diff --git a/tests/ui/impl-header-lifetime-elision/path-elided.stderr b/tests/ui/impl-header-lifetime-elision/path-elided.stderr index 0b7d3f1e851e3..18e4c618dba99 100644 --- a/tests/ui/impl-header-lifetime-elision/path-elided.stderr +++ b/tests/ui/impl-header-lifetime-elision/path-elided.stderr @@ -4,7 +4,6 @@ error[E0726]: implicit elided lifetime not allowed here LL | impl MyTrait for Foo { | ^^^ expected lifetime parameter | - = note: assuming a `'static` lifetime... help: indicate the anonymous lifetime | LL | impl MyTrait for Foo<'_> { diff --git a/tests/ui/impl-header-lifetime-elision/trait-elided.stderr b/tests/ui/impl-header-lifetime-elision/trait-elided.stderr index 412bba6be7167..74631a0378601 100644 --- a/tests/ui/impl-header-lifetime-elision/trait-elided.stderr +++ b/tests/ui/impl-header-lifetime-elision/trait-elided.stderr @@ -4,7 +4,6 @@ error[E0726]: implicit elided lifetime not allowed here LL | impl MyTrait for u32 {} | ^^^^^^^ expected lifetime parameter | - = note: assuming a `'static` lifetime... help: indicate the anonymous lifetime | LL | impl MyTrait<'_> for u32 {} diff --git a/tests/ui/impl-trait/in-trait/signature-mismatch.stderr b/tests/ui/impl-trait/in-trait/signature-mismatch.stderr index e105660173b48..c4fcaabe44619 100644 --- a/tests/ui/impl-trait/in-trait/signature-mismatch.stderr +++ b/tests/ui/impl-trait/in-trait/signature-mismatch.stderr @@ -2,12 +2,12 @@ error: `impl` item signature doesn't match `trait` item signature --> $DIR/signature-mismatch.rs:15:5 | LL | fn async_fn(&self, buff: &[u8]) -> impl Future>; - | ----------------------------------------------------------------- expected `fn(&'1 Struct, &'2 [u8]) -> impl Future> + 'static` + | ----------------------------------------------------------------- expected `fn(&'1 Struct, &'2 [u8]) -> impl Future> + '3` ... 
LL | fn async_fn<'a>(&self, buff: &'a [u8]) -> impl Future> + 'a { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ found `fn(&'1 Struct, &'2 [u8]) -> impl Future> + '2` | - = note: expected signature `fn(&'1 Struct, &'2 [u8]) -> impl Future> + 'static` + = note: expected signature `fn(&'1 Struct, &'2 [u8]) -> impl Future> + '3` found signature `fn(&'1 Struct, &'2 [u8]) -> impl Future> + '2` = help: the lifetime requirements from the `impl` do not correspond to the requirements in the `trait` = help: verify the lifetime relationships in the `trait` and `impl` between the `self` argument, the other inputs and its output diff --git a/tests/ui/inference/issue-107090.rs b/tests/ui/inference/issue-107090.rs index 9426445656f23..a22e12c6d885b 100644 --- a/tests/ui/inference/issue-107090.rs +++ b/tests/ui/inference/issue-107090.rs @@ -2,9 +2,7 @@ use std::marker::PhantomData; struct Foo<'a, 'b, T>(PhantomData<(&'a (), &'b (), T)>) where Foo<'short, 'out, T>: Convert<'a, 'b>; - //~^ ERROR mismatched types - //~^^ ERROR mismatched types - //~^^^ ERROR use of undeclared lifetime name + //~^ ERROR use of undeclared lifetime name //~| ERROR use of undeclared lifetime name `'out` trait Convert<'a, 'b>: Sized { @@ -13,19 +11,15 @@ trait Convert<'a, 'b>: Sized { impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> { //~^ ERROR use of undeclared lifetime name //~^^ ERROR use of undeclared lifetime name `'out` - //~| ERROR cannot infer an appropriate lifetime for lifetime parameter fn cast(&'long self) -> &'short Foo<'short, 'out, T> { //~^ ERROR use of undeclared lifetime name - //~| ERROR cannot infer an appropriate lifetime for lifetime parameter self } } fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, T>, sadness: &'in_ Foo<'short, 'out, T>) -> &'out T { //~^ ERROR use of undeclared lifetime name - //~^^ ERROR incompatible lifetime on type - //~| ERROR `x` has lifetime `'in_` but it needs to satisfy a `'static` lifetime requirement - sadness.cast() + sadness.cast() //~ ERROR mismatched types } fn main() {} diff --git a/tests/ui/inference/issue-107090.stderr b/tests/ui/inference/issue-107090.stderr index 33cb39014acfa..6233b629ad6c6 100644 --- a/tests/ui/inference/issue-107090.stderr +++ b/tests/ui/inference/issue-107090.stderr @@ -30,7 +30,7 @@ LL | struct Foo<'out, 'a, 'b, T>(PhantomData<(&'a (), &'b (), T)>) | +++++ error[E0261]: use of undeclared lifetime name `'b` - --> $DIR/issue-107090.rs:13:47 + --> $DIR/issue-107090.rs:11:47 | LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> { | - ^^ undeclared lifetime @@ -38,13 +38,13 @@ LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> | help: consider introducing lifetime `'b` here: `'b,` error[E0261]: use of undeclared lifetime name `'out` - --> $DIR/issue-107090.rs:13:67 + --> $DIR/issue-107090.rs:11:67 | LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> { | - help: consider introducing lifetime `'out` here: `'out,` ^^^^ undeclared lifetime error[E0261]: use of undeclared lifetime name `'out` - --> $DIR/issue-107090.rs:17:49 + --> $DIR/issue-107090.rs:14:49 | LL | fn cast(&'long self) -> &'short Foo<'short, 'out, T> { | ^^^^ undeclared lifetime @@ -59,7 +59,7 @@ LL | impl<'out, 'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'ou | +++++ error[E0261]: use of undeclared lifetime name `'short` - --> $DIR/issue-107090.rs:24:68 + --> $DIR/issue-107090.rs:20:68 | LL | fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, 
T>, sadness: &'in_ Foo<'short, 'out, T>) -> &'out T { | - ^^^^^^ undeclared lifetime @@ -67,107 +67,18 @@ LL | fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, T>, sadness: &'in_ Foo<'short, | help: consider introducing lifetime `'short` here: `'short,` error[E0308]: mismatched types - --> $DIR/issue-107090.rs:4:27 - | -LL | Foo<'short, 'out, T>: Convert<'a, 'b>; - | ^^^^^^^^^^^^^^^ lifetime mismatch - | - = note: expected trait `Convert<'static, 'static>` - found trait `Convert<'a, 'b>` -note: the lifetime `'a` as defined here... - --> $DIR/issue-107090.rs:2:12 - | -LL | struct Foo<'a, 'b, T>(PhantomData<(&'a (), &'b (), T)>) - | ^^ - = note: ...does not necessarily outlive the static lifetime - -error[E0308]: mismatched types - --> $DIR/issue-107090.rs:4:27 - | -LL | Foo<'short, 'out, T>: Convert<'a, 'b>; - | ^^^^^^^^^^^^^^^ lifetime mismatch - | - = note: expected trait `Convert<'static, 'static>` - found trait `Convert<'a, 'b>` -note: the lifetime `'b` as defined here... - --> $DIR/issue-107090.rs:2:16 - | -LL | struct Foo<'a, 'b, T>(PhantomData<(&'a (), &'b (), T)>) - | ^^ - = note: ...does not necessarily outlive the static lifetime - -error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'long` due to conflicting requirements - --> $DIR/issue-107090.rs:13:55 - | -LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> { - | ^^^^^^^^^^^^^^^^^^^^ - | -note: first, the lifetime cannot outlive the lifetime `'short` as defined here... - --> $DIR/issue-107090.rs:13:21 - | -LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> { - | ^^^^^^ - = note: ...but the lifetime must also be valid for the static lifetime... -note: ...so that the types are compatible - --> $DIR/issue-107090.rs:13:55 - | -LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> { - | ^^^^^^^^^^^^^^^^^^^^ - = note: expected `Convert<'short, 'static>` - found `Convert<'_, 'static>` - -error: incompatible lifetime on type - --> $DIR/issue-107090.rs:24:29 - | -LL | fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, T>, sadness: &'in_ Foo<'short, 'out, T>) -> &'out T { - | ^^^^^^^^^^^^^^^^^^ - | -note: because this has an unmet lifetime requirement - --> $DIR/issue-107090.rs:4:27 - | -LL | Foo<'short, 'out, T>: Convert<'a, 'b>; - | ^^^^^^^^^^^^^^^ introduces a `'static` lifetime requirement -note: the lifetime `'out` as defined here... - --> $DIR/issue-107090.rs:24:17 - | -LL | fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, T>, sadness: &'in_ Foo<'short, 'out, T>) -> &'out T { - | ^^^^ -note: ...does not necessarily outlive the static lifetime introduced by the compatible `impl` - --> $DIR/issue-107090.rs:13:1 - | -LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -error[E0759]: `x` has lifetime `'in_` but it needs to satisfy a `'static` lifetime requirement - --> $DIR/issue-107090.rs:24:29 + --> $DIR/issue-107090.rs:22:5 | LL | fn badboi<'in_, 'out, T>(x: Foo<'in_, 'out, T>, sadness: &'in_ Foo<'short, 'out, T>) -> &'out T { - | ^^^^^^^^^^^^^^^^^^ - | | - | this data with lifetime `'in_`... 
- | ...is used and required to live as long as `'static` here - -error[E0495]: cannot infer an appropriate lifetime for lifetime parameter `'long` due to conflicting requirements - --> $DIR/issue-107090.rs:17:13 - | -LL | fn cast(&'long self) -> &'short Foo<'short, 'out, T> { - | ^^^^^^^^^^^ + | - this type parameter ------- expected `&'out T` because of return type +LL | +LL | sadness.cast() + | ^^^^^^^^^^^^^^ expected `&T`, found `&Foo<'_, '_, T>` | -note: first, the lifetime cannot outlive the lifetime `'short` as defined here... - --> $DIR/issue-107090.rs:13:21 - | -LL | impl<'long: 'short, 'short, T> Convert<'long, 'b> for Foo<'short, 'out, T> { - | ^^^^^^ - = note: ...but the lifetime must also be valid for the static lifetime... -note: ...so that the types are compatible - --> $DIR/issue-107090.rs:17:13 - | -LL | fn cast(&'long self) -> &'short Foo<'short, 'out, T> { - | ^^^^^^^^^^^ - = note: expected `Convert<'short, 'static>` - found `Convert<'_, 'static>` + = note: expected reference `&'out T` + found reference `&Foo<'_, '_, T>` -error: aborting due to 12 previous errors +error: aborting due to 7 previous errors -Some errors have detailed explanations: E0261, E0308, E0495, E0759. +Some errors have detailed explanations: E0261, E0308. For more information about an error, try `rustc --explain E0261`. diff --git a/tests/ui/issues/issue-10412.stderr b/tests/ui/issues/issue-10412.stderr index 46b9fd541adfa..26666782d2abc 100644 --- a/tests/ui/issues/issue-10412.stderr +++ b/tests/ui/issues/issue-10412.stderr @@ -46,7 +46,6 @@ error[E0726]: implicit elided lifetime not allowed here LL | impl<'self> Serializable for &'self str { | ^^^^^^^^^^^^^^^^^ expected lifetime parameter | - = note: assuming a `'static` lifetime... help: indicate the anonymous lifetime | LL | impl<'self> Serializable<'_, str> for &'self str { diff --git a/tests/ui/lifetimes/issue-26638.stderr b/tests/ui/lifetimes/issue-26638.stderr index 4dfacb9380115..30afcecf827e5 100644 --- a/tests/ui/lifetimes/issue-26638.stderr +++ b/tests/ui/lifetimes/issue-26638.stderr @@ -40,9 +40,9 @@ error[E0308]: mismatched types LL | fn parse_type(iter: Box+'static>) -> &str { iter.next() } | ---- ^^^^^^^^^^^ expected `&str`, found `Option<&str>` | | - | expected `&'static str` because of return type + | expected `&str` because of return type | - = note: expected reference `&'static str` + = note: expected reference `&str` found enum `Option<&str>` error[E0061]: this function takes 1 argument but 0 arguments were supplied diff --git a/tests/ui/lifetimes/issue-69314.fixed b/tests/ui/lifetimes/issue-69314.fixed new file mode 100644 index 0000000000000..41116d4ea6170 --- /dev/null +++ b/tests/ui/lifetimes/issue-69314.fixed @@ -0,0 +1,22 @@ +// run-rustfix +// edition:2021 +#![allow(dead_code, unused_mut, unused_variables)] +struct A {} +struct Msg<'a> { + s: &'a [i32], +} +impl A { + async fn g(buf: &[i32]) -> Msg<'_> { + Msg { s: &buf[0..1] } + } + async fn f() { + let mut buf = [0; 512]; + let m2 = &buf[..]; //~ ERROR `buf` does not live long enough + let m = Self::g(m2).await; + Self::f2(m).await; + } + async fn f2(m: Msg<'_>) {} + //~^ ERROR implicit elided lifetime not allowed here +} + +fn main() {} diff --git a/tests/ui/lifetimes/issue-69314.rs b/tests/ui/lifetimes/issue-69314.rs new file mode 100644 index 0000000000000..17445341eb689 --- /dev/null +++ b/tests/ui/lifetimes/issue-69314.rs @@ -0,0 +1,22 @@ +// run-rustfix +// edition:2021 +#![allow(dead_code, unused_mut, unused_variables)] +struct A {} +struct Msg<'a> { + s: &'a 
[i32], +} +impl A { + async fn g(buf: &[i32]) -> Msg<'_> { + Msg { s: &buf[0..1] } + } + async fn f() { + let mut buf = [0; 512]; + let m2 = &buf[..]; //~ ERROR `buf` does not live long enough + let m = Self::g(m2).await; + Self::f2(m).await; + } + async fn f2(m: Msg) {} + //~^ ERROR implicit elided lifetime not allowed here +} + +fn main() {} diff --git a/tests/ui/lifetimes/issue-69314.stderr b/tests/ui/lifetimes/issue-69314.stderr new file mode 100644 index 0000000000000..7ae6789285baa --- /dev/null +++ b/tests/ui/lifetimes/issue-69314.stderr @@ -0,0 +1,26 @@ +error[E0726]: implicit elided lifetime not allowed here + --> $DIR/issue-69314.rs:18:20 + | +LL | async fn f2(m: Msg) {} + | ^^^ expected lifetime parameter + | +help: indicate the anonymous lifetime + | +LL | async fn f2(m: Msg<'_>) {} + | ++++ + +error[E0597]: `buf` does not live long enough + --> $DIR/issue-69314.rs:14:19 + | +LL | let m2 = &buf[..]; + | ^^^ borrowed value does not live long enough +LL | let m = Self::g(m2).await; + | ----------- argument requires that `buf` is borrowed for `'static` +LL | Self::f2(m).await; +LL | } + | - `buf` dropped here while still borrowed + +error: aborting due to 2 previous errors + +Some errors have detailed explanations: E0597, E0726. +For more information about an error, try `rustc --explain E0597`. diff --git a/tests/ui/lifetimes/unusual-rib-combinations.rs b/tests/ui/lifetimes/unusual-rib-combinations.rs index b4c86aab863c8..1c122f42e5922 100644 --- a/tests/ui/lifetimes/unusual-rib-combinations.rs +++ b/tests/ui/lifetimes/unusual-rib-combinations.rs @@ -23,6 +23,6 @@ fn c() {} // Elided lifetime in path in ConstGeneric fn d() {} //~^ ERROR missing lifetime specifier -//~| ERROR `S<'static>` is forbidden as the type of a const generic parameter +//~| ERROR `S<'_>` is forbidden as the type of a const generic parameter fn main() {} diff --git a/tests/ui/lifetimes/unusual-rib-combinations.stderr b/tests/ui/lifetimes/unusual-rib-combinations.stderr index 6d7b42506982c..68f4fce0178e0 100644 --- a/tests/ui/lifetimes/unusual-rib-combinations.stderr +++ b/tests/ui/lifetimes/unusual-rib-combinations.stderr @@ -46,7 +46,7 @@ LL | fn a() -> [u8; foo::()] { = note: expected type `usize` found fn item `fn() {foo}` -error: `S<'static>` is forbidden as the type of a const generic parameter +error: `S<'_>` is forbidden as the type of a const generic parameter --> $DIR/unusual-rib-combinations.rs:24:15 | LL | fn d() {} diff --git a/tests/ui/mismatched_types/issue-74918-missing-lifetime.stderr b/tests/ui/mismatched_types/issue-74918-missing-lifetime.stderr index 9ddea16294450..b523182309959 100644 --- a/tests/ui/mismatched_types/issue-74918-missing-lifetime.stderr +++ b/tests/ui/mismatched_types/issue-74918-missing-lifetime.stderr @@ -16,9 +16,9 @@ LL | fn next(&mut self) -> Option> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ found `fn(&'1 mut ChunkingIterator) -> Option>` --> $SRC_DIR/core/src/iter/traits/iterator.rs:LL:COL | - = note: expected `fn(&'1 mut ChunkingIterator) -> Option>` + = note: expected `fn(&'1 mut ChunkingIterator) -> Option>` | - = note: expected signature `fn(&'1 mut ChunkingIterator) -> Option>` + = note: expected signature `fn(&'1 mut ChunkingIterator) -> Option>` found signature `fn(&'1 mut ChunkingIterator) -> Option>` = help: the lifetime requirements from the `impl` do not correspond to the requirements in the `trait` = help: verify the lifetime relationships in the `trait` and `impl` between the `self` argument, the other inputs and its output diff --git 
a/tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs b/tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs index d3e169a70d3f7..cdd8f6f1976a5 100644 --- a/tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs +++ b/tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.rs @@ -4,7 +4,7 @@ fn main() { let y = 42; let x = wrong_generic(&y); let z: i32 = x; - //~^ ERROR expected generic type parameter, found `&'static i32 + //~^ ERROR expected generic type parameter, found `&i32` } type WrongGeneric = impl 'static; diff --git a/tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.stderr b/tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.stderr index 19115fd28662b..fa79e51e9f79f 100644 --- a/tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.stderr +++ b/tests/ui/type-alias-impl-trait/generic_type_does_not_live_long_enough.stderr @@ -4,7 +4,7 @@ error: at least one trait must be specified LL | type WrongGeneric = impl 'static; | ^^^^^^^^^^^^ -error[E0792]: expected generic type parameter, found `&'static i32` +error[E0792]: expected generic type parameter, found `&i32` --> $DIR/generic_type_does_not_live_long_enough.rs:6:18 | LL | let z: i32 = x; diff --git a/tests/ui/wf/wf-in-foreign-fn-decls-issue-80468.rs b/tests/ui/wf/wf-in-foreign-fn-decls-issue-80468.rs index 4fcf8f403bbb6..0be5127dcc4da 100644 --- a/tests/ui/wf/wf-in-foreign-fn-decls-issue-80468.rs +++ b/tests/ui/wf/wf-in-foreign-fn-decls-issue-80468.rs @@ -13,5 +13,5 @@ pub struct Ref<'a>(&'a u8); impl Trait for Ref {} //~ ERROR: implicit elided lifetime not allowed here extern "C" { - pub fn repro(_: Wrapper); //~ ERROR: incompatible lifetime on type + pub fn repro(_: Wrapper); } diff --git a/tests/ui/wf/wf-in-foreign-fn-decls-issue-80468.stderr b/tests/ui/wf/wf-in-foreign-fn-decls-issue-80468.stderr index 94f6dc266245a..b10856571a61c 100644 --- a/tests/ui/wf/wf-in-foreign-fn-decls-issue-80468.stderr +++ b/tests/ui/wf/wf-in-foreign-fn-decls-issue-80468.stderr @@ -4,34 +4,11 @@ error[E0726]: implicit elided lifetime not allowed here LL | impl Trait for Ref {} | ^^^ expected lifetime parameter | - = note: assuming a `'static` lifetime... help: indicate the anonymous lifetime | LL | impl Trait for Ref<'_> {} | ++++ -error: incompatible lifetime on type - --> $DIR/wf-in-foreign-fn-decls-issue-80468.rs:16:21 - | -LL | pub fn repro(_: Wrapper); - | ^^^^^^^^^^^^ - | -note: because this has an unmet lifetime requirement - --> $DIR/wf-in-foreign-fn-decls-issue-80468.rs:8:23 - | -LL | pub struct Wrapper(T); - | ^^^^^ introduces a `'static` lifetime requirement -note: the anonymous lifetime as defined here... - --> $DIR/wf-in-foreign-fn-decls-issue-80468.rs:16:29 - | -LL | pub fn repro(_: Wrapper); - | ^^^ -note: ...does not necessarily outlive the static lifetime introduced by the compatible `impl` - --> $DIR/wf-in-foreign-fn-decls-issue-80468.rs:13:1 - | -LL | impl Trait for Ref {} - | ^^^^^^^^^^^^^^^^^^ - -error: aborting due to 2 previous errors +error: aborting due to previous error For more information about this error, try `rustc --explain E0726`. 
From ffaf2a5c270f6677d7746f6ae30f498692afc750 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Sun, 5 Feb 2023 12:32:27 +0000 Subject: [PATCH 350/501] review comments --- compiler/rustc_hir_analysis/src/collect/type_of.rs | 2 +- compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_hir_analysis/src/collect/type_of.rs b/compiler/rustc_hir_analysis/src/collect/type_of.rs index e80ff89623a35..c5522c94874dd 100644 --- a/compiler/rustc_hir_analysis/src/collect/type_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/type_of.rs @@ -928,7 +928,7 @@ fn infer_placeholder_type<'a>( // Typeck doesn't expect erased regions to be returned from `type_of`. tcx.fold_regions(ty, |r, _| match *r { - ty::ReErased | ty::ReError => tcx.lifetimes.re_static, + ty::ReErased => tcx.lifetimes.re_static, _ => r, }) } diff --git a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs index c79ef9802825d..bc1d9dc3fde71 100644 --- a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs +++ b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs @@ -211,11 +211,13 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { ); } - ReStatic | ReError => { + ReStatic => { // nothing lives longer than `'static` Ok(self.tcx().lifetimes.re_static) } + ReError => Ok(self.tcx().lifetimes.re_error), + ReEarlyBound(_) | ReFree(_) => { // All empty regions are less than early-bound, free, // and scope regions. From 861f4512353e83dbc67de34992058f44fc1b4648 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Tue, 7 Feb 2023 14:55:16 +0000 Subject: [PATCH 351/501] Change to `ReError(ErrorGuaranteed)` --- .../src/diagnostics/region_name.rs | 2 +- .../src/region_infer/opaque_types.rs | 5 ++-- .../rustc_borrowck/src/universal_regions.rs | 2 +- .../rustc_hir_analysis/src/astconv/mod.rs | 8 ++--- .../src/check/compare_impl_item.rs | 8 +---- .../rustc_hir_analysis/src/outlives/utils.rs | 2 +- .../src/variance/constraints.rs | 2 +- .../src/errors/note_and_explain.rs | 2 +- .../src/infer/canonical/canonicalizer.rs | 2 +- compiler/rustc_infer/src/infer/combine.rs | 4 +-- .../src/infer/error_reporting/mod.rs | 4 +-- compiler/rustc_infer/src/infer/freshen.rs | 2 +- .../src/infer/lexical_region_resolve/mod.rs | 10 +++---- .../src/infer/region_constraints/mod.rs | 8 +++-- compiler/rustc_middle/src/ty/context.rs | 30 ++++++++++++++----- compiler/rustc_middle/src/ty/generics.rs | 2 +- compiler/rustc_middle/src/ty/opaque_types.rs | 4 +-- compiler/rustc_middle/src/ty/print/pretty.rs | 4 +-- compiler/rustc_middle/src/ty/sty.rs | 6 ++-- .../src/typeid/typeid_itanium_cxx_abi.rs | 2 +- compiler/rustc_traits/src/chalk/lowering.rs | 2 +- compiler/rustc_type_ir/src/sty.rs | 18 ++++++----- src/librustdoc/clean/mod.rs | 2 +- 23 files changed, 70 insertions(+), 61 deletions(-) diff --git a/compiler/rustc_borrowck/src/diagnostics/region_name.rs b/compiler/rustc_borrowck/src/diagnostics/region_name.rs index d56ca1981ae2f..237e063d8d11f 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_name.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_name.rs @@ -368,7 +368,7 @@ impl<'tcx> MirBorrowckCtxt<'_, 'tcx> { | ty::ReVar(..) | ty::RePlaceholder(..) 
| ty::ReErased - | ty::ReError => None, + | ty::ReError(_) => None, } } diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs index e598b70d12ae7..c7b22d5f2e604 100644 --- a/compiler/rustc_borrowck/src/region_infer/opaque_types.rs +++ b/compiler/rustc_borrowck/src/region_infer/opaque_types.rs @@ -91,11 +91,10 @@ impl<'tcx> RegionInferenceContext<'tcx> { } None => { subst_regions.push(vid); - infcx.tcx.sess.delay_span_bug( + infcx.tcx.re_error_with_message( concrete_type.span, "opaque type with non-universal region substs", - ); - infcx.tcx.lifetimes.re_error + ) } } }; diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs index 6680c57b0c96e..e813ff837ff8a 100644 --- a/compiler/rustc_borrowck/src/universal_regions.rs +++ b/compiler/rustc_borrowck/src/universal_regions.rs @@ -821,7 +821,7 @@ impl<'tcx> UniversalRegionIndices<'tcx> { pub fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid { if let ty::ReVar(..) = *r { r.to_region_vid() - } else if let ty::ReError = *r { + } else if let ty::ReError(_) = *r { RegionVid::new(0) } else { *self diff --git a/compiler/rustc_hir_analysis/src/astconv/mod.rs b/compiler/rustc_hir_analysis/src/astconv/mod.rs index a1f5782fbd564..76dd3b9a0d186 100644 --- a/compiler/rustc_hir_analysis/src/astconv/mod.rs +++ b/compiler/rustc_hir_analysis/src/astconv/mod.rs @@ -263,8 +263,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { // elision. `resolve_lifetime` should have // reported an error in this case -- but if // not, let's error out. - tcx.sess.delay_span_bug(lifetime.ident.span, "unelided lifetime in signature"); - tcx.lifetimes.re_error + tcx.re_error_with_message(lifetime.ident.span, "unelided lifetime in signature") }) } } @@ -478,8 +477,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { debug!(?param, "unelided lifetime in signature"); // This indicates an illegal lifetime in a non-assoc-trait position - tcx.sess.delay_span_bug(self.span, "unelided lifetime in signature"); - tcx.lifetimes.re_error + tcx.re_error_with_message(self.span, "unelided lifetime in signature") }) .into(), GenericParamDefKind::Type { has_default, .. } => { @@ -1623,7 +1621,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { } else { err.emit(); } - tcx.lifetimes.re_error + tcx.re_error() }) } }) diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs index facfc4313f427..640920638a755 100644 --- a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs +++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs @@ -786,13 +786,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>( } let Some(ty::ReEarlyBound(e)) = map.get(®ion.into()).map(|r| r.expect_region().kind()) else { - tcx - .sess - .delay_span_bug( - return_span, - "expected ReFree to map to ReEarlyBound" - ); - return tcx.lifetimes.re_error; + return tcx.re_error_with_message(return_span, "expected ReFree to map to ReEarlyBound") }; tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { def_id: e.def_id, diff --git a/compiler/rustc_hir_analysis/src/outlives/utils.rs b/compiler/rustc_hir_analysis/src/outlives/utils.rs index bca385f6bdca7..c5c5f63a108b3 100644 --- a/compiler/rustc_hir_analysis/src/outlives/utils.rs +++ b/compiler/rustc_hir_analysis/src/outlives/utils.rs @@ -170,7 +170,7 @@ fn is_free_region(region: Region<'_>) -> bool { // ignore it. We can't put it on the struct header anyway. 
ty::ReLateBound(..) => false, - ty::ReError => false, + ty::ReError(_) => false, // These regions don't appear in types from type declarations: ty::ReErased | ty::ReVar(..) | ty::RePlaceholder(..) | ty::ReFree(..) => { diff --git a/compiler/rustc_hir_analysis/src/variance/constraints.rs b/compiler/rustc_hir_analysis/src/variance/constraints.rs index 06a4a5d366f6d..b0cf0387f87a9 100644 --- a/compiler/rustc_hir_analysis/src/variance/constraints.rs +++ b/compiler/rustc_hir_analysis/src/variance/constraints.rs @@ -409,7 +409,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { // way early-bound regions do, so we skip them here. } - ty::ReError => {} + ty::ReError(_) => {} ty::ReFree(..) | ty::ReVar(..) | ty::RePlaceholder(..) | ty::ReErased => { // We don't expect to see anything but 'static or bound diff --git a/compiler/rustc_infer/src/errors/note_and_explain.rs b/compiler/rustc_infer/src/errors/note_and_explain.rs index b51a85977e717..5d861a78af800 100644 --- a/compiler/rustc_infer/src/errors/note_and_explain.rs +++ b/compiler/rustc_infer/src/errors/note_and_explain.rs @@ -31,7 +31,7 @@ impl<'a> DescriptionCtx<'a> { ty::RePlaceholder(_) => return None, - ty::ReError => return None, + ty::ReError(_) => return None, // FIXME(#13998) RePlaceholder should probably print like // ReFree rather than dumping Debug output on the user. diff --git a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs index 78a86b4e267aa..13c39dab3ad7a 100644 --- a/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs +++ b/compiler/rustc_infer/src/infer/canonical/canonicalizer.rs @@ -371,7 +371,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for Canonicalizer<'cx, 'tcx> { ty::ReStatic | ty::ReEarlyBound(..) - | ty::ReError + | ty::ReError(_) | ty::ReFree(_) | ty::RePlaceholder(..) | ty::ReErased => self.canonicalize_mode.canonicalize_free_region(self, r), diff --git a/compiler/rustc_infer/src/infer/combine.rs b/compiler/rustc_infer/src/infer/combine.rs index a7684ea1ff6d5..1360044fe7539 100644 --- a/compiler/rustc_infer/src/infer/combine.rs +++ b/compiler/rustc_infer/src/infer/combine.rs @@ -705,7 +705,7 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> { return Ok(r); } - ty::ReError => { + ty::ReError(_) => { return Ok(r); } @@ -865,7 +865,7 @@ impl<'tcx> FallibleTypeFolder<'tcx> for ConstInferUnifier<'_, 'tcx> { match *r { // Never make variables for regions bound within the type itself, // nor for erased regions. - ty::ReLateBound(..) | ty::ReErased | ty::ReError => { + ty::ReLateBound(..) | ty::ReErased | ty::ReError(_) => { return Ok(r); } diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs index ceaa58257bbf0..88a0d6def5ec2 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs @@ -134,7 +134,7 @@ pub(super) fn note_and_explain_region<'tcx>( ty::RePlaceholder(_) => return, - ty::ReError => return, + ty::ReError(_) => return, // FIXME(#13998) RePlaceholder should probably print like // ReFree rather than dumping Debug output on the user. 
@@ -315,7 +315,7 @@ pub fn unexpected_hidden_region_diagnostic<'tcx>( ) } } - ty::ReError => { + ty::ReError(_) => { err.delay_as_bug(); } _ => { diff --git a/compiler/rustc_infer/src/infer/freshen.rs b/compiler/rustc_infer/src/infer/freshen.rs index 33cd29d26fe31..1c76950cc6c99 100644 --- a/compiler/rustc_infer/src/infer/freshen.rs +++ b/compiler/rustc_infer/src/infer/freshen.rs @@ -126,7 +126,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for TypeFreshener<'a, 'tcx> { | ty::ReFree(_) | ty::ReVar(_) | ty::RePlaceholder(..) - | ty::ReError + | ty::ReError(_) | ty::ReErased => { // replace all free regions with 'erased self.tcx().lifetimes.re_erased diff --git a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs index bc1d9dc3fde71..d4a12195ca01a 100644 --- a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs +++ b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs @@ -216,7 +216,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { Ok(self.tcx().lifetimes.re_static) } - ReError => Ok(self.tcx().lifetimes.re_error), + ReError(_) => Ok(self.tcx().re_error()), ReEarlyBound(_) | ReFree(_) => { // All empty regions are less than early-bound, free, @@ -438,7 +438,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { } (VarValue::Value(a), VarValue::Empty(_)) => { match *a { - ReLateBound(..) | ReErased | ReError => { + ReLateBound(..) | ReErased | ReError(_) => { bug!("cannot relate region: {:?}", a); } @@ -467,7 +467,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { } (VarValue::Empty(a_ui), VarValue::Value(b)) => { match *b { - ReLateBound(..) | ReErased | ReError => { + ReLateBound(..) | ReErased | ReError(_) => { bug!("cannot relate region: {:?}", b); } @@ -548,7 +548,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { ); } - (ReError, _) | (_, ReError) => self.tcx().lifetimes.re_error, + (ReError(_), _) | (_, ReError(_)) => self.tcx().re_error(), (ReStatic, _) | (_, ReStatic) => { // nothing lives longer than `'static` @@ -1044,7 +1044,7 @@ impl<'tcx> LexicalRegionResolutions<'tcx> { ty::ReVar(rid) => match self.values[rid] { VarValue::Empty(_) => r, VarValue::Value(r) => r, - VarValue::ErrorValue => tcx.lifetimes.re_error, + VarValue::ErrorValue => tcx.re_error(), }, _ => r, }; diff --git a/compiler/rustc_infer/src/infer/region_constraints/mod.rs b/compiler/rustc_infer/src/infer/region_constraints/mod.rs index f7b5129b49f03..cb24375c7a3fe 100644 --- a/compiler/rustc_infer/src/infer/region_constraints/mod.rs +++ b/compiler/rustc_infer/src/infer/region_constraints/mod.rs @@ -696,9 +696,11 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { pub fn universe(&self, region: Region<'tcx>) -> ty::UniverseIndex { match *region { - ty::ReStatic | ty::ReErased | ty::ReFree(..) | ty::ReEarlyBound(..) | ty::ReError => { - ty::UniverseIndex::ROOT - } + ty::ReStatic + | ty::ReErased + | ty::ReFree(..) + | ty::ReEarlyBound(..) + | ty::ReError(_) => ty::UniverseIndex::ROOT, ty::RePlaceholder(placeholder) => placeholder.universe, ty::ReVar(vid) => self.var_universe(vid), ty::ReLateBound(..) => bug!("universe(): encountered bound region {:?}", region), diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 948675ebc37fb..0e6126ae7c518 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -275,9 +275,6 @@ pub struct CommonLifetimes<'tcx> { /// Erased region, used outside of type inference. 
pub re_erased: Region<'tcx>, - - /// Error region, used only for error reporting. - pub re_error: Region<'tcx>, } pub struct CommonConsts<'tcx> { @@ -327,11 +324,7 @@ impl<'tcx> CommonLifetimes<'tcx> { )) }; - CommonLifetimes { - re_static: mk(ty::ReStatic), - re_erased: mk(ty::ReErased), - re_error: mk(ty::ReError), - } + CommonLifetimes { re_static: mk(ty::ReStatic), re_erased: mk(ty::ReErased) } } } @@ -656,6 +649,27 @@ impl<'tcx> TyCtxt<'tcx> { self.mk_ty(Error(reported)) } + /// Constructs a `RegionKind::ReError` lifetime and registers a `delay_span_bug` to ensure it + /// gets used. + #[track_caller] + pub fn re_error(self) -> Region<'tcx> { + self.re_error_with_message( + DUMMY_SP, + "RegionKind::ReError constructed but no error reported", + ) + } + + /// Constructs a `RegionKind::ReError` lifetime and registers a `delay_span_bug` with the given + /// `msg` to ensure it gets used. + #[track_caller] + pub fn re_error_with_message>(self, span: S, msg: &str) -> Region<'tcx> { + let reported = self.sess.delay_span_bug(span, msg); + let r = ty::ReError(reported); + Region(Interned::new_unchecked( + self.interners.region.intern(r, |r| InternedInSet(self.interners.arena.alloc(r))).0, + )) + } + /// Like [TyCtxt::ty_error] but for constants, with current `ErrorGuaranteed` #[track_caller] pub fn const_error_with_guaranteed( diff --git a/compiler/rustc_middle/src/ty/generics.rs b/compiler/rustc_middle/src/ty/generics.rs index 7cbf7994be00f..0112debc1c821 100644 --- a/compiler/rustc_middle/src/ty/generics.rs +++ b/compiler/rustc_middle/src/ty/generics.rs @@ -100,7 +100,7 @@ impl GenericParamDef { preceding_substs: &[ty::GenericArg<'tcx>], ) -> ty::GenericArg<'tcx> { match &self.kind { - ty::GenericParamDefKind::Lifetime => tcx.lifetimes.re_error.into(), + ty::GenericParamDefKind::Lifetime => tcx.re_error().into(), ty::GenericParamDefKind::Type { .. } => tcx.ty_error().into(), ty::GenericParamDefKind::Const { .. } => { tcx.const_error(tcx.bound_type_of(self.def_id).subst(tcx, preceding_substs)).into() diff --git a/compiler/rustc_middle/src/ty/opaque_types.rs b/compiler/rustc_middle/src/ty/opaque_types.rs index 576cccc09d579..624b62e1800a1 100644 --- a/compiler/rustc_middle/src/ty/opaque_types.rs +++ b/compiler/rustc_middle/src/ty/opaque_types.rs @@ -109,7 +109,7 @@ impl<'tcx> TypeFolder<'tcx> for ReverseMapper<'tcx> { // them. ty::ReErased => return r, - ty::ReError => return r, + ty::ReError(_) => return r, // The regions that we expect from borrow checking. 
ty::ReEarlyBound(_) | ty::ReFree(_) => {} @@ -140,7 +140,7 @@ impl<'tcx> TypeFolder<'tcx> for ReverseMapper<'tcx> { ) .emit(); - self.tcx().lifetimes.re_error + self.tcx().re_error() } } } diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 228827bcbf48f..a8b23e64e8209 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -2114,7 +2114,7 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> { ty::ReVar(_) if identify_regions => true, - ty::ReVar(_) | ty::ReErased | ty::ReError => false, + ty::ReVar(_) | ty::ReErased | ty::ReError(_) => false, ty::ReStatic => true, } @@ -2194,7 +2194,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> { } ty::ReVar(_) => {} ty::ReErased => {} - ty::ReError => {} + ty::ReError(_) => {} ty::ReStatic => { p!("'static"); return Ok(self); diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index beadb9a2f0f14..b3b2a4fd0e5da 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -1623,13 +1623,13 @@ impl<'tcx> Region<'tcx> { ty::ReVar(..) => false, ty::RePlaceholder(placeholder) => placeholder.name.is_named(), ty::ReErased => false, - ty::ReError => false, + ty::ReError(_) => false, } } #[inline] pub fn is_error(self) -> bool { - matches!(*self, ty::ReError) + matches!(*self, ty::ReError(_)) } #[inline] @@ -1692,7 +1692,7 @@ impl<'tcx> Region<'tcx> { ty::ReErased => { flags = flags | TypeFlags::HAS_RE_ERASED; } - ty::ReError => {} + ty::ReError(_) => {} } debug!("type_flags({:?}) = {:?}", self, flags); diff --git a/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs b/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs index 4d58ff921e382..710f38264036c 100644 --- a/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs +++ b/compiler/rustc_symbol_mangling/src/typeid/typeid_itanium_cxx_abi.rs @@ -299,7 +299,7 @@ fn encode_region<'tcx>( RegionKind::ReEarlyBound(..) | RegionKind::ReFree(..) | RegionKind::ReStatic - | RegionKind::ReError + | RegionKind::ReError(_) | RegionKind::ReVar(..) | RegionKind::RePlaceholder(..) => { bug!("encode_region: unexpected `{:?}`", region.kind()); diff --git a/compiler/rustc_traits/src/chalk/lowering.rs b/compiler/rustc_traits/src/chalk/lowering.rs index 05b201b9f9932..65ed3105d10bc 100644 --- a/compiler/rustc_traits/src/chalk/lowering.rs +++ b/compiler/rustc_traits/src/chalk/lowering.rs @@ -493,7 +493,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::Lifetime>> for Region<'t ty::ReEarlyBound(_) => { panic!("Should have already been substituted."); } - ty::ReError => { + ty::ReError(_) => { panic!("Error lifetime should not have already been lowered."); } ty::ReLateBound(db, br) => chalk_ir::LifetimeData::BoundVar(chalk_ir::BoundVar::new( diff --git a/compiler/rustc_type_ir/src/sty.rs b/compiler/rustc_type_ir/src/sty.rs index 0b573f96bf7a2..ea31678b49364 100644 --- a/compiler/rustc_type_ir/src/sty.rs +++ b/compiler/rustc_type_ir/src/sty.rs @@ -962,7 +962,7 @@ pub enum RegionKind { ReErased, /// A region that resulted from some other error. Used exclusively for diagnostics. 
- ReError, + ReError(I::ErrorGuaranteed), } // This is manually implemented for `RegionKind` because `std::mem::discriminant` @@ -977,7 +977,7 @@ const fn regionkind_discriminant(value: &RegionKind) -> usize { ReVar(_) => 4, RePlaceholder(_) => 5, ReErased => 6, - ReError => 7, + ReError(_) => 7, } } @@ -989,6 +989,7 @@ where I::FreeRegion: Copy, I::RegionVid: Copy, I::PlaceholderRegion: Copy, + I::ErrorGuaranteed: Copy, { } @@ -1003,7 +1004,7 @@ impl Clone for RegionKind { ReVar(r) => ReVar(r.clone()), RePlaceholder(r) => RePlaceholder(r.clone()), ReErased => ReErased, - ReError => ReError, + ReError(r) => ReError(r.clone()), } } } @@ -1082,7 +1083,7 @@ impl hash::Hash for RegionKind { ReVar(r) => r.hash(state), RePlaceholder(r) => r.hash(state), ReErased => (), - ReError => (), + ReError(_) => (), } } } @@ -1107,7 +1108,7 @@ impl fmt::Debug for RegionKind { ReErased => f.write_str("ReErased"), - ReError => f.write_str("ReError"), + ReError(_) => f.write_str("ReError"), } } } @@ -1142,7 +1143,7 @@ where a.encode(e); }), ReErased => e.emit_enum_variant(disc, |_| {}), - ReError => e.emit_enum_variant(disc, |_| {}), + ReError(_) => e.emit_enum_variant(disc, |_| {}), } } } @@ -1155,6 +1156,7 @@ where I::FreeRegion: Decodable, I::RegionVid: Decodable, I::PlaceholderRegion: Decodable, + I::ErrorGuaranteed: Decodable, { fn decode(d: &mut D) -> Self { match Decoder::read_usize(d) { @@ -1165,7 +1167,7 @@ where 4 => ReVar(Decodable::decode(d)), 5 => RePlaceholder(Decodable::decode(d)), 6 => ReErased, - 7 => ReError, + 7 => ReError(Decodable::decode(d)), _ => panic!( "{}", format!( @@ -1194,7 +1196,7 @@ where ) { std::mem::discriminant(self).hash_stable(hcx, hasher); match self { - ReErased | ReStatic | ReError => { + ReErased | ReStatic | ReError(_) => { // No variant fields to hash for these ... } ReLateBound(d, r) => { diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 8a493478e9cb3..5bda3620dd0fd 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -242,7 +242,7 @@ pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option { debug!("cannot clean region {:?}", region); From ed8651c7b8f9a179b584124cde3994bb93433bb0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Tue, 7 Feb 2023 15:11:04 +0000 Subject: [PATCH 352/501] Use 'static RegionVid for ReError --- compiler/rustc_borrowck/src/universal_regions.rs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs index e813ff837ff8a..56930c89b2c64 100644 --- a/compiler/rustc_borrowck/src/universal_regions.rs +++ b/compiler/rustc_borrowck/src/universal_regions.rs @@ -167,6 +167,9 @@ struct UniversalRegionIndices<'tcx> { /// contains an entry for `ReStatic` -- it might be nice to just /// use a substs, and then handle `ReStatic` another way. indices: FxHashMap, RegionVid>, + + /// The vid assigned to `'static`. Used only for diagnostics. 
+ pub fr_static: RegionVid, } #[derive(Debug, PartialEq)] @@ -609,7 +612,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { let subst_mapping = iter::zip(identity_substs.regions(), fr_substs.regions().map(|r| r.to_region_vid())); - UniversalRegionIndices { indices: global_mapping.chain(subst_mapping).collect() } + UniversalRegionIndices { indices: global_mapping.chain(subst_mapping).collect(), fr_static } } fn compute_inputs_and_output( @@ -821,8 +824,11 @@ impl<'tcx> UniversalRegionIndices<'tcx> { pub fn to_region_vid(&self, r: ty::Region<'tcx>) -> RegionVid { if let ty::ReVar(..) = *r { r.to_region_vid() - } else if let ty::ReError(_) = *r { - RegionVid::new(0) + } else if r.is_error() { + // We use the `'static` `RegionVid` because `ReError` doesn't actually exist in the + // `UniversalRegionIndices`. This is fine because 1) it is a fallback only used if + // errors are being emitted and 2) it leaves the happy path unaffected. + self.fr_static } else { *self .indices From 32227255386cb8387e3961845f101d23bac42b0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Tue, 7 Feb 2023 15:47:28 +0000 Subject: [PATCH 353/501] Fix `RegionKind: PartialEq` to account for `ReError` --- compiler/rustc_type_ir/src/sty.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/rustc_type_ir/src/sty.rs b/compiler/rustc_type_ir/src/sty.rs index ea31678b49364..61557fdc0eda4 100644 --- a/compiler/rustc_type_ir/src/sty.rs +++ b/compiler/rustc_type_ir/src/sty.rs @@ -1022,10 +1022,11 @@ impl PartialEq for RegionKind { (ReVar(a_r), ReVar(b_r)) => a_r == b_r, (RePlaceholder(a_r), RePlaceholder(b_r)) => a_r == b_r, (ReErased, ReErased) => true, + (ReError(_), ReError(_)) => true, _ => { debug_assert!( false, - "This branch must be unreachable, maybe the match is missing an arm? self = self = {self:?}, other = {other:?}" + "This branch must be unreachable, maybe the match is missing an arm? self = {self:?}, other = {other:?}" ); true } From 3689295a6bf43a6defbb392e1aca08757fa14a59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Thu, 9 Feb 2023 10:38:45 +0000 Subject: [PATCH 354/501] Use `ErrorGuaranteed` more in `ReError` --- compiler/rustc_hir_analysis/src/astconv/mod.rs | 10 +++++----- .../src/infer/lexical_region_resolve/mod.rs | 8 +++++--- compiler/rustc_middle/src/ty/context.rs | 13 ++++++++----- compiler/rustc_middle/src/ty/generics.rs | 2 +- compiler/rustc_middle/src/ty/opaque_types.rs | 5 +++-- 5 files changed, 22 insertions(+), 16 deletions(-) diff --git a/compiler/rustc_hir_analysis/src/astconv/mod.rs b/compiler/rustc_hir_analysis/src/astconv/mod.rs index 76dd3b9a0d186..f121979be715f 100644 --- a/compiler/rustc_hir_analysis/src/astconv/mod.rs +++ b/compiler/rustc_hir_analysis/src/astconv/mod.rs @@ -1614,14 +1614,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { "the lifetime bound for this object type cannot be deduced \ from context; please supply an explicit bound" ); - if borrowed { + let e = if borrowed { // We will have already emitted an error E0106 complaining about a // missing named lifetime in `&dyn Trait`, so we elide this one. 
- err.delay_as_bug(); + err.delay_as_bug() } else { - err.emit(); - } - tcx.re_error() + err.emit() + }; + tcx.re_error(e) }) } }) diff --git a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs index d4a12195ca01a..4a2210bdb68d0 100644 --- a/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs +++ b/compiler/rustc_infer/src/infer/lexical_region_resolve/mod.rs @@ -216,7 +216,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { Ok(self.tcx().lifetimes.re_static) } - ReError(_) => Ok(self.tcx().re_error()), + ReError(_) => Ok(a_region), ReEarlyBound(_) | ReFree(_) => { // All empty regions are less than early-bound, free, @@ -548,7 +548,9 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> { ); } - (ReError(_), _) | (_, ReError(_)) => self.tcx().re_error(), + (ReError(_), _) => a, + + (_, ReError(_)) => b, (ReStatic, _) | (_, ReStatic) => { // nothing lives longer than `'static` @@ -1044,7 +1046,7 @@ impl<'tcx> LexicalRegionResolutions<'tcx> { ty::ReVar(rid) => match self.values[rid] { VarValue::Empty(_) => r, VarValue::Value(r) => r, - VarValue::ErrorValue => tcx.re_error(), + VarValue::ErrorValue => tcx.re_error_misc(), }, _ => r, }; diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 0e6126ae7c518..d1d7d0f84308a 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -649,10 +649,16 @@ impl<'tcx> TyCtxt<'tcx> { self.mk_ty(Error(reported)) } + /// Constructs a `RegionKind::ReError` lifetime. + #[track_caller] + pub fn re_error(self, reported: ErrorGuaranteed) -> Region<'tcx> { + self.mk_region(ty::ReError(reported)) + } + /// Constructs a `RegionKind::ReError` lifetime and registers a `delay_span_bug` to ensure it /// gets used. #[track_caller] - pub fn re_error(self) -> Region<'tcx> { + pub fn re_error_misc(self) -> Region<'tcx> { self.re_error_with_message( DUMMY_SP, "RegionKind::ReError constructed but no error reported", @@ -664,10 +670,7 @@ impl<'tcx> TyCtxt<'tcx> { #[track_caller] pub fn re_error_with_message>(self, span: S, msg: &str) -> Region<'tcx> { let reported = self.sess.delay_span_bug(span, msg); - let r = ty::ReError(reported); - Region(Interned::new_unchecked( - self.interners.region.intern(r, |r| InternedInSet(self.interners.arena.alloc(r))).0, - )) + self.re_error(reported) } /// Like [TyCtxt::ty_error] but for constants, with current `ErrorGuaranteed` diff --git a/compiler/rustc_middle/src/ty/generics.rs b/compiler/rustc_middle/src/ty/generics.rs index 0112debc1c821..ea95a38f272c6 100644 --- a/compiler/rustc_middle/src/ty/generics.rs +++ b/compiler/rustc_middle/src/ty/generics.rs @@ -100,7 +100,7 @@ impl GenericParamDef { preceding_substs: &[ty::GenericArg<'tcx>], ) -> ty::GenericArg<'tcx> { match &self.kind { - ty::GenericParamDefKind::Lifetime => tcx.re_error().into(), + ty::GenericParamDefKind::Lifetime => tcx.re_error_misc().into(), ty::GenericParamDefKind::Type { .. } => tcx.ty_error().into(), ty::GenericParamDefKind::Const { .. 
} => { tcx.const_error(tcx.bound_type_of(self.def_id).subst(tcx, preceding_substs)).into() diff --git a/compiler/rustc_middle/src/ty/opaque_types.rs b/compiler/rustc_middle/src/ty/opaque_types.rs index 624b62e1800a1..a5ebdbc8792fb 100644 --- a/compiler/rustc_middle/src/ty/opaque_types.rs +++ b/compiler/rustc_middle/src/ty/opaque_types.rs @@ -127,7 +127,8 @@ impl<'tcx> TypeFolder<'tcx> for ReverseMapper<'tcx> { Some(u) => panic!("region mapped to unexpected kind: {:?}", u), None if self.do_not_error => self.tcx.lifetimes.re_static, None => { - self.tcx + let e = self + .tcx .sess .struct_span_err(self.span, "non-defining opaque type use in defining scope") .span_label( @@ -140,7 +141,7 @@ impl<'tcx> TypeFolder<'tcx> for ReverseMapper<'tcx> { ) .emit(); - self.tcx().re_error() + self.tcx().re_error(e) } } } From 2e1b78ddb9146d0f2ad96a08d664ae08d69cf341 Mon Sep 17 00:00:00 2001 From: Deadbeef Date: Thu, 9 Feb 2023 19:38:55 +0800 Subject: [PATCH 355/501] Patch `build/rustfmt/lib/*.so` for NixOS fixes #107676. --- src/bootstrap/download.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs index bd67978a7662e..3e3a59daab5cc 100644 --- a/src/bootstrap/download.rs +++ b/src/bootstrap/download.rs @@ -173,8 +173,7 @@ impl Config { // appear to have this (even when `../lib` is redundant). // NOTE: there are only two paths here, delimited by a `:` let mut entries = OsString::from("$ORIGIN/../lib:"); - entries.push(t!(fs::canonicalize(nix_deps_dir))); - entries.push("/lib"); + entries.push(t!(fs::canonicalize(nix_deps_dir)).join("lib")); entries }; patchelf.args(&[OsString::from("--set-rpath"), rpath_entries]); @@ -353,6 +352,13 @@ impl Config { if self.should_fix_bins_and_dylibs() { self.fix_bin_or_dylib(&bin_root.join("bin").join("rustfmt")); self.fix_bin_or_dylib(&bin_root.join("bin").join("cargo-fmt")); + let lib_dir = bin_root.join("lib"); + for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { + let lib = t!(lib); + if lib.path().extension() == Some(OsStr::new("so")) { + self.fix_bin_or_dylib(&lib.path()); + } + } } self.create(&rustfmt_stamp, &channel); From 7615045ebd7b62751b872b5bc084be7cc9be3e00 Mon Sep 17 00:00:00 2001 From: bohan Date: Sat, 4 Feb 2023 18:55:33 +0800 Subject: [PATCH 356/501] test: snapshot for derive suggestion in diff files --- compiler/rustc_errors/src/emitter.rs | 22 +++--- tests/ui/modules/issue-107649.rs | 106 +++++++++++++++++++++++++++ tests/ui/modules/issue-107649.stderr | 18 +++++ 3 files changed, 135 insertions(+), 11 deletions(-) create mode 100644 tests/ui/modules/issue-107649.rs create mode 100644 tests/ui/modules/issue-107649.stderr diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs index 9768526a2f47d..5f460b26488c4 100644 --- a/compiler/rustc_errors/src/emitter.rs +++ b/compiler/rustc_errors/src/emitter.rs @@ -1796,17 +1796,17 @@ impl EmitterWriter { // telling users to make a change but not clarifying *where*. 
let loc = sm.lookup_char_pos(parts[0].span.lo());
 if loc.file.name != sm.span_to_filename(span) && loc.file.name.is_real() {
-                    buffer.puts(row_num - 1, 0, "--> ", Style::LineNumber);
-                    buffer.append(
-                        row_num - 1,
-                        &format!(
-                            "{}:{}:{}",
-                            sm.filename_for_diagnostics(&loc.file.name),
-                            sm.doctest_offset_line(&loc.file.name, loc.line),
-                            loc.col.0 + 1,
-                        ),
-                        Style::LineAndColumn,
-                    );
+                    let arrow = "--> ";
+                    buffer.puts(row_num - 1, 0, arrow, Style::LineNumber);
+                    let filename = sm.filename_for_diagnostics(&loc.file.name);
+                    let offset = sm.doctest_offset_line(&loc.file.name, loc.line);
+                    let message = format!("{}:{}:{}", filename, offset, loc.col.0 + 1);
+                    if row_num == 2 {
+                        let col = usize::max(max_line_num_len + 1, arrow.len());
+                        buffer.puts(1, col, &message, Style::LineAndColumn);
+                    } else {
+                        buffer.append(row_num - 1, &message, Style::LineAndColumn);
+                    }
                     for _ in 0..max_line_num_len {
                         buffer.prepend(row_num - 1, " ", Style::NoStyle);
                     }
diff --git a/tests/ui/modules/issue-107649.rs b/tests/ui/modules/issue-107649.rs
new file mode 100644
index 0000000000000..71b84cd30d6ff
--- /dev/null
+++ b/tests/ui/modules/issue-107649.rs
@@ -0,0 +1,106 @@
+// compile-flags: -Z ui-testing=no
+#[path = "auxiliary/dummy_lib.rs"]
+mod lib;
+
+/// The function needs to be long enough to
+/// ensure `max_line_num_len` to be large enough
+/// for no-ui-testing
+fn main() {
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    ();
+    dbg!(lib::Dummy); //~ Error: `Dummy` doesn't implement `Debug`
+}
diff --git a/tests/ui/modules/issue-107649.stderr b/tests/ui/modules/issue-107649.stderr
new file mode 100644
index 0000000000000..1cea71f2829fd
--- /dev/null
+++ b/tests/ui/modules/issue-107649.stderr
@@ -0,0 +1,18 @@
+error[E0277]: `Dummy` doesn't implement `Debug`
+  --> $DIR/issue-107649.rs:105:5
+    |
+105 |     dbg!(lib::Dummy);
+    |     ^^^^^^^^^^^^^^^^ `Dummy` cannot be formatted using `{:?}`
+    |
+    = help: the trait `Debug` is not implemented for `Dummy`
+    = note: add `#[derive(Debug)]` to `Dummy` or manually `impl Debug for Dummy`
+    = note: this error originates in the macro `$crate::format_args_nl` which comes from the expansion of the macro `dbg` (in Nightly builds, run with -Z macro-backtrace for more info)
+help: consider annotating `Dummy` with `#[derive(Debug)]`
+  --> $DIR/auxiliary/dummy_lib.rs:2:1
+   |
+2  | #[derive(Debug)]
+   |
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
From a576514e13d5608ab48847958c2b97ab5ab01d85 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Esteban=20K=C3=BCber?=
Date: Thu, 9 Feb 2023 10:16:00 +0000
Subject: [PATCH 357/501] Introduce `-Zterminal-urls` to use OSC8 for error codes

Terminals supporting the OSC8 Hyperlink Extension can support inline anchors
where the text is user-definable but clicking on it opens a browser to a
specified URL, just like an `<a>` anchor does in HTML.
https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda --- compiler/rustc_driver_impl/src/lib.rs | 3 ++- compiler/rustc_errors/src/emitter.rs | 17 +++++++++++++++-- compiler/rustc_errors/src/json.rs | 9 +++++++++ compiler/rustc_errors/src/json/tests.rs | 3 ++- compiler/rustc_errors/src/lib.rs | 8 ++++++++ compiler/rustc_expand/src/tests.rs | 3 ++- compiler/rustc_session/src/options.rs | 16 +++++++++++++++- compiler/rustc_session/src/session.rs | 18 ++++++++++++++++++ src/librustdoc/core.rs | 3 +++ src/librustdoc/doctest.rs | 5 ++++- src/tools/clippy/clippy_lints/src/doc.rs | 3 ++- src/tools/clippy/src/driver.rs | 1 + src/tools/rustfmt/src/parse/session.rs | 3 ++- tests/rustdoc-ui/z-help.stdout | 1 + tests/ui/diagnostic-flags/terminal_urls.rs | 4 ++++ tests/ui/diagnostic-flags/terminal_urls.stderr | 11 +++++++++++ 16 files changed, 99 insertions(+), 9 deletions(-) create mode 100644 tests/ui/diagnostic-flags/terminal_urls.rs create mode 100644 tests/ui/diagnostic-flags/terminal_urls.stderr diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index a392d70f100a4..290652f45155c 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -23,7 +23,7 @@ use rustc_codegen_ssa::{traits::CodegenBackend, CodegenErrors, CodegenResults}; use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry}; use rustc_data_structures::sync::SeqCst; use rustc_errors::registry::{InvalidErrorCode, Registry}; -use rustc_errors::{ErrorGuaranteed, PResult}; +use rustc_errors::{ErrorGuaranteed, PResult, TerminalUrl}; use rustc_feature::find_gated_cfg; use rustc_hir::def_id::LOCAL_CRATE; use rustc_interface::util::{self, collect_crate_types, get_codegen_backend}; @@ -1192,6 +1192,7 @@ pub fn report_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) { None, false, false, + TerminalUrl::No, )); let handler = rustc_errors::Handler::with_emitter(true, None, emitter); diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs index 9768526a2f47d..ac8f40987e864 100644 --- a/compiler/rustc_errors/src/emitter.rs +++ b/compiler/rustc_errors/src/emitter.rs @@ -18,7 +18,7 @@ use crate::translation::{to_fluent_args, Translate}; use crate::{ diagnostic::DiagnosticLocation, CodeSuggestion, Diagnostic, DiagnosticId, DiagnosticMessage, FluentBundle, Handler, LazyFallbackBundle, Level, MultiSpan, SubDiagnostic, - SubstitutionHighlight, SuggestionStyle, + SubstitutionHighlight, SuggestionStyle, TerminalUrl, }; use rustc_lint_defs::pluralize; @@ -66,6 +66,7 @@ impl HumanReadableErrorType { diagnostic_width: Option, macro_backtrace: bool, track_diagnostics: bool, + terminal_url: TerminalUrl, ) -> EmitterWriter { let (short, color_config) = self.unzip(); let color = color_config.suggests_using_colors(); @@ -80,6 +81,7 @@ impl HumanReadableErrorType { diagnostic_width, macro_backtrace, track_diagnostics, + terminal_url, ) } } @@ -652,6 +654,7 @@ pub struct EmitterWriter { macro_backtrace: bool, track_diagnostics: bool, + terminal_url: TerminalUrl, } #[derive(Debug)] @@ -672,6 +675,7 @@ impl EmitterWriter { diagnostic_width: Option, macro_backtrace: bool, track_diagnostics: bool, + terminal_url: TerminalUrl, ) -> EmitterWriter { let dst = Destination::from_stderr(color_config); EmitterWriter { @@ -685,6 +689,7 @@ impl EmitterWriter { diagnostic_width, macro_backtrace, track_diagnostics, + terminal_url, } } @@ -699,6 +704,7 @@ impl EmitterWriter { diagnostic_width: Option, macro_backtrace: bool, 
track_diagnostics: bool, + terminal_url: TerminalUrl, ) -> EmitterWriter { EmitterWriter { dst: Raw(dst, colored), @@ -711,6 +717,7 @@ impl EmitterWriter { diagnostic_width, macro_backtrace, track_diagnostics, + terminal_url, } } @@ -1378,7 +1385,13 @@ impl EmitterWriter { // only render error codes, not lint codes if let Some(DiagnosticId::Error(ref code)) = *code { buffer.append(0, "[", Style::Level(*level)); - buffer.append(0, code, Style::Level(*level)); + let code = if let TerminalUrl::Yes = self.terminal_url { + let path = "https://doc.rust-lang.org/error_codes"; + format!("\x1b]8;;{path}/{code}.html\x07{code}\x1b]8;;\x07") + } else { + code.clone() + }; + buffer.append(0, &code, Style::Level(*level)); buffer.append(0, "]", Style::Level(*level)); label_width += 2 + code.len(); } diff --git a/compiler/rustc_errors/src/json.rs b/compiler/rustc_errors/src/json.rs index dc38b8725ad1e..e475fc725c33b 100644 --- a/compiler/rustc_errors/src/json.rs +++ b/compiler/rustc_errors/src/json.rs @@ -17,6 +17,7 @@ use crate::translation::{to_fluent_args, Translate}; use crate::DiagnosticId; use crate::{ CodeSuggestion, FluentBundle, LazyFallbackBundle, MultiSpan, SpanLabel, SubDiagnostic, + TerminalUrl, }; use rustc_lint_defs::Applicability; @@ -47,6 +48,7 @@ pub struct JsonEmitter { diagnostic_width: Option, macro_backtrace: bool, track_diagnostics: bool, + terminal_url: TerminalUrl, } impl JsonEmitter { @@ -60,6 +62,7 @@ impl JsonEmitter { diagnostic_width: Option, macro_backtrace: bool, track_diagnostics: bool, + terminal_url: TerminalUrl, ) -> JsonEmitter { JsonEmitter { dst: Box::new(io::BufWriter::new(io::stderr())), @@ -73,6 +76,7 @@ impl JsonEmitter { diagnostic_width, macro_backtrace, track_diagnostics, + terminal_url, } } @@ -84,6 +88,7 @@ impl JsonEmitter { diagnostic_width: Option, macro_backtrace: bool, track_diagnostics: bool, + terminal_url: TerminalUrl, ) -> JsonEmitter { let file_path_mapping = FilePathMapping::empty(); JsonEmitter::stderr( @@ -96,6 +101,7 @@ impl JsonEmitter { diagnostic_width, macro_backtrace, track_diagnostics, + terminal_url, ) } @@ -110,6 +116,7 @@ impl JsonEmitter { diagnostic_width: Option, macro_backtrace: bool, track_diagnostics: bool, + terminal_url: TerminalUrl, ) -> JsonEmitter { JsonEmitter { dst, @@ -123,6 +130,7 @@ impl JsonEmitter { diagnostic_width, macro_backtrace, track_diagnostics, + terminal_url, } } @@ -360,6 +368,7 @@ impl Diagnostic { je.diagnostic_width, je.macro_backtrace, je.track_diagnostics, + je.terminal_url, ) .ui_testing(je.ui_testing) .emit_diagnostic(diag); diff --git a/compiler/rustc_errors/src/json/tests.rs b/compiler/rustc_errors/src/json/tests.rs index f131468971b5a..f161532d3b7e2 100644 --- a/compiler/rustc_errors/src/json/tests.rs +++ b/compiler/rustc_errors/src/json/tests.rs @@ -4,7 +4,7 @@ use crate::json::JsonEmitter; use rustc_span::source_map::{FilePathMapping, SourceMap}; use crate::emitter::{ColorConfig, HumanReadableErrorType}; -use crate::Handler; +use crate::{Handler, TerminalUrl}; use rustc_span::{BytePos, Span}; use std::str; @@ -60,6 +60,7 @@ fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) { None, false, false, + TerminalUrl::No, ); let span = Span::with_root_ctxt(BytePos(span.0), BytePos(span.1)); diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index ec04e865d53b1..83b733d4c0677 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -573,6 +573,7 @@ impl Handler { None, flags.macro_backtrace, flags.track_diagnostics, + 
TerminalUrl::No, )); Self::with_emitter_and_flags(emitter, flags) } @@ -1838,6 +1839,13 @@ pub fn add_elided_lifetime_in_path_suggestion( ); } +#[derive(Clone, Copy, PartialEq, Hash, Debug)] +pub enum TerminalUrl { + No, + Yes, + Auto, +} + /// Useful type to use with `Result<>` indicate that an error has already /// been reported to the user, so no need to continue checking. #[derive(Clone, Copy, Debug, Encodable, Decodable, Hash, PartialEq, Eq, PartialOrd, Ord)] diff --git a/compiler/rustc_expand/src/tests.rs b/compiler/rustc_expand/src/tests.rs index 8f3bea29ffd28..f80141403bf15 100644 --- a/compiler/rustc_expand/src/tests.rs +++ b/compiler/rustc_expand/src/tests.rs @@ -8,7 +8,7 @@ use rustc_span::{BytePos, Span}; use rustc_data_structures::sync::Lrc; use rustc_errors::emitter::EmitterWriter; -use rustc_errors::{Handler, MultiSpan, PResult}; +use rustc_errors::{Handler, MultiSpan, PResult, TerminalUrl}; use std::io; use std::io::prelude::*; @@ -152,6 +152,7 @@ fn test_harness(file_text: &str, span_labels: Vec, expected_output: & None, false, false, + TerminalUrl::No, ); let handler = Handler::with_emitter(true, None, Box::new(emitter)); #[allow(rustc::untranslatable_diagnostic)] diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 0db4d85ff4b67..7bef89a3704a8 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -4,7 +4,7 @@ use crate::early_error; use crate::lint; use crate::search_paths::SearchPath; use crate::utils::NativeLib; -use rustc_errors::LanguageIdentifier; +use rustc_errors::{LanguageIdentifier, TerminalUrl}; use rustc_target::spec::{CodeModel, LinkerFlavorCli, MergeFunctions, PanicStrategy, SanitizerSet}; use rustc_target::spec::{ RelocModel, RelroLevel, SplitDebuginfo, StackProtector, TargetTriple, TlsModel, @@ -399,6 +399,8 @@ mod desc { pub const parse_code_model: &str = "one of supported code models (`rustc --print code-models`)"; pub const parse_tls_model: &str = "one of supported TLS models (`rustc --print tls-models`)"; pub const parse_target_feature: &str = parse_string; + pub const parse_terminal_url: &str = + "either a boolean (`yes`, `no`, `on`, `off`, etc), or `auto`"; pub const parse_wasi_exec_model: &str = "either `command` or `reactor`"; pub const parse_split_debuginfo: &str = "one of supported split-debuginfo modes (`off`, `packed`, or `unpacked`)"; @@ -979,6 +981,16 @@ mod parse { true } + pub(crate) fn parse_terminal_url(slot: &mut TerminalUrl, v: Option<&str>) -> bool { + *slot = match v { + Some("on" | "" | "yes" | "y") | None => TerminalUrl::Yes, + Some("off" | "no" | "n") => TerminalUrl::No, + Some("auto") => TerminalUrl::Auto, + _ => return false, + }; + true + } + pub(crate) fn parse_symbol_mangling_version( slot: &mut Option, v: Option<&str>, @@ -1602,6 +1614,8 @@ options! 
{ "show extended diagnostic help (default: no)"), temps_dir: Option = (None, parse_opt_string, [UNTRACKED], "the directory the intermediate files are written to"), + terminal_urls: TerminalUrl = (TerminalUrl::No, parse_terminal_url, [UNTRACKED], + "use the OSC 8 hyperlink terminal specification to print hyperlinks in the compiler output"), #[rustc_lint_opt_deny_field_access("use `Session::lto` instead of this field")] thinlto: Option = (None, parse_opt_bool, [TRACKED], "enable ThinLTO when possible"), diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index 8a0176f639174..54b68b8341f50 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -24,6 +24,7 @@ use rustc_errors::registry::Registry; use rustc_errors::{ error_code, fallback_fluent_bundle, DiagnosticBuilder, DiagnosticId, DiagnosticMessage, ErrorGuaranteed, FluentBundle, IntoDiagnostic, LazyFallbackBundle, MultiSpan, Noted, + TerminalUrl, }; use rustc_macros::HashStable_Generic; pub use rustc_span::def_id::StableCrateId; @@ -1273,6 +1274,19 @@ fn default_emitter( ) -> Box { let macro_backtrace = sopts.unstable_opts.macro_backtrace; let track_diagnostics = sopts.unstable_opts.track_diagnostics; + let terminal_url = match sopts.unstable_opts.terminal_urls { + TerminalUrl::Auto => { + match (std::env::var("COLORTERM").as_deref(), std::env::var("TERM").as_deref()) { + (Ok("truecolor"), Ok("xterm-256color")) + if sopts.unstable_features.is_nightly_build() => + { + TerminalUrl::Yes + } + _ => TerminalUrl::No, + } + } + t => t, + }; match sopts.error_format { config::ErrorOutputType::HumanReadable(kind) => { let (short, color_config) = kind.unzip(); @@ -1297,6 +1311,7 @@ fn default_emitter( sopts.diagnostic_width, macro_backtrace, track_diagnostics, + terminal_url, ); Box::new(emitter.ui_testing(sopts.unstable_opts.ui_testing)) } @@ -1312,6 +1327,7 @@ fn default_emitter( sopts.diagnostic_width, macro_backtrace, track_diagnostics, + terminal_url, ) .ui_testing(sopts.unstable_opts.ui_testing), ), @@ -1624,6 +1640,7 @@ fn early_error_handler(output: config::ErrorOutputType) -> rustc_errors::Handler None, false, false, + TerminalUrl::No, )) } config::ErrorOutputType::Json { pretty, json_rendered } => Box::new(JsonEmitter::basic( @@ -1634,6 +1651,7 @@ fn early_error_handler(output: config::ErrorOutputType) -> rustc_errors::Handler None, false, false, + TerminalUrl::No, )), }; rustc_errors::Handler::with_emitter(true, None, emitter) diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 0ce43f7db8e8b..05b2a5334f19f 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -4,6 +4,7 @@ use rustc_data_structures::sync::{self, Lrc}; use rustc_data_structures::unord::UnordSet; use rustc_errors::emitter::{Emitter, EmitterWriter}; use rustc_errors::json::JsonEmitter; +use rustc_errors::TerminalUrl; use rustc_feature::UnstableFeatures; use rustc_hir::def::{Namespace, Res}; use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId}; @@ -164,6 +165,7 @@ pub(crate) fn new_handler( diagnostic_width, false, unstable_opts.track_diagnostics, + TerminalUrl::No, ) .ui_testing(unstable_opts.ui_testing), ) @@ -183,6 +185,7 @@ pub(crate) fn new_handler( diagnostic_width, false, unstable_opts.track_diagnostics, + TerminalUrl::No, ) .ui_testing(unstable_opts.ui_testing), ) diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs index 37a1005cba1fc..57c41b57311df 100644 --- a/src/librustdoc/doctest.rs +++ b/src/librustdoc/doctest.rs @@ -1,7 +1,7 @@ 
use rustc_ast as ast; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::sync::Lrc; -use rustc_errors::{ColorConfig, ErrorGuaranteed, FatalError}; +use rustc_errors::{ColorConfig, ErrorGuaranteed, FatalError, TerminalUrl}; use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID, LOCAL_CRATE}; use rustc_hir::{self as hir, intravisit, CRATE_HIR_ID}; use rustc_interface::interface; @@ -557,6 +557,7 @@ pub(crate) fn make_test( Some(80), false, false, + TerminalUrl::No, ) .supports_color(); @@ -571,6 +572,7 @@ pub(crate) fn make_test( None, false, false, + TerminalUrl::No, ); // FIXME(misdreavus): pass `-Z treat-err-as-bug` to the doctest parser @@ -756,6 +758,7 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool { None, false, false, + TerminalUrl::No, ); let handler = Handler::with_emitter(false, None, Box::new(emitter)); diff --git a/src/tools/clippy/clippy_lints/src/doc.rs b/src/tools/clippy/clippy_lints/src/doc.rs index 127201b72e275..0b31e20fc87c0 100644 --- a/src/tools/clippy/clippy_lints/src/doc.rs +++ b/src/tools/clippy/clippy_lints/src/doc.rs @@ -11,7 +11,7 @@ use rustc_ast::token::CommentKind; use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::sync::Lrc; use rustc_errors::emitter::EmitterWriter; -use rustc_errors::{Applicability, Handler, SuggestionStyle}; +use rustc_errors::{Applicability, Handler, SuggestionStyle, TerminalUrl}; use rustc_hir as hir; use rustc_hir::intravisit::{self, Visitor}; use rustc_hir::{AnonConst, Expr}; @@ -717,6 +717,7 @@ fn check_code(cx: &LateContext<'_>, text: &str, edition: Edition, span: Span) { None, false, false, + TerminalUrl::No, ); let handler = Handler::with_emitter(false, None, Box::new(emitter)); let sess = ParseSess::with_span_handler(handler, sm); diff --git a/src/tools/clippy/src/driver.rs b/src/tools/clippy/src/driver.rs index d521e8d883983..e45835efe7464 100644 --- a/src/tools/clippy/src/driver.rs +++ b/src/tools/clippy/src/driver.rs @@ -220,6 +220,7 @@ fn report_clippy_ice(info: &panic::PanicInfo<'_>, bug_report_url: &str) { None, false, false, + rustc_errors::TerminalUrl::No, )); let handler = rustc_errors::Handler::with_emitter(true, None, emitter); diff --git a/src/tools/rustfmt/src/parse/session.rs b/src/tools/rustfmt/src/parse/session.rs index 6bfec79cd7030..9014026b0aa4a 100644 --- a/src/tools/rustfmt/src/parse/session.rs +++ b/src/tools/rustfmt/src/parse/session.rs @@ -4,7 +4,7 @@ use std::sync::atomic::{AtomicBool, Ordering}; use rustc_data_structures::sync::{Lrc, Send}; use rustc_errors::emitter::{Emitter, EmitterWriter}; use rustc_errors::translation::Translate; -use rustc_errors::{ColorConfig, Diagnostic, Handler, Level as DiagnosticLevel}; +use rustc_errors::{ColorConfig, Diagnostic, Handler, Level as DiagnosticLevel, TerminalUrl}; use rustc_session::parse::ParseSess as RawParseSess; use rustc_span::{ source_map::{FilePathMapping, SourceMap}, @@ -135,6 +135,7 @@ fn default_handler( None, false, false, + TerminalUrl::No, )) }; Handler::with_emitter( diff --git a/tests/rustdoc-ui/z-help.stdout b/tests/rustdoc-ui/z-help.stdout index 4f07fca82d1eb..3bb975f28825f 100644 --- a/tests/rustdoc-ui/z-help.stdout +++ b/tests/rustdoc-ui/z-help.stdout @@ -168,6 +168,7 @@ -Z symbol-mangling-version=val -- which mangling version to use for symbol names ('legacy' (default) or 'v0') -Z teach=val -- show extended diagnostic help (default: no) -Z temps-dir=val -- the directory the intermediate files are written to + -Z terminal-urls=val -- use the OSC 8 hyperlink terminal specification 
to print hyperlinks in the compiler output
 -Z thinlto=val -- enable ThinLTO when possible
 -Z thir-unsafeck=val -- use the THIR unsafety checker (default: no)
 -Z threads=val -- use a thread pool with N threads
diff --git a/tests/ui/diagnostic-flags/terminal_urls.rs b/tests/ui/diagnostic-flags/terminal_urls.rs
new file mode 100644
index 0000000000000..1f04e2aade17f
--- /dev/null
+++ b/tests/ui/diagnostic-flags/terminal_urls.rs
@@ -0,0 +1,4 @@
+// compile-flags: -Zterminal-urls=yes
+fn main() {
+    let () = 4; //~ ERROR
+}
diff --git a/tests/ui/diagnostic-flags/terminal_urls.stderr b/tests/ui/diagnostic-flags/terminal_urls.stderr
new file mode 100644
index 0000000000000..7f7e69c5d5da7
--- /dev/null
+++ b/tests/ui/diagnostic-flags/terminal_urls.stderr
@@ -0,0 +1,11 @@
+error[]8;;https://doc.rust-lang.org/error_codes/E0308.htmlE0308]8;;]: mismatched types
+  --> $DIR/terminal_urls.rs:3:9
+   |
+LL |     let () = 4;
+   |         ^^   - this expression has type `{integer}`
+   |         |
+   |         expected integer, found `()`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0308`.
From 7fc0f8985dcd51db19b30b8a77345bcaa5daff79 Mon Sep 17 00:00:00 2001
From: Lukas Wirth
Date: Thu, 9 Feb 2023 17:34:21 +0100
Subject: [PATCH 358/501] Explicitly disable the rust-analyzer extension in untrusted workspaces

---
 editors/code/package.json | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/editors/code/package.json b/editors/code/package.json
index 7160781b6f3eb..995036700177a 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -19,6 +19,12 @@
   "categories": [
     "Programming Languages"
   ],
+  "capabilities": {
+    "untrustedWorkspaces": {
+      "supported": false,
+      "description": "rust-analyzer invokes binaries set up by its configuration as well as the Rust toolchain's binaries. A malicious actor could exploit this to run arbitrary code on your machine."
+ } + }, "engines": { "vscode": "^1.66.0" }, From 8dadd54f52c44f829ba95d0f483ee155b1f2e19b Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 8 Feb 2023 02:34:23 +0000 Subject: [PATCH 359/501] Fix subst issue with object_ty_for_trait --- .../rustc_trait_selection/src/traits/object_safety.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_trait_selection/src/traits/object_safety.rs b/compiler/rustc_trait_selection/src/traits/object_safety.rs index 8f548acfd2eac..bafa2981a8739 100644 --- a/compiler/rustc_trait_selection/src/traits/object_safety.rs +++ b/compiler/rustc_trait_selection/src/traits/object_safety.rs @@ -646,11 +646,9 @@ fn object_ty_for_trait<'tcx>( debug!(?obligation); let pred = obligation.predicate.to_opt_poly_projection_pred()?; Some(pred.map_bound(|p| { - ty::ExistentialPredicate::Projection(ty::ExistentialProjection { - def_id: p.projection_ty.def_id, - substs: p.projection_ty.substs, - term: p.term, - }) + ty::ExistentialPredicate::Projection(ty::ExistentialProjection::erase_self_ty( + tcx, p, + )) })) }) .collect(); From 8c67ecd1245daef98a2667bd1672fe47ff16927a Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 8 Feb 2023 02:46:22 +0000 Subject: [PATCH 360/501] Use elaborated item bounds for alias types --- compiler/rustc_trait_selection/src/solve/assembly.rs | 5 +---- tests/ui/traits/new-solver/elaborate-item-bounds.rs | 12 ++++++++++++ 2 files changed, 13 insertions(+), 4 deletions(-) create mode 100644 tests/ui/traits/new-solver/elaborate-item-bounds.rs diff --git a/compiler/rustc_trait_selection/src/solve/assembly.rs b/compiler/rustc_trait_selection/src/solve/assembly.rs index 8525b96c0c21f..f94a47c847e8e 100644 --- a/compiler/rustc_trait_selection/src/solve/assembly.rs +++ b/compiler/rustc_trait_selection/src/solve/assembly.rs @@ -399,10 +399,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { ty::Alias(_, alias_ty) => alias_ty, }; - for (assumption, _) in self - .tcx() - .bound_explicit_item_bounds(alias_ty.def_id) - .subst_iter_copied(self.tcx(), alias_ty.substs) + for assumption in self.tcx().item_bounds(alias_ty.def_id).subst(self.tcx(), alias_ty.substs) { match G::consider_assumption(self, goal, assumption) { Ok(result) => { diff --git a/tests/ui/traits/new-solver/elaborate-item-bounds.rs b/tests/ui/traits/new-solver/elaborate-item-bounds.rs new file mode 100644 index 0000000000000..076aefcf8fc60 --- /dev/null +++ b/tests/ui/traits/new-solver/elaborate-item-bounds.rs @@ -0,0 +1,12 @@ +// compile-flags: -Ztrait-solver=next +// check-pass + +trait Foo { + type Bar: Bar; +} + +trait Bar: Baz {} + +trait Baz {} + +fn main() {} From 8987e68247ed6df47624f9eeb35430fc92d54f15 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 8 Feb 2023 03:03:15 +0000 Subject: [PATCH 361/501] Implement a dummy drop-in-favor-of for the new solver --- .../src/solve/project_goals.rs | 23 ++++++++++----- .../src/solve/trait_goals.rs | 29 ++++++++++++------- .../new-solver/provisional-result-done.rs | 4 --- .../new-solver/provisional-result-done.stderr | 15 ++++++---- .../traits/new-solver/temporary-ambiguity.rs | 22 ++++++++++++++ 5 files changed, 65 insertions(+), 28 deletions(-) create mode 100644 tests/ui/traits/new-solver/temporary-ambiguity.rs diff --git a/compiler/rustc_trait_selection/src/solve/project_goals.rs b/compiler/rustc_trait_selection/src/solve/project_goals.rs index e3ec71d1b4f73..bdf1550fab90e 100644 --- a/compiler/rustc_trait_selection/src/solve/project_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/project_goals.rs @@ 
-3,7 +3,7 @@ use crate::traits::{specialization_graph, translate_substs}; use super::assembly::{self, Candidate, CandidateSource}; use super::infcx_ext::InferCtxtExt; use super::trait_goals::structural_traits; -use super::{Certainty, EvalCtxt, Goal, QueryResult}; +use super::{Certainty, EvalCtxt, Goal, MaybeCause, QueryResult}; use rustc_errors::ErrorGuaranteed; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; @@ -182,11 +182,17 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // If there are *STILL* multiple candidates, give up // and report ambiguity. i += 1; - if i > 1 { - debug!("multiple matches, ambig"); - // FIXME: return overflow if all candidates overflow, otherwise return ambiguity. - unimplemented!(); - } + } + + if candidates.len() > 1 { + let certainty = if candidates.iter().all(|x| { + matches!(x.result.value.certainty, Certainty::Maybe(MaybeCause::Overflow)) + }) { + Certainty::Maybe(MaybeCause::Overflow) + } else { + Certainty::AMBIGUOUS + }; + return self.make_canonical_response(certainty); } } @@ -203,7 +209,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { (CandidateSource::Impl(_), _) | (CandidateSource::ParamEnv(_), _) | (CandidateSource::BuiltinImpl, _) - | (CandidateSource::AliasBound, _) => unimplemented!(), + | (CandidateSource::AliasBound, _) => false, } } } @@ -452,7 +458,8 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> { [ty::GenericArg::from(goal.predicate.self_ty())], )); - let is_sized_certainty = ecx.evaluate_goal(goal.with(tcx, sized_predicate))?.1; + let (_, is_sized_certainty) = + ecx.evaluate_goal(goal.with(tcx, sized_predicate))?; return ecx.eq_term_and_make_canonical_response( goal, is_sized_certainty, diff --git a/compiler/rustc_trait_selection/src/solve/trait_goals.rs b/compiler/rustc_trait_selection/src/solve/trait_goals.rs index 06a72e95d4905..6e1e993b2decc 100644 --- a/compiler/rustc_trait_selection/src/solve/trait_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/trait_goals.rs @@ -4,7 +4,7 @@ use std::iter; use super::assembly::{self, Candidate, CandidateSource}; use super::infcx_ext::InferCtxtExt; -use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, QueryResult}; +use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, MaybeCause, QueryResult}; use rustc_hir::def_id::DefId; use rustc_infer::infer::InferCtxt; use rustc_infer::traits::query::NoSolution; @@ -511,11 +511,17 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // If there are *STILL* multiple candidates, give up // and report ambiguity. i += 1; - if i > 1 { - debug!("multiple matches, ambig"); - // FIXME: return overflow if all candidates overflow, otherwise return ambiguity. 
- unimplemented!(); - } + } + + if candidates.len() > 1 { + let certainty = if candidates.iter().all(|x| { + matches!(x.result.value.certainty, Certainty::Maybe(MaybeCause::Overflow)) + }) { + Certainty::Maybe(MaybeCause::Overflow) + } else { + Certainty::AMBIGUOUS + }; + return self.make_canonical_response(certainty); } } @@ -532,17 +538,18 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { (CandidateSource::Impl(_), _) | (CandidateSource::ParamEnv(_), _) | (CandidateSource::AliasBound, _) - | (CandidateSource::BuiltinImpl, _) => unimplemented!(), + | (CandidateSource::BuiltinImpl, _) => false, } } - fn discard_reservation_impl(&self, candidate: Candidate<'tcx>) -> Candidate<'tcx> { + fn discard_reservation_impl(&self, mut candidate: Candidate<'tcx>) -> Candidate<'tcx> { if let CandidateSource::Impl(def_id) = candidate.source { if let ty::ImplPolarity::Reservation = self.tcx().impl_polarity(def_id) { debug!("Selected reservation impl"); - // FIXME: reduce candidate to ambiguous - // FIXME: replace `var_values` with identity, yeet external constraints. - unimplemented!() + // We assemble all candidates inside of a probe so by + // making a new canonical response here our result will + // have no constraints. + candidate.result = self.make_canonical_response(Certainty::AMBIGUOUS).unwrap(); } } diff --git a/tests/ui/traits/new-solver/provisional-result-done.rs b/tests/ui/traits/new-solver/provisional-result-done.rs index a3d97927bad22..254ab356ad89c 100644 --- a/tests/ui/traits/new-solver/provisional-result-done.rs +++ b/tests/ui/traits/new-solver/provisional-result-done.rs @@ -1,9 +1,5 @@ // known-bug: unknown // compile-flags: -Ztrait-solver=next -// failure-status: 101 -// normalize-stderr-test "note: .*\n\n" -> "" -// normalize-stderr-test "thread 'rustc' panicked.*\n" -> "" -// rustc-env:RUST_BACKTRACE=0 // This tests checks that we update results in the provisional cache when // we pop a goal from the stack. diff --git a/tests/ui/traits/new-solver/provisional-result-done.stderr b/tests/ui/traits/new-solver/provisional-result-done.stderr index ffc92b81f089e..5bd0613d25911 100644 --- a/tests/ui/traits/new-solver/provisional-result-done.stderr +++ b/tests/ui/traits/new-solver/provisional-result-done.stderr @@ -1,6 +1,11 @@ -error: the compiler unexpectedly panicked. this is a bug. +error[E0283]: type annotations needed: cannot satisfy `Bar: Coinductive` + --> $DIR/provisional-result-done.rs:16:25 + | +LL | impl Coinductive for Bar + | ^^^^^^ + | + = note: cannot satisfy `Bar: Coinductive` -query stack during panic: -#0 [check_well_formed] checking that `` is well-formed -#1 [check_mod_type_wf] checking that types are well-formed in top-level module -end of query stack +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0283`. diff --git a/tests/ui/traits/new-solver/temporary-ambiguity.rs b/tests/ui/traits/new-solver/temporary-ambiguity.rs new file mode 100644 index 0000000000000..18ee05457009b --- /dev/null +++ b/tests/ui/traits/new-solver/temporary-ambiguity.rs @@ -0,0 +1,22 @@ +// compile-flags: -Ztrait-solver=next +// check-pass + +// Checks that we don't explode when we assemble >1 candidate for a goal. 
+ +struct Wrapper(T); + +trait Foo {} + +impl Foo for Wrapper {} + +impl Foo for Wrapper<()> {} + +fn needs_foo(_: impl Foo) {} + +fn main() { + let mut x = Default::default(); + let w = Wrapper(x); + needs_foo(w); + x = 1; + drop(x); +} From 654f43f34edded48a55fef2b515b4a7d0304c38e Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 8 Feb 2023 19:25:21 +0000 Subject: [PATCH 362/501] Move winnowing to assembly --- .../src/solve/assembly.rs | 76 +++++++++++++++++- .../src/solve/project_goals.rs | 65 +-------------- .../src/solve/trait_goals.rs | 80 +------------------ 3 files changed, 81 insertions(+), 140 deletions(-) diff --git a/compiler/rustc_trait_selection/src/solve/assembly.rs b/compiler/rustc_trait_selection/src/solve/assembly.rs index f94a47c847e8e..775974d8e9a60 100644 --- a/compiler/rustc_trait_selection/src/solve/assembly.rs +++ b/compiler/rustc_trait_selection/src/solve/assembly.rs @@ -3,7 +3,7 @@ use super::infcx_ext::InferCtxtExt; #[cfg(doc)] use super::trait_goals::structural_traits::*; -use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, QueryResult}; +use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, MaybeCause, QueryResult}; use rustc_hir::def_id::DefId; use rustc_infer::traits::query::NoSolution; use rustc_infer::traits::util::elaborate_predicates; @@ -459,4 +459,78 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { } } } + + #[instrument(level = "debug", skip(self), ret)] + pub(super) fn merge_candidates_and_discard_reservation_impls( + &mut self, + mut candidates: Vec>, + ) -> QueryResult<'tcx> { + match candidates.len() { + 0 => return Err(NoSolution), + 1 => return Ok(self.discard_reservation_impl(candidates.pop().unwrap()).result), + _ => {} + } + + if candidates.len() > 1 { + let mut i = 0; + 'outer: while i < candidates.len() { + for j in (0..candidates.len()).filter(|&j| i != j) { + if self.trait_candidate_should_be_dropped_in_favor_of( + &candidates[i], + &candidates[j], + ) { + debug!(candidate = ?candidates[i], "Dropping candidate #{}/{}", i, candidates.len()); + candidates.swap_remove(i); + continue 'outer; + } + } + + debug!(candidate = ?candidates[i], "Retaining candidate #{}/{}", i, candidates.len()); + i += 1; + } + + // If there are *STILL* multiple candidates, give up + // and report ambiguity. + if candidates.len() > 1 { + let certainty = if candidates.iter().all(|x| { + matches!(x.result.value.certainty, Certainty::Maybe(MaybeCause::Overflow)) + }) { + Certainty::Maybe(MaybeCause::Overflow) + } else { + Certainty::AMBIGUOUS + }; + return self.make_canonical_response(certainty); + } + } + + Ok(self.discard_reservation_impl(candidates.pop().unwrap()).result) + } + + fn trait_candidate_should_be_dropped_in_favor_of( + &self, + candidate: &Candidate<'tcx>, + other: &Candidate<'tcx>, + ) -> bool { + // FIXME: implement this + match (candidate.source, other.source) { + (CandidateSource::Impl(_), _) + | (CandidateSource::ParamEnv(_), _) + | (CandidateSource::AliasBound, _) + | (CandidateSource::BuiltinImpl, _) => false, + } + } + + fn discard_reservation_impl(&self, mut candidate: Candidate<'tcx>) -> Candidate<'tcx> { + if let CandidateSource::Impl(def_id) = candidate.source { + if let ty::ImplPolarity::Reservation = self.tcx().impl_polarity(def_id) { + debug!("Selected reservation impl"); + // We assemble all candidates inside of a probe so by + // making a new canonical response here our result will + // have no constraints. 
+ candidate.result = self.make_canonical_response(Certainty::AMBIGUOUS).unwrap(); + } + } + + candidate + } } diff --git a/compiler/rustc_trait_selection/src/solve/project_goals.rs b/compiler/rustc_trait_selection/src/solve/project_goals.rs index bdf1550fab90e..4fea49893a6c6 100644 --- a/compiler/rustc_trait_selection/src/solve/project_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/project_goals.rs @@ -1,9 +1,9 @@ use crate::traits::{specialization_graph, translate_substs}; -use super::assembly::{self, Candidate, CandidateSource}; +use super::assembly; use super::infcx_ext::InferCtxtExt; use super::trait_goals::structural_traits; -use super::{Certainty, EvalCtxt, Goal, MaybeCause, QueryResult}; +use super::{Certainty, EvalCtxt, Goal, QueryResult}; use rustc_errors::ErrorGuaranteed; use rustc_hir::def::DefKind; use rustc_hir::def_id::DefId; @@ -34,7 +34,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { // projection cache in the solver. if self.term_is_fully_unconstrained(goal) { let candidates = self.assemble_and_evaluate_candidates(goal); - self.merge_project_candidates(candidates) + self.merge_candidates_and_discard_reservation_impls(candidates) } else { let predicate = goal.predicate; let unconstrained_rhs = match predicate.term.unpack() { @@ -153,65 +153,6 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { self.make_canonical_response(normalization_certainty.unify_and(rhs_certainty)) } - - fn merge_project_candidates( - &mut self, - mut candidates: Vec>, - ) -> QueryResult<'tcx> { - match candidates.len() { - 0 => return Err(NoSolution), - 1 => return Ok(candidates.pop().unwrap().result), - _ => {} - } - - if candidates.len() > 1 { - let mut i = 0; - 'outer: while i < candidates.len() { - for j in (0..candidates.len()).filter(|&j| i != j) { - if self.project_candidate_should_be_dropped_in_favor_of( - &candidates[i], - &candidates[j], - ) { - debug!(candidate = ?candidates[i], "Dropping candidate #{}/{}", i, candidates.len()); - candidates.swap_remove(i); - continue 'outer; - } - } - - debug!(candidate = ?candidates[i], "Retaining candidate #{}/{}", i, candidates.len()); - // If there are *STILL* multiple candidates, give up - // and report ambiguity. 
- i += 1; - } - - if candidates.len() > 1 { - let certainty = if candidates.iter().all(|x| { - matches!(x.result.value.certainty, Certainty::Maybe(MaybeCause::Overflow)) - }) { - Certainty::Maybe(MaybeCause::Overflow) - } else { - Certainty::AMBIGUOUS - }; - return self.make_canonical_response(certainty); - } - } - - Ok(candidates.pop().unwrap().result) - } - - fn project_candidate_should_be_dropped_in_favor_of( - &self, - candidate: &Candidate<'tcx>, - other: &Candidate<'tcx>, - ) -> bool { - // FIXME: implement this - match (candidate.source, other.source) { - (CandidateSource::Impl(_), _) - | (CandidateSource::ParamEnv(_), _) - | (CandidateSource::BuiltinImpl, _) - | (CandidateSource::AliasBound, _) => false, - } - } } impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> { diff --git a/compiler/rustc_trait_selection/src/solve/trait_goals.rs b/compiler/rustc_trait_selection/src/solve/trait_goals.rs index 6e1e993b2decc..abb69476caefb 100644 --- a/compiler/rustc_trait_selection/src/solve/trait_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/trait_goals.rs @@ -2,9 +2,9 @@ use std::iter; -use super::assembly::{self, Candidate, CandidateSource}; +use super::assembly; use super::infcx_ext::InferCtxtExt; -use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, MaybeCause, QueryResult}; +use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, QueryResult}; use rustc_hir::def_id::DefId; use rustc_infer::infer::InferCtxt; use rustc_infer::traits::query::NoSolution; @@ -479,80 +479,6 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { goal: Goal<'tcx, TraitPredicate<'tcx>>, ) -> QueryResult<'tcx> { let candidates = self.assemble_and_evaluate_candidates(goal); - self.merge_trait_candidates_discard_reservation_impls(candidates) - } - - #[instrument(level = "debug", skip(self), ret)] - pub(super) fn merge_trait_candidates_discard_reservation_impls( - &mut self, - mut candidates: Vec>, - ) -> QueryResult<'tcx> { - match candidates.len() { - 0 => return Err(NoSolution), - 1 => return Ok(self.discard_reservation_impl(candidates.pop().unwrap()).result), - _ => {} - } - - if candidates.len() > 1 { - let mut i = 0; - 'outer: while i < candidates.len() { - for j in (0..candidates.len()).filter(|&j| i != j) { - if self.trait_candidate_should_be_dropped_in_favor_of( - &candidates[i], - &candidates[j], - ) { - debug!(candidate = ?candidates[i], "Dropping candidate #{}/{}", i, candidates.len()); - candidates.swap_remove(i); - continue 'outer; - } - } - - debug!(candidate = ?candidates[i], "Retaining candidate #{}/{}", i, candidates.len()); - // If there are *STILL* multiple candidates, give up - // and report ambiguity. 
- i += 1; - } - - if candidates.len() > 1 { - let certainty = if candidates.iter().all(|x| { - matches!(x.result.value.certainty, Certainty::Maybe(MaybeCause::Overflow)) - }) { - Certainty::Maybe(MaybeCause::Overflow) - } else { - Certainty::AMBIGUOUS - }; - return self.make_canonical_response(certainty); - } - } - - Ok(self.discard_reservation_impl(candidates.pop().unwrap()).result) - } - - fn trait_candidate_should_be_dropped_in_favor_of( - &self, - candidate: &Candidate<'tcx>, - other: &Candidate<'tcx>, - ) -> bool { - // FIXME: implement this - match (candidate.source, other.source) { - (CandidateSource::Impl(_), _) - | (CandidateSource::ParamEnv(_), _) - | (CandidateSource::AliasBound, _) - | (CandidateSource::BuiltinImpl, _) => false, - } - } - - fn discard_reservation_impl(&self, mut candidate: Candidate<'tcx>) -> Candidate<'tcx> { - if let CandidateSource::Impl(def_id) = candidate.source { - if let ty::ImplPolarity::Reservation = self.tcx().impl_polarity(def_id) { - debug!("Selected reservation impl"); - // We assemble all candidates inside of a probe so by - // making a new canonical response here our result will - // have no constraints. - candidate.result = self.make_canonical_response(Certainty::AMBIGUOUS).unwrap(); - } - } - - candidate + self.merge_candidates_and_discard_reservation_impls(candidates) } } From 68e27b305290352a21dfec9c6ce2e2e48323f528 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 8 Feb 2023 20:28:12 +0000 Subject: [PATCH 363/501] Disqualify auto-trait builtin impl in new solver if impl exists --- .../rustc_trait_selection/src/solve/trait_goals.rs | 14 ++++++++++++++ .../ui/traits/new-solver/unsafe-auto-trait-impl.rs | 8 ++++++++ 2 files changed, 22 insertions(+) create mode 100644 tests/ui/traits/new-solver/unsafe-auto-trait-impl.rs diff --git a/compiler/rustc_trait_selection/src/solve/trait_goals.rs b/compiler/rustc_trait_selection/src/solve/trait_goals.rs index 06a72e95d4905..4f0a83f203aa4 100644 --- a/compiler/rustc_trait_selection/src/solve/trait_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/trait_goals.rs @@ -89,6 +89,20 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> { ecx: &mut EvalCtxt<'_, 'tcx>, goal: Goal<'tcx, Self>, ) -> QueryResult<'tcx> { + // This differs from the current stable behavior and + // fixes #84857. Due to breakage found via crater, we + // currently instead lint patterns which can be used to + // exploit this unsoundness on stable, see #93367 for + // more details. + if let Some(def_id) = ecx.tcx().find_map_relevant_impl( + goal.predicate.def_id(), + goal.predicate.self_ty(), + Some, + ) { + debug!(?def_id, ?goal, "disqualified auto-trait implementation"); + return Err(NoSolution); + } + ecx.probe_and_evaluate_goal_for_constituent_tys( goal, structural_traits::instantiate_constituent_tys_for_auto_trait, diff --git a/tests/ui/traits/new-solver/unsafe-auto-trait-impl.rs b/tests/ui/traits/new-solver/unsafe-auto-trait-impl.rs new file mode 100644 index 0000000000000..bcfc747ebb170 --- /dev/null +++ b/tests/ui/traits/new-solver/unsafe-auto-trait-impl.rs @@ -0,0 +1,8 @@ +// compile-flags: -Ztrait-solver=next +// check-pass + +struct Foo(*mut ()); + +unsafe impl Sync for Foo {} + +fn main() {} From 221ea3080d62a32af8b9ad30c9f3fac5cb084347 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Thu, 9 Feb 2023 17:27:31 +0000 Subject: [PATCH 364/501] Put deaggregated statements after original constant. 
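A minimal sketch of the kind of code this affects, mirroring the `constant`
test added below (the names are only illustrative):

```rust
// SROA splits the tuple local `y` into one local per field. The moves out of
// `y.0` and `y.1` that the pass introduces only make sense once `y = U` has
// been assigned, so the deaggregated statements are now inserted after the
// original constant assignment instead of before it.
fn constant() {
    const U: (usize, u8) = (5, 9);
    let y = U;
    let t = y.0;
    let u = y.1;
    let _ = (t, u);
}

fn main() {
    constant();
}
```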
--- compiler/rustc_mir_transform/src/sroa.rs | 2 + ...onstant.ScalarReplacementOfAggregates.diff | 46 +++++++++++++++++++ tests/mir-opt/sroa.rs | 9 ++++ 3 files changed, 57 insertions(+) create mode 100644 tests/mir-opt/sroa.constant.ScalarReplacementOfAggregates.diff diff --git a/compiler/rustc_mir_transform/src/sroa.rs b/compiler/rustc_mir_transform/src/sroa.rs index 26acd406ed8a9..30d8511153a2b 100644 --- a/compiler/rustc_mir_transform/src/sroa.rs +++ b/compiler/rustc_mir_transform/src/sroa.rs @@ -318,6 +318,8 @@ impl<'tcx, 'll> MutVisitor<'tcx> for ReplacementVisitor<'tcx, 'll> { // ConstProp will pick up the pieces and replace them by actual constants. StatementKind::Assign(box (place, Rvalue::Use(Operand::Constant(_)))) => { if let Some(final_locals) = self.replacements.place_fragments(place) { + // Put the deaggregated statements *after* the original one. + let location = location.successor_within_block(); for (field, ty, new_local) in final_locals { let rplace = self.tcx.mk_place_field(place, field, ty); let rvalue = Rvalue::Use(Operand::Move(rplace)); diff --git a/tests/mir-opt/sroa.constant.ScalarReplacementOfAggregates.diff b/tests/mir-opt/sroa.constant.ScalarReplacementOfAggregates.diff new file mode 100644 index 0000000000000..9e33215f2b5a6 --- /dev/null +++ b/tests/mir-opt/sroa.constant.ScalarReplacementOfAggregates.diff @@ -0,0 +1,46 @@ +- // MIR for `constant` before ScalarReplacementOfAggregates ++ // MIR for `constant` after ScalarReplacementOfAggregates + + fn constant() -> () { + let mut _0: (); // return place in scope 0 at $DIR/sroa.rs:+0:15: +0:15 + let _1: (usize, u8); // in scope 0 at $DIR/sroa.rs:+2:9: +2:10 ++ let _4: usize; // in scope 0 at $DIR/sroa.rs:+2:9: +2:10 ++ let _5: u8; // in scope 0 at $DIR/sroa.rs:+2:9: +2:10 + scope 1 { +- debug y => _1; // in scope 1 at $DIR/sroa.rs:+2:9: +2:10 ++ debug y => (usize, u8){ .0 => _4, .1 => _5, }; // in scope 1 at $DIR/sroa.rs:+2:9: +2:10 + let _2: usize; // in scope 1 at $DIR/sroa.rs:+3:9: +3:10 + scope 2 { + debug t => _2; // in scope 2 at $DIR/sroa.rs:+3:9: +3:10 + let _3: u8; // in scope 2 at $DIR/sroa.rs:+4:9: +4:10 + scope 3 { + debug u => _3; // in scope 3 at $DIR/sroa.rs:+4:9: +4:10 + } + } + } + + bb0: { +- StorageLive(_1); // scope 0 at $DIR/sroa.rs:+2:9: +2:10 ++ StorageLive(_4); // scope 0 at $DIR/sroa.rs:+2:9: +2:10 ++ StorageLive(_5); // scope 0 at $DIR/sroa.rs:+2:9: +2:10 ++ nop; // scope 0 at $DIR/sroa.rs:+2:9: +2:10 + _1 = const _; // scope 0 at $DIR/sroa.rs:+2:13: +2:14 ++ _4 = move (_1.0: usize); // scope 1 at $DIR/sroa.rs:+3:9: +3:10 ++ _5 = move (_1.1: u8); // scope 1 at $DIR/sroa.rs:+3:9: +3:10 + StorageLive(_2); // scope 1 at $DIR/sroa.rs:+3:9: +3:10 +- _2 = (_1.0: usize); // scope 1 at $DIR/sroa.rs:+3:13: +3:16 ++ _2 = _4; // scope 1 at $DIR/sroa.rs:+3:13: +3:16 + StorageLive(_3); // scope 2 at $DIR/sroa.rs:+4:9: +4:10 +- _3 = (_1.1: u8); // scope 2 at $DIR/sroa.rs:+4:13: +4:16 ++ _3 = _5; // scope 2 at $DIR/sroa.rs:+4:13: +4:16 + _0 = const (); // scope 0 at $DIR/sroa.rs:+0:15: +5:2 + StorageDead(_3); // scope 2 at $DIR/sroa.rs:+5:1: +5:2 + StorageDead(_2); // scope 1 at $DIR/sroa.rs:+5:1: +5:2 +- StorageDead(_1); // scope 0 at $DIR/sroa.rs:+5:1: +5:2 ++ StorageDead(_4); // scope 0 at $DIR/sroa.rs:+5:1: +5:2 ++ StorageDead(_5); // scope 0 at $DIR/sroa.rs:+5:1: +5:2 ++ nop; // scope 0 at $DIR/sroa.rs:+5:1: +5:2 + return; // scope 0 at $DIR/sroa.rs:+5:2: +5:2 + } + } + diff --git a/tests/mir-opt/sroa.rs b/tests/mir-opt/sroa.rs index 471aac9f9d82d..b69de2e124ec6 100644 --- a/tests/mir-opt/sroa.rs 
+++ b/tests/mir-opt/sroa.rs @@ -87,6 +87,13 @@ fn ref_copies(x: &Foo) { let u = y.c; } +fn constant() { + const U: (usize, u8) = (5, 9); + let y = U; + let t = y.0; + let u = y.1; +} + fn main() { dropping(); enums(5); @@ -96,6 +103,7 @@ fn main() { escaping(); copies(Foo { a: 5, b: (), c: "a", d: Some(-4) }); ref_copies(&Foo { a: 5, b: (), c: "a", d: Some(-4) }); + constant(); } // EMIT_MIR sroa.dropping.ScalarReplacementOfAggregates.diff @@ -106,3 +114,4 @@ fn main() { // EMIT_MIR sroa.escaping.ScalarReplacementOfAggregates.diff // EMIT_MIR sroa.copies.ScalarReplacementOfAggregates.diff // EMIT_MIR sroa.ref_copies.ScalarReplacementOfAggregates.diff +// EMIT_MIR sroa.constant.ScalarReplacementOfAggregates.diff From 792b8464e7c4feba1543308d67e637888a3da884 Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Thu, 9 Feb 2023 10:42:42 -0700 Subject: [PATCH 365/501] rustdoc: remove unused fn parameter `tab` --- src/librustdoc/html/render/print_item.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs index c2d24e514845e..82a51a8467bcd 100644 --- a/src/librustdoc/html/render/print_item.rs +++ b/src/librustdoc/html/render/print_item.rs @@ -1109,7 +1109,7 @@ fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clea fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean::Union) { wrap_item(w, |w| { render_attributes_in_pre(w, it, ""); - render_union(w, it, Some(&s.generics), &s.fields, "", cx); + render_union(w, it, Some(&s.generics), &s.fields, cx); }); document(w, cx, it, None, HeadingOffset::H2); @@ -1628,7 +1628,6 @@ fn render_union( it: &clean::Item, g: Option<&clean::Generics>, fields: &[clean::Item], - tab: &str, cx: &Context<'_>, ) { let tcx = cx.tcx(); @@ -1651,7 +1650,7 @@ fn render_union( w.write_str(" "); } - write!(w, "{{\n{}", tab); + write!(w, "{{\n"); let count_fields = fields.iter().filter(|f| matches!(*f.kind, clean::StructFieldItem(..))).count(); let toggle = should_hide_fields(count_fields); @@ -1663,17 +1662,16 @@ fn render_union( if let clean::StructFieldItem(ref ty) = *field.kind { write!( w, - " {}{}: {},\n{}", + " {}{}: {},\n", visibility_print_with_space(field.visibility(tcx), field.item_id, cx), field.name.unwrap(), - ty.print(cx), - tab + ty.print(cx) ); } } if it.has_stripped_entries().unwrap() { - write!(w, " /* private fields */\n{}", tab); + write!(w, " /* private fields */\n"); } if toggle { toggle_close(w); From 6e1a28b55093ec03ad15da1e80f3205d0b2223f6 Mon Sep 17 00:00:00 2001 From: Josh Stone Date: Mon, 6 Feb 2023 14:54:54 -0800 Subject: [PATCH 366/501] Release 1.67.1 --- RELEASES.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/RELEASES.md b/RELEASES.md index a63d4e8a043c6..00d0171de6dfc 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,3 +1,10 @@ +Version 1.67.1 (2023-02-09) +=========================== + +- [Fix interoperability with thin archives.](https://github.com/rust-lang/rust/pull/107360) +- [Fix an internal error in the compiler build process.](https://github.com/rust-lang/rust/pull/105624) +- [Downgrade `clippy::uninlined_format_args` to pedantic.](https://github.com/rust-lang/rust-clippy/pull/10265) + Version 1.67.0 (2023-01-26) ========================== From 3c4e1f85cb7103751bf99e3d826051ab42700dcc Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Thu, 9 Feb 2023 18:14:13 +0000 Subject: [PATCH 367/501] Multiple candidates with same response is not ambiguous --- Cargo.lock | 1 + 
compiler/rustc_trait_selection/Cargo.toml | 1 + compiler/rustc_trait_selection/src/solve/assembly.rs | 8 +++++--- tests/ui/traits/new-solver/provisional-result-done.rs | 2 +- .../traits/new-solver/provisional-result-done.stderr | 11 ----------- 5 files changed, 8 insertions(+), 15 deletions(-) delete mode 100644 tests/ui/traits/new-solver/provisional-result-done.stderr diff --git a/Cargo.lock b/Cargo.lock index ad01ef5e41f16..1ddf8cadd7280 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4773,6 +4773,7 @@ checksum = "8ba09476327c4b70ccefb6180f046ef588c26a24cf5d269a9feba316eb4f029f" name = "rustc_trait_selection" version = "0.0.0" dependencies = [ + "itertools", "rustc_ast", "rustc_attr", "rustc_data_structures", diff --git a/compiler/rustc_trait_selection/Cargo.toml b/compiler/rustc_trait_selection/Cargo.toml index 3f863038efb37..d3eba43b47e95 100644 --- a/compiler/rustc_trait_selection/Cargo.toml +++ b/compiler/rustc_trait_selection/Cargo.toml @@ -24,3 +24,4 @@ rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_transmute = { path = "../rustc_transmute", features = ["rustc"] } smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } +itertools = "0.10.1" diff --git a/compiler/rustc_trait_selection/src/solve/assembly.rs b/compiler/rustc_trait_selection/src/solve/assembly.rs index 775974d8e9a60..126ec60b3d68a 100644 --- a/compiler/rustc_trait_selection/src/solve/assembly.rs +++ b/compiler/rustc_trait_selection/src/solve/assembly.rs @@ -4,6 +4,7 @@ use super::infcx_ext::InferCtxtExt; #[cfg(doc)] use super::trait_goals::structural_traits::*; use super::{CanonicalResponse, Certainty, EvalCtxt, Goal, MaybeCause, QueryResult}; +use itertools::Itertools; use rustc_hir::def_id::DefId; use rustc_infer::traits::query::NoSolution; use rustc_infer::traits::util::elaborate_predicates; @@ -489,9 +490,9 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { i += 1; } - // If there are *STILL* multiple candidates, give up - // and report ambiguity. - if candidates.len() > 1 { + // If there are *STILL* multiple candidates that have *different* response + // results, give up and report ambiguity. + if candidates.len() > 1 && !candidates.iter().map(|cand| cand.result).all_equal() { let certainty = if candidates.iter().all(|x| { matches!(x.result.value.certainty, Certainty::Maybe(MaybeCause::Overflow)) }) { @@ -503,6 +504,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { } } + // FIXME: What if there are >1 candidates left with the same response, and one is a reservation impl? Ok(self.discard_reservation_impl(candidates.pop().unwrap()).result) } diff --git a/tests/ui/traits/new-solver/provisional-result-done.rs b/tests/ui/traits/new-solver/provisional-result-done.rs index 254ab356ad89c..589d34dd7abb1 100644 --- a/tests/ui/traits/new-solver/provisional-result-done.rs +++ b/tests/ui/traits/new-solver/provisional-result-done.rs @@ -1,5 +1,5 @@ -// known-bug: unknown // compile-flags: -Ztrait-solver=next +// check-pass // This tests checks that we update results in the provisional cache when // we pop a goal from the stack. 
diff --git a/tests/ui/traits/new-solver/provisional-result-done.stderr b/tests/ui/traits/new-solver/provisional-result-done.stderr deleted file mode 100644 index 5bd0613d25911..0000000000000 --- a/tests/ui/traits/new-solver/provisional-result-done.stderr +++ /dev/null @@ -1,11 +0,0 @@ -error[E0283]: type annotations needed: cannot satisfy `Bar: Coinductive` - --> $DIR/provisional-result-done.rs:16:25 - | -LL | impl Coinductive for Bar - | ^^^^^^ - | - = note: cannot satisfy `Bar: Coinductive` - -error: aborting due to previous error - -For more information about this error, try `rustc --explain E0283`. From 1a98305b9d848fde0cf8052f2aba25b878e8ea5c Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Thu, 9 Feb 2023 18:09:05 -0700 Subject: [PATCH 368/501] rustdoc: clean up `write!` calls with less stuttering --- src/librustdoc/html/render/print_item.rs | 27 +++++++++++------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs index c2d24e514845e..f6a5bf9dfa07a 100644 --- a/src/librustdoc/html/render/print_item.rs +++ b/src/librustdoc/html/render/print_item.rs @@ -1081,10 +1081,10 @@ fn item_typedef(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, t: &clea fn write_content(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Typedef) { wrap_item(w, |w| { render_attributes_in_pre(w, it, ""); - write!(w, "{}", visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx)); write!( w, - "type {}{}{where_clause} = {type_};", + "{}type {}{}{where_clause} = {type_};", + visibility_print_with_space(it.visibility(cx.tcx()), it.item_id, cx), it.name.unwrap(), t.generics.print(cx), where_clause = print_where_clause(&t.generics, cx, 0, Ending::Newline), @@ -1138,13 +1138,11 @@ fn item_union(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean: §\ {name}: {ty}\ ", - id = id, - name = name, shortty = ItemType::StructField, ty = ty.print(cx), ); if let Some(stability_class) = field.stability_class(cx.tcx()) { - write!(w, "", stab = stability_class); + write!(w, ""); } document(w, cx, field, Some(it), HeadingOffset::H3); } @@ -1242,7 +1240,6 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean:: w, "
\ §", - id = id, ); render_stability_since_raw_with_extra( w, @@ -1280,8 +1277,11 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean:: if let Some((heading, fields)) = heading_and_fields { let variant_id = cx.derive_id(format!("{}.{}.fields", ItemType::Variant, variant.name.unwrap())); - write!(w, "
", id = variant_id); - write!(w, "

{heading}

", heading = heading); + write!( + w, + "
\ +

{heading}

", + ); document_non_exhaustive(w, variant); for field in fields { match *field.kind { @@ -1299,7 +1299,6 @@ fn item_enum(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, e: &clean:: §\ {f}: {t}\ ", - id = id, f = field.name.unwrap(), t = ty.print(cx) ); @@ -1450,11 +1449,9 @@ fn item_struct(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, s: &clean w, "\ §\ - {name}: {ty}\ + {field_name}: {ty}\ ", item_type = ItemType::StructField, - id = id, - name = field_name, ty = ty.print(cx) ); document(w, cx, field, Some(it), HeadingOffset::H3); @@ -1842,8 +1839,8 @@ fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) { if layout.abi.is_unsized() { write!(w, "(unsized)"); } else { - let bytes = layout.size.bytes() - tag_size; - write!(w, "{size} byte{pl}", size = bytes, pl = if bytes == 1 { "" } else { "s" },); + let size = layout.size.bytes() - tag_size; + write!(w, "{size} byte{pl}", pl = if size == 1 { "" } else { "s" },); } } @@ -1898,7 +1895,7 @@ fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) { for (index, layout) in variants.iter_enumerated() { let name = adt.variant(index).name; - write!(w, "
  • {name}: ", name = name); + write!(w, "
  • {name}: "); write_size_of_layout(w, layout, tag_size); writeln!(w, "
  • "); } From 1a663c0f53c71cbf69a982f699fbb00cdfce48f8 Mon Sep 17 00:00:00 2001 From: Jack Huey <31162821+jackh726@users.noreply.github.com> Date: Sun, 11 Sep 2022 03:53:54 -0400 Subject: [PATCH 369/501] Cleanup free_region_relations a bit --- .../rustc_borrowck/src/region_infer/values.rs | 1 + .../src/type_check/free_region_relations.rs | 152 ++++++++++-------- compiler/rustc_borrowck/src/type_check/mod.rs | 2 + .../rustc_infer/src/infer/outlives/verify.rs | 1 + .../src/implied_outlives_bounds.rs | 1 + tests/ui/nll/issue-52057.rs | 4 +- 6 files changed, 90 insertions(+), 71 deletions(-) diff --git a/compiler/rustc_borrowck/src/region_infer/values.rs b/compiler/rustc_borrowck/src/region_infer/values.rs index c3dfeedc205f7..6a3748fded554 100644 --- a/compiler/rustc_borrowck/src/region_infer/values.rs +++ b/compiler/rustc_borrowck/src/region_infer/values.rs @@ -187,6 +187,7 @@ pub(crate) struct PlaceholderIndices { } impl PlaceholderIndices { + /// Returns the `PlaceholderIndex` for the inserted `PlaceholderRegion` pub(crate) fn insert(&mut self, placeholder: ty::PlaceholderRegion) -> PlaceholderIndex { let (index, _) = self.indices.insert_full(placeholder); index.into() diff --git a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs index 82ff862479e81..f0d964ae78fcd 100644 --- a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs +++ b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs @@ -8,6 +8,7 @@ use rustc_infer::infer::InferCtxt; use rustc_middle::mir::ConstraintCategory; use rustc_middle::traits::query::OutlivesBound; use rustc_middle::ty::{self, RegionVid, Ty}; +use rustc_span::Span; use rustc_trait_selection::traits::query::type_op::{self, TypeOp}; use std::rc::Rc; use type_op::TypeOpOutput; @@ -217,8 +218,28 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { self.inverse_outlives.add(fr_b, fr_a); } + #[instrument(level = "debug", skip(self))] pub(crate) fn create(mut self) -> CreateResult<'tcx> { let span = self.infcx.tcx.def_span(self.universal_regions.defining_ty.def_id()); + + // Insert the facts we know from the predicates. Why? Why not. + let param_env = self.param_env; + self.add_outlives_bounds(outlives::explicit_outlives_bounds(param_env)); + + // Finally: + // - outlives is reflexive, so `'r: 'r` for every region `'r` + // - `'static: 'r` for every region `'r` + // - `'r: 'fn_body` for every (other) universally quantified + // region `'r`, all of which are provided by our caller + let fr_static = self.universal_regions.fr_static; + let fr_fn_body = self.universal_regions.fr_fn_body; + for fr in self.universal_regions.universal_regions() { + debug!("build: relating free region {:?} to itself and to 'static", fr); + self.relate_universal_regions(fr, fr); + self.relate_universal_regions(fr_static, fr); + self.relate_universal_regions(fr, fr_fn_body); + } + let unnormalized_input_output_tys = self .universal_regions .unnormalized_input_tys @@ -236,78 +257,52 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { // the `relations` is built. let mut normalized_inputs_and_output = Vec::with_capacity(self.universal_regions.unnormalized_input_tys.len() + 1); - let constraint_sets: Vec<_> = unnormalized_input_output_tys - .flat_map(|ty| { - debug!("build: input_or_output={:?}", ty); - // We add implied bounds from both the unnormalized and normalized ty. 
- // See issue #87748 - let constraints_implied1 = self.add_implied_bounds(ty); - let TypeOpOutput { output: norm_ty, constraints: constraints1, .. } = self - .param_env - .and(type_op::normalize::Normalize::new(ty)) - .fully_perform(self.infcx) - .unwrap_or_else(|_| { - let reported = self - .infcx - .tcx - .sess - .delay_span_bug(span, &format!("failed to normalize {:?}", ty)); - TypeOpOutput { - output: self.infcx.tcx.ty_error_with_guaranteed(reported), - constraints: None, - error_info: None, - } - }); - // Note: we need this in examples like - // ``` - // trait Foo { - // type Bar; - // fn foo(&self) -> &Self::Bar; - // } - // impl Foo for () { - // type Bar = (); - // fn foo(&self) -> &() {} - // } - // ``` - // Both &Self::Bar and &() are WF - let constraints_implied2 = - if ty != norm_ty { self.add_implied_bounds(norm_ty) } else { None }; - normalized_inputs_and_output.push(norm_ty); - constraints1.into_iter().chain(constraints_implied1).chain(constraints_implied2) - }) - .collect(); - - // Insert the facts we know from the predicates. Why? Why not. - let param_env = self.param_env; - self.add_outlives_bounds(outlives::explicit_outlives_bounds(param_env)); + let mut constraints = vec![]; + for ty in unnormalized_input_output_tys { + debug!("build: input_or_output={:?}", ty); + // We add implied bounds from both the unnormalized and normalized ty. + // See issue #87748 + let constraints_unnorm = self.add_implied_bounds(ty); + constraints_unnorm.map(|c| constraints.push(c)); + let TypeOpOutput { output: norm_ty, constraints: constraints_normalize, .. } = self + .param_env + .and(type_op::normalize::Normalize::new(ty)) + .fully_perform(self.infcx) + .unwrap_or_else(|_| { + self.infcx + .tcx + .sess + .delay_span_bug(span, &format!("failed to normalize {:?}", ty)); + TypeOpOutput { + output: self.infcx.tcx.ty_error(), + constraints: None, + error_info: None, + } + }); + constraints_normalize.map(|c| constraints.push(c)); + + // Note: we need this in examples like + // ``` + // trait Foo { + // type Bar; + // fn foo(&self) -> &Self::Bar; + // } + // impl Foo for () { + // type Bar = (); + // fn foo(&self) ->&() {} + // } + // ``` + // Both &Self::Bar and &() are WF + if ty != norm_ty { + let constraints_norm = self.add_implied_bounds(norm_ty); + constraints_norm.map(|c| constraints.push(c)); + } - // Finally: - // - outlives is reflexive, so `'r: 'r` for every region `'r` - // - `'static: 'r` for every region `'r` - // - `'r: 'fn_body` for every (other) universally quantified - // region `'r`, all of which are provided by our caller - let fr_static = self.universal_regions.fr_static; - let fr_fn_body = self.universal_regions.fr_fn_body; - for fr in self.universal_regions.universal_regions() { - debug!("build: relating free region {:?} to itself and to 'static", fr); - self.relate_universal_regions(fr, fr); - self.relate_universal_regions(fr_static, fr); - self.relate_universal_regions(fr, fr_fn_body); + normalized_inputs_and_output.push(norm_ty); } - for data in &constraint_sets { - constraint_conversion::ConstraintConversion::new( - self.infcx, - &self.universal_regions, - &self.region_bound_pairs, - self.implicit_region_bound, - self.param_env, - Locations::All(span), - span, - ConstraintCategory::Internal, - &mut self.constraints, - ) - .convert_all(data); + for c in constraints { + self.push_region_constraints(c, span); } CreateResult { @@ -321,6 +316,24 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { } } + #[instrument(skip(self, data), level = "debug")] + fn 
push_region_constraints(&mut self, data: &QueryRegionConstraints<'tcx>, span: Span) { + debug!("constraints generated: {:#?}", data); + + constraint_conversion::ConstraintConversion::new( + self.infcx, + &self.universal_regions, + &self.region_bound_pairs, + self.implicit_region_bound, + self.param_env, + Locations::All(span), + span, + ConstraintCategory::Internal, + &mut self.constraints, + ) + .convert_all(data); + } + /// Update the type of a single local, which should represent /// either the return type of the MIR or one of its arguments. At /// the same time, compute and add any implied bounds that come @@ -332,6 +345,7 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { .and(type_op::implied_outlives_bounds::ImpliedOutlivesBounds { ty }) .fully_perform(self.infcx) .unwrap_or_else(|_| bug!("failed to compute implied bounds {:?}", ty)); + debug!(?bounds, ?constraints); self.add_outlives_bounds(bounds); constraints } diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index 5b52846562f87..8273113295c02 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -910,6 +910,8 @@ pub(crate) struct MirTypeckRegionConstraints<'tcx> { } impl<'tcx> MirTypeckRegionConstraints<'tcx> { + /// Creates a `Region` that for a given `PlaceholderRegion`, or returns the + /// region that corresponds to a previously created one. fn placeholder_region( &mut self, infcx: &InferCtxt<'tcx>, diff --git a/compiler/rustc_infer/src/infer/outlives/verify.rs b/compiler/rustc_infer/src/infer/outlives/verify.rs index 94de9bc2d0228..bae246418b05a 100644 --- a/compiler/rustc_infer/src/infer/outlives/verify.rs +++ b/compiler/rustc_infer/src/infer/outlives/verify.rs @@ -207,6 +207,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { /// /// In some cases, such as when `erased_ty` represents a `ty::Param`, however, /// the result is precise. + #[instrument(level = "debug", skip(self))] fn declared_generic_bounds_from_env_for_erased_ty( &self, erased_ty: Ty<'tcx>, diff --git a/compiler/rustc_traits/src/implied_outlives_bounds.rs b/compiler/rustc_traits/src/implied_outlives_bounds.rs index fe633d687d91b..e2fb80d51caed 100644 --- a/compiler/rustc_traits/src/implied_outlives_bounds.rs +++ b/compiler/rustc_traits/src/implied_outlives_bounds.rs @@ -81,6 +81,7 @@ fn compute_implied_outlives_bounds<'tcx>( // From the full set of obligations, just filter down to the // region relationships. outlives_bounds.extend(obligations.into_iter().filter_map(|obligation| { + debug!(?obligation); assert!(!obligation.has_escaping_bound_vars()); match obligation.predicate.kind().no_bound_vars() { None => None, diff --git a/tests/ui/nll/issue-52057.rs b/tests/ui/nll/issue-52057.rs index 98f49fe8f5507..5991c1104c8a2 100644 --- a/tests/ui/nll/issue-52057.rs +++ b/tests/ui/nll/issue-52057.rs @@ -1,6 +1,6 @@ // Regression test for #52057. There is an implied bound -// that `I: 'a` where `'a` is the lifetime of `self` in `parse_first`; -// but to observe that, one must normalize first. +// that `I: 'x` where `'x` is the lifetime of the reference `&mut Self::Input` +// in `parse_first`; but to observe that, one must normalize first. 
// // run-pass From 0637b6b4718bcaa273b55f83ca4be893a86b1f83 Mon Sep 17 00:00:00 2001 From: Jack Huey <31162821+jackh726@users.noreply.github.com> Date: Sun, 11 Sep 2022 04:27:25 -0400 Subject: [PATCH 370/501] Update implied_outlives_bounds to properly register implied bounds behind normalization --- .../src/type_check/free_region_relations.rs | 13 ++- compiler/rustc_borrowck/src/type_check/mod.rs | 2 +- .../src/implied_outlives_bounds.rs | 85 +++++++++++-------- compiler/rustc_traits/src/lib.rs | 1 + 4 files changed, 60 insertions(+), 41 deletions(-) diff --git a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs index f0d964ae78fcd..2dd24fe034038 100644 --- a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs +++ b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs @@ -226,7 +226,6 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { let param_env = self.param_env; self.add_outlives_bounds(outlives::explicit_outlives_bounds(param_env)); - // Finally: // - outlives is reflexive, so `'r: 'r` for every region `'r` // - `'static: 'r` for every region `'r` // - `'r: 'fn_body` for every (other) universally quantified @@ -263,7 +262,9 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { // We add implied bounds from both the unnormalized and normalized ty. // See issue #87748 let constraints_unnorm = self.add_implied_bounds(ty); - constraints_unnorm.map(|c| constraints.push(c)); + if let Some(c) = constraints_unnorm { + constraints.push(c) + } let TypeOpOutput { output: norm_ty, constraints: constraints_normalize, .. } = self .param_env .and(type_op::normalize::Normalize::new(ty)) @@ -279,7 +280,9 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { error_info: None, } }); - constraints_normalize.map(|c| constraints.push(c)); + if let Some(c) = constraints_normalize { + constraints.push(c) + } // Note: we need this in examples like // ``` @@ -295,7 +298,9 @@ impl<'tcx> UniversalRegionRelationsBuilder<'_, 'tcx> { // Both &Self::Bar and &() are WF if ty != norm_ty { let constraints_norm = self.add_implied_bounds(norm_ty); - constraints_norm.map(|c| constraints.push(c)); + if let Some(c) = constraints_norm { + constraints.push(c) + } } normalized_inputs_and_output.push(norm_ty); diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs index 8273113295c02..64c96281ed983 100644 --- a/compiler/rustc_borrowck/src/type_check/mod.rs +++ b/compiler/rustc_borrowck/src/type_check/mod.rs @@ -910,7 +910,7 @@ pub(crate) struct MirTypeckRegionConstraints<'tcx> { } impl<'tcx> MirTypeckRegionConstraints<'tcx> { - /// Creates a `Region` that for a given `PlaceholderRegion`, or returns the + /// Creates a `Region` for a given `PlaceholderRegion`, or returns the /// region that corresponds to a previously created one. 
fn placeholder_region( &mut self, diff --git a/compiler/rustc_traits/src/implied_outlives_bounds.rs b/compiler/rustc_traits/src/implied_outlives_bounds.rs index e2fb80d51caed..2c6c77072e60e 100644 --- a/compiler/rustc_traits/src/implied_outlives_bounds.rs +++ b/compiler/rustc_traits/src/implied_outlives_bounds.rs @@ -70,48 +70,61 @@ fn compute_implied_outlives_bounds<'tcx>( let obligations = wf::obligations(ocx.infcx, param_env, CRATE_DEF_ID, 0, arg, DUMMY_SP) .unwrap_or_default(); - // While these predicates should all be implied by other parts of - // the program, they are still relevant as they may constrain - // inference variables, which is necessary to add the correct - // implied bounds in some cases, mostly when dealing with projections. - ocx.register_obligations( - obligations.iter().filter(|o| o.predicate.has_non_region_infer()).cloned(), - ); - - // From the full set of obligations, just filter down to the - // region relationships. - outlives_bounds.extend(obligations.into_iter().filter_map(|obligation| { + for obligation in obligations { debug!(?obligation); assert!(!obligation.has_escaping_bound_vars()); - match obligation.predicate.kind().no_bound_vars() { - None => None, - Some(pred) => match pred { - ty::PredicateKind::Clause(ty::Clause::Trait(..)) - | ty::PredicateKind::Subtype(..) - | ty::PredicateKind::Coerce(..) - | ty::PredicateKind::Clause(ty::Clause::Projection(..)) - | ty::PredicateKind::ClosureKind(..) - | ty::PredicateKind::ObjectSafe(..) - | ty::PredicateKind::ConstEvaluatable(..) - | ty::PredicateKind::ConstEquate(..) - | ty::PredicateKind::Ambiguous - | ty::PredicateKind::TypeWellFormedFromEnv(..) => None, - ty::PredicateKind::WellFormed(arg) => { - wf_args.push(arg); - None + + // While these predicates should all be implied by other parts of + // the program, they are still relevant as they may constrain + // inference variables, which is necessary to add the correct + // implied bounds in some cases, mostly when dealing with projections. + // + // Another important point here: we only register `Projection` + // predicates, since otherwise we might register outlives + // predicates containing inference variables, and we don't + // learn anything new from those. + if obligation.predicate.has_non_region_infer() { + match obligation.predicate.kind().skip_binder() { + ty::PredicateKind::Clause(ty::Clause::Projection(..)) => { + ocx.register_obligation(obligation.clone()); } + _ => {} + } + } - ty::PredicateKind::Clause(ty::Clause::RegionOutlives( - ty::OutlivesPredicate(r_a, r_b), - )) => Some(ty::OutlivesPredicate(r_a.into(), r_b)), + let pred = match obligation.predicate.kind().no_bound_vars() { + None => continue, + Some(pred) => pred, + }; + match pred { + ty::PredicateKind::Clause(ty::Clause::Trait(..)) + | ty::PredicateKind::Subtype(..) + | ty::PredicateKind::Coerce(..) + | ty::PredicateKind::Clause(ty::Clause::Projection(..)) + | ty::PredicateKind::ClosureKind(..) + | ty::PredicateKind::ObjectSafe(..) + | ty::PredicateKind::ConstEvaluatable(..) + | ty::PredicateKind::ConstEquate(..) + | ty::PredicateKind::Ambiguous + | ty::PredicateKind::TypeWellFormedFromEnv(..) 
=> {}
+
+                // We need to search through *all* WellFormed predicates
+                ty::PredicateKind::WellFormed(arg) => {
+                    wf_args.push(arg);
+                }
+
+                // We need to register region relationships
+                ty::PredicateKind::Clause(ty::Clause::RegionOutlives(ty::OutlivesPredicate(
+                    r_a,
+                    r_b,
+                ))) => outlives_bounds.push(ty::OutlivesPredicate(r_a.into(), r_b)),
 
-                    ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
-                        ty_a,
-                        r_b,
-                    ))) => Some(ty::OutlivesPredicate(ty_a.into(), r_b)),
-                },
+                ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate(
+                    ty_a,
+                    r_b,
+                ))) => outlives_bounds.push(ty::OutlivesPredicate(ty_a.into(), r_b)),
             }
-        }));
+        }
     }
 
     // This call to `select_all_or_error` is necessary to constrain inference variables, which we
diff --git a/compiler/rustc_traits/src/lib.rs b/compiler/rustc_traits/src/lib.rs
index 9aa26667e7bf4..8bea5588ae75e 100644
--- a/compiler/rustc_traits/src/lib.rs
+++ b/compiler/rustc_traits/src/lib.rs
@@ -4,6 +4,7 @@
 #![deny(rustc::untranslatable_diagnostic)]
 #![deny(rustc::diagnostic_outside_of_impl)]
 #![feature(let_chains)]
+#![feature(drain_filter)]
 #![recursion_limit = "256"]
 
 #[macro_use]

From 4b1157509f96e4fffe7ef40ab8cbaa08704d36cc Mon Sep 17 00:00:00 2001
From: Dan Gohman
Date: Thu, 9 Feb 2023 19:01:47 -0800
Subject: [PATCH 371/501] Allow wasi-libc to initialize its environment
 variables lazily.

Use `__wasilibc_get_environ()` to read the environment variable list
from wasi-libc instead of using `environ`. `environ` is a global
variable which effectively requires wasi-libc to initialize the
environment variables eagerly, and `__wasilibc_get_environ()` is
specifically designed to be an alternative that lets wasi-libc
initialize its environment variables lazily.

This should have the side effect of fixing at least some of the cases
of #107635.
---
 library/std/src/sys/wasi/os.rs | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/library/std/src/sys/wasi/os.rs b/library/std/src/sys/wasi/os.rs
index f5513e9996d40..9919dc7087ec1 100644
--- a/library/std/src/sys/wasi/os.rs
+++ b/library/std/src/sys/wasi/os.rs
@@ -21,6 +21,7 @@ mod libc {
     extern "C" {
         pub fn getcwd(buf: *mut c_char, size: size_t) -> *mut c_char;
         pub fn chdir(dir: *const c_char) -> c_int;
+        pub fn __wasilibc_get_environ() -> *mut *mut c_char;
     }
 }
 
@@ -161,7 +162,12 @@ impl Iterator for Env {
 pub fn env() -> Env {
     unsafe {
         let _guard = env_read_lock();
-        let mut environ = libc::environ;
+
+        // Use `__wasilibc_get_environ` instead of `environ` here so that we
+        // don't require wasi-libc to eagerly initialize the environment
+        // variables.
+        let mut environ = libc::__wasilibc_get_environ();
+
         let mut result = Vec::new();
         if !environ.is_null() {
             while !(*environ).is_null() {

From b62b82aef4b6aa667161c664fc3ae738c2fc6b9f Mon Sep 17 00:00:00 2001
From: Vadim Petrochenkov
Date: Tue, 1 Feb 2022 20:30:32 +0800
Subject: [PATCH 372/501] Resolve documentation links in rustc and store the
 results in metadata

This commit implements MCP https://github.com/rust-lang/compiler-team/issues/584

It also removes code that is no longer used, including the code that
cloned the resolver, so issue #83761 is fixed.
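As a rough illustration (not taken from this patch; the `Buffer` type below is
invented), this is the kind of documentation link whose resolution is now
computed during name resolution and stored in the new `doc_link_resolutions`
and `doc_link_traits_in_scope` metadata tables instead of being re-resolved by
rustdoc:

```rust
/// A growable byte buffer.
///
/// Intra-doc links such as [`Self::len`] and [`Vec::push`] are resolved per
/// item; with this change the results are recorded in crate metadata.
pub struct Buffer {
    data: Vec<u8>,
}

impl Buffer {
    /// Returns the number of buffered bytes; see also [`Vec::len`].
    pub fn len(&self) -> usize {
        self.data.len()
    }
}
```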
--- Cargo.lock | 2 +- .../src/stable_hasher.rs | 8 + compiler/rustc_hir/src/def.rs | 16 +- compiler/rustc_metadata/src/rmeta/decoder.rs | 41 +- .../src/rmeta/decoder/cstore_impl.rs | 34 +- compiler/rustc_metadata/src/rmeta/encoder.rs | 37 +- compiler/rustc_metadata/src/rmeta/mod.rs | 7 +- compiler/rustc_middle/src/arena.rs | 1 + compiler/rustc_middle/src/query/mod.rs | 12 + compiler/rustc_middle/src/ty/mod.rs | 4 +- compiler/rustc_middle/src/ty/parameterized.rs | 2 + compiler/rustc_middle/src/ty/query.rs | 2 +- compiler/rustc_resolve/Cargo.toml | 1 + .../rustc_resolve/src/build_reduced_graph.rs | 4 +- compiler/rustc_resolve/src/late.rs | 128 +++++- compiler/rustc_resolve/src/lib.rs | 38 +- compiler/rustc_resolve/src/macros.rs | 2 +- compiler/rustc_resolve/src/rustdoc.rs | 361 ++++++++++++++++ src/librustdoc/Cargo.toml | 1 - src/librustdoc/clean/types.rs | 171 +------- src/librustdoc/clean/types/tests.rs | 1 + src/librustdoc/core.rs | 24 +- src/librustdoc/html/markdown.rs | 10 +- src/librustdoc/lib.rs | 17 +- .../passes/collect_intra_doc_links.rs | 173 +------- .../passes/collect_intra_doc_links/early.rs | 404 +----------------- src/librustdoc/passes/mod.rs | 3 +- src/tools/tidy/src/deps.rs | 2 + 28 files changed, 653 insertions(+), 853 deletions(-) create mode 100644 compiler/rustc_resolve/src/rustdoc.rs diff --git a/Cargo.lock b/Cargo.lock index ad01ef5e41f16..ed5065d7611a5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4613,6 +4613,7 @@ name = "rustc_resolve" version = "0.0.0" dependencies = [ "bitflags", + "pulldown-cmark 0.9.2", "rustc_arena", "rustc_ast", "rustc_ast_pretty", @@ -4878,7 +4879,6 @@ dependencies = [ "itertools", "minifier", "once_cell", - "pulldown-cmark 0.9.2", "rayon", "regex", "rustdoc-json-types", diff --git a/compiler/rustc_data_structures/src/stable_hasher.rs b/compiler/rustc_data_structures/src/stable_hasher.rs index ae4836645fa41..e0d77cdaebb36 100644 --- a/compiler/rustc_data_structures/src/stable_hasher.rs +++ b/compiler/rustc_data_structures/src/stable_hasher.rs @@ -486,6 +486,14 @@ impl ToStableHashKey for String { } } +impl, T2: ToStableHashKey> ToStableHashKey for (T1, T2) { + type KeyType = (T1::KeyType, T2::KeyType); + #[inline] + fn to_stable_hash_key(&self, hcx: &HCX) -> Self::KeyType { + (self.0.to_stable_hash_key(hcx), self.1.to_stable_hash_key(hcx)) + } +} + impl HashStable for bool { #[inline] fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { diff --git a/compiler/rustc_hir/src/def.rs b/compiler/rustc_hir/src/def.rs index cca5ead0f8395..f1801a0f844f7 100644 --- a/compiler/rustc_hir/src/def.rs +++ b/compiler/rustc_hir/src/def.rs @@ -2,6 +2,8 @@ use crate::hir; use rustc_ast as ast; use rustc_ast::NodeId; +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::stable_hasher::ToStableHashKey; use rustc_macros::HashStable_Generic; use rustc_span::def_id::{DefId, LocalDefId}; use rustc_span::hygiene::MacroKind; @@ -472,7 +474,8 @@ impl PartialRes { /// Different kinds of symbols can coexist even if they share the same textual name. /// Therefore, they each have a separate universe (known as a "namespace"). -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Encodable, Decodable)] +#[derive(HashStable_Generic)] pub enum Namespace { /// The type namespace includes `struct`s, `enum`s, `union`s, `trait`s, and `mod`s /// (and, by extension, crates). 
@@ -499,6 +502,15 @@ impl Namespace { } } +impl ToStableHashKey for Namespace { + type KeyType = Namespace; + + #[inline] + fn to_stable_hash_key(&self, _: &CTX) -> Namespace { + *self + } +} + /// Just a helper ‒ separate structure for each namespace. #[derive(Copy, Clone, Default, Debug)] pub struct PerNS { @@ -760,3 +772,5 @@ pub enum LifetimeRes { /// HACK: This is used to recover the NodeId of an elided lifetime. ElidedAnchor { start: NodeId, end: NodeId }, } + +pub type DocLinkResMap = FxHashMap<(Symbol, Namespace), Option>>; diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index e2b07fad6e782..800f85063c41a 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -11,7 +11,7 @@ use rustc_data_structures::sync::{Lock, LockGuard, Lrc, OnceCell}; use rustc_data_structures::unhash::UnhashMap; use rustc_expand::base::{SyntaxExtension, SyntaxExtensionKind}; use rustc_expand::proc_macro::{AttrProcMacro, BangProcMacro, DeriveProcMacro}; -use rustc_hir::def::{CtorKind, DefKind, Res}; +use rustc_hir::def::{CtorKind, DefKind, DocLinkResMap, Res}; use rustc_hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc_hir::definitions::{DefKey, DefPath, DefPathData, DefPathHash}; use rustc_hir::diagnostic_items::DiagnosticItems; @@ -1163,20 +1163,6 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { ) } - /// Decodes all inherent impls in the crate (for rustdoc). - fn get_inherent_impls(self) -> impl Iterator + 'a { - (0..self.root.tables.inherent_impls.size()).flat_map(move |i| { - let ty_index = DefIndex::from_usize(i); - let ty_def_id = self.local_def_id(ty_index); - self.root - .tables - .inherent_impls - .get(self, ty_index) - .decode(self) - .map(move |impl_index| (ty_def_id, self.local_def_id(impl_index))) - }) - } - /// Decodes all traits in the crate (for rustdoc and rustc diagnostics). fn get_traits(self) -> impl Iterator + 'a { self.root.traits.decode(self).map(move |index| self.local_def_id(index)) @@ -1195,13 +1181,6 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { }) } - fn get_all_incoherent_impls(self) -> impl Iterator + 'a { - self.cdata - .incoherent_impls - .values() - .flat_map(move |impls| impls.decode(self).map(move |idx| self.local_def_id(idx))) - } - fn get_incoherent_impls(self, tcx: TyCtxt<'tcx>, simp: SimplifiedType) -> &'tcx [DefId] { if let Some(impls) = self.cdata.incoherent_impls.get(&simp) { tcx.arena.alloc_from_iter(impls.decode(self).map(|idx| self.local_def_id(idx))) @@ -1598,6 +1577,24 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { fn get_is_intrinsic(self, index: DefIndex) -> bool { self.root.tables.is_intrinsic.get(self, index) } + + fn get_doc_link_resolutions(self, index: DefIndex) -> DocLinkResMap { + self.root + .tables + .doc_link_resolutions + .get(self, index) + .expect("no resolutions for a doc link") + .decode(self) + } + + fn get_doc_link_traits_in_scope(self, index: DefIndex) -> impl Iterator + 'a { + self.root + .tables + .doc_link_traits_in_scope + .get(self, index) + .expect("no traits in scope for a doc link") + .decode(self) + } } impl CrateMetadata { diff --git a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs index 07cc84ab95368..b12f9b5c917e8 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs @@ -345,6 +345,10 @@ provide! 
{ tcx, def_id, other, cdata, expn_that_defined => { cdata.get_expn_that_defined(def_id.index, tcx.sess) } generator_diagnostic_data => { cdata.get_generator_diagnostic_data(tcx, def_id.index) } is_doc_hidden => { cdata.get_attr_flags(def_id.index).contains(AttrFlags::IS_DOC_HIDDEN) } + doc_link_resolutions => { tcx.arena.alloc(cdata.get_doc_link_resolutions(def_id.index)) } + doc_link_traits_in_scope => { + tcx.arena.alloc_from_iter(cdata.get_doc_link_traits_in_scope(def_id.index)) + } } pub(in crate::rmeta) fn provide(providers: &mut Providers) { @@ -613,36 +617,6 @@ impl CStore { self.get_crate_data(cnum).get_trait_impls() } - /// Decodes all inherent impls in the crate (for rustdoc). - pub fn inherent_impls_in_crate_untracked( - &self, - cnum: CrateNum, - ) -> impl Iterator + '_ { - self.get_crate_data(cnum).get_inherent_impls() - } - - /// Decodes all incoherent inherent impls in the crate (for rustdoc). - pub fn incoherent_impls_in_crate_untracked( - &self, - cnum: CrateNum, - ) -> impl Iterator + '_ { - self.get_crate_data(cnum).get_all_incoherent_impls() - } - - pub fn associated_item_def_ids_untracked<'a>( - &'a self, - def_id: DefId, - sess: &'a Session, - ) -> impl Iterator + 'a { - self.get_crate_data(def_id.krate).get_associated_item_def_ids(def_id.index, sess) - } - - pub fn may_have_doc_links_untracked(&self, def_id: DefId) -> bool { - self.get_crate_data(def_id.krate) - .get_attr_flags(def_id.index) - .contains(AttrFlags::MAY_HAVE_DOC_LINKS) - } - pub fn is_doc_hidden_untracked(&self, def_id: DefId) -> bool { self.get_crate_data(def_id.krate) .get_attr_flags(def_id.index) diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 85e9ae9a98302..263c71ae70286 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -3,7 +3,6 @@ use crate::rmeta::def_path_hash_map::DefPathHashMapRef; use crate::rmeta::table::TableBuilder; use crate::rmeta::*; -use rustc_ast::util::comments; use rustc_ast::Attribute; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::{FxHashMap, FxIndexSet}; @@ -772,7 +771,6 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { struct AnalyzeAttrState { is_exported: bool, - may_have_doc_links: bool, is_doc_hidden: bool, } @@ -790,15 +788,12 @@ fn analyze_attr(attr: &Attribute, state: &mut AnalyzeAttrState) -> bool { let mut should_encode = false; if rustc_feature::is_builtin_only_local(attr.name_or_empty()) { // Attributes marked local-only don't need to be encoded for downstream crates. - } else if let Some(s) = attr.doc_str() { + } else if attr.doc_str().is_some() { // We keep all doc comments reachable to rustdoc because they might be "imported" into // downstream crates if they use `#[doc(inline)]` to copy an item's documentation into // their own. 
if state.is_exported { should_encode = true; - if comments::may_have_doc_links(s.as_str()) { - state.may_have_doc_links = true; - } } } else if attr.has_name(sym::doc) { // If this is a `doc` attribute that doesn't have anything except maybe `inline` (as in @@ -1139,7 +1134,6 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let tcx = self.tcx; let mut state = AnalyzeAttrState { is_exported: tcx.effective_visibilities(()).is_exported(def_id), - may_have_doc_links: false, is_doc_hidden: false, }; let attr_iter = tcx @@ -1151,9 +1145,6 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { record_array!(self.tables.attributes[def_id.to_def_id()] <- attr_iter); let mut attr_flags = AttrFlags::empty(); - if state.may_have_doc_links { - attr_flags |= AttrFlags::MAY_HAVE_DOC_LINKS; - } if state.is_doc_hidden { attr_flags |= AttrFlags::IS_DOC_HIDDEN; } @@ -1231,6 +1222,14 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { def_id.index })); } + + for (def_id, res_map) in &tcx.resolutions(()).doc_link_resolutions { + record!(self.tables.doc_link_resolutions[def_id.to_def_id()] <- res_map); + } + + for (def_id, traits) in &tcx.resolutions(()).doc_link_traits_in_scope { + record_array!(self.tables.doc_link_traits_in_scope[def_id.to_def_id()] <- traits); + } } #[instrument(level = "trace", skip(self))] @@ -1715,6 +1714,12 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { record!(self.tables.lookup_stability[LOCAL_CRATE.as_def_id()] <- stability); } self.encode_deprecation(LOCAL_CRATE.as_def_id()); + if let Some(res_map) = tcx.resolutions(()).doc_link_resolutions.get(&CRATE_DEF_ID) { + record!(self.tables.doc_link_resolutions[LOCAL_CRATE.as_def_id()] <- res_map); + } + if let Some(traits) = tcx.resolutions(()).doc_link_traits_in_scope.get(&CRATE_DEF_ID) { + record_array!(self.tables.doc_link_traits_in_scope[LOCAL_CRATE.as_def_id()] <- traits); + } // Normally, this information is encoded when we walk the items // defined in this crate. However, we skip doing that for proc-macro crates, @@ -2225,6 +2230,18 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>, path: &Path) { pub fn provide(providers: &mut Providers) { *providers = Providers { + doc_link_resolutions: |tcx, def_id| { + tcx.resolutions(()) + .doc_link_resolutions + .get(&def_id.expect_local()) + .expect("no resolutions for a doc link") + }, + doc_link_traits_in_scope: |tcx, def_id| { + tcx.resolutions(()) + .doc_link_traits_in_scope + .get(&def_id.expect_local()) + .expect("no traits in scope for a doc link") + }, traits_in_crate: |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index a74aa381d9eb8..9227609cc8b66 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -9,7 +9,7 @@ use rustc_attr as attr; use rustc_data_structures::svh::Svh; use rustc_data_structures::sync::MetadataRef; use rustc_hir as hir; -use rustc_hir::def::{CtorKind, DefKind}; +use rustc_hir::def::{CtorKind, DefKind, DocLinkResMap}; use rustc_hir::def_id::{CrateNum, DefId, DefIndex, DefPathHash, StableCrateId}; use rustc_hir::definitions::DefKey; use rustc_hir::lang_items::LangItem; @@ -413,6 +413,8 @@ define_tables! { module_reexports: Table>, deduced_param_attrs: Table>, trait_impl_trait_tys: Table>>>, + doc_link_resolutions: Table>, + doc_link_traits_in_scope: Table>, } #[derive(TyEncodable, TyDecodable)] @@ -426,8 +428,7 @@ struct VariantData { bitflags::bitflags! 
{ #[derive(Default)] pub struct AttrFlags: u8 { - const MAY_HAVE_DOC_LINKS = 1 << 0; - const IS_DOC_HIDDEN = 1 << 1; + const IS_DOC_HIDDEN = 1 << 0; } } diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index 2ba7ec5b15192..9d2144c443b4a 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -113,6 +113,7 @@ macro_rules! arena_types { [decode] trait_impl_trait_tys: rustc_data_structures::fx::FxHashMap>, [] bit_set_u32: rustc_index::bit_set::BitSet, [] external_constraints: rustc_middle::traits::solve::ExternalConstraintsData<'tcx>, + [decode] doc_link_resolutions: rustc_hir::def::DocLinkResMap, ]); ) } diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 0a16ede64991d..d37d6b37a37c3 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -2156,4 +2156,16 @@ rustc_queries! { desc { |tcx| "deducing parameter attributes for {}", tcx.def_path_str(def_id) } separate_provide_extern } + + query doc_link_resolutions(def_id: DefId) -> &'tcx DocLinkResMap { + eval_always + desc { "resolutions for documentation links for a module" } + separate_provide_extern + } + + query doc_link_traits_in_scope(def_id: DefId) -> &'tcx [DefId] { + eval_always + desc { "traits in scope for documentation links for a module" } + separate_provide_extern + } } diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 09c3d5b736cf1..cff3ba194bdb2 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -36,7 +36,7 @@ use rustc_data_structures::intern::Interned; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::tagged_ptr::CopyTaggedPtr; use rustc_hir as hir; -use rustc_hir::def::{CtorKind, CtorOf, DefKind, LifetimeRes, Res}; +use rustc_hir::def::{CtorKind, CtorOf, DefKind, DocLinkResMap, LifetimeRes, Res}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LocalDefIdMap}; use rustc_hir::Node; use rustc_index::vec::IndexVec; @@ -181,6 +181,8 @@ pub struct ResolverGlobalCtxt { /// exist under `std`. For example, wrote `str::from_utf8` instead of `std::str::from_utf8`. pub confused_type_with_std_module: FxHashMap, pub registered_tools: RegisteredTools, + pub doc_link_resolutions: FxHashMap, + pub doc_link_traits_in_scope: FxHashMap>, } /// Resolutions that should only be used for lowering. diff --git a/compiler/rustc_middle/src/ty/parameterized.rs b/compiler/rustc_middle/src/ty/parameterized.rs index 84edb5f2a4288..303675d3ca5c1 100644 --- a/compiler/rustc_middle/src/ty/parameterized.rs +++ b/compiler/rustc_middle/src/ty/parameterized.rs @@ -81,6 +81,8 @@ trivially_parameterized_over_tcx! 
{ rustc_hir::IsAsync, rustc_hir::LangItem, rustc_hir::def::DefKind, + rustc_hir::def::DocLinkResMap, + rustc_hir::def_id::DefId, rustc_hir::def_id::DefIndex, rustc_hir::definitions::DefKey, rustc_index::bit_set::BitSet, diff --git a/compiler/rustc_middle/src/ty/query.rs b/compiler/rustc_middle/src/ty/query.rs index 933aaadd62e1d..bec70974dde04 100644 --- a/compiler/rustc_middle/src/ty/query.rs +++ b/compiler/rustc_middle/src/ty/query.rs @@ -45,7 +45,7 @@ use rustc_data_structures::sync::Lrc; use rustc_data_structures::unord::UnordSet; use rustc_errors::ErrorGuaranteed; use rustc_hir as hir; -use rustc_hir::def::DefKind; +use rustc_hir::def::{DefKind, DocLinkResMap}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet, LocalDefId}; use rustc_hir::hir_id::OwnerId; use rustc_hir::lang_items::{LangItem, LanguageItems}; diff --git a/compiler/rustc_resolve/Cargo.toml b/compiler/rustc_resolve/Cargo.toml index 7c3a0f8f277b5..d4935b52b1044 100644 --- a/compiler/rustc_resolve/Cargo.toml +++ b/compiler/rustc_resolve/Cargo.toml @@ -7,6 +7,7 @@ edition = "2021" [dependencies] bitflags = "1.2.1" +pulldown-cmark = { version = "0.9.2", default-features = false } rustc_arena = { path = "../rustc_arena" } rustc_ast = { path = "../rustc_ast" } rustc_ast_pretty = { path = "../rustc_ast_pretty" } diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs index 2fb62ce53ba6e..e74bb0a9a4f32 100644 --- a/compiler/rustc_resolve/src/build_reduced_graph.rs +++ b/compiler/rustc_resolve/src/build_reduced_graph.rs @@ -95,7 +95,7 @@ impl<'a> Resolver<'a> { /// Reachable macros with block module parents exist due to `#[macro_export] macro_rules!`, /// but they cannot use def-site hygiene, so the assumption holds /// (). 
- pub fn get_nearest_non_block_module(&mut self, mut def_id: DefId) -> Module<'a> { + pub(crate) fn get_nearest_non_block_module(&mut self, mut def_id: DefId) -> Module<'a> { loop { match self.get_module(def_id) { Some(module) => return module, @@ -104,7 +104,7 @@ impl<'a> Resolver<'a> { } } - pub fn expect_module(&mut self, def_id: DefId) -> Module<'a> { + pub(crate) fn expect_module(&mut self, def_id: DefId) -> Module<'a> { self.get_module(def_id).expect("argument `DefId` is not a module") } diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index 3ca10ac50baa6..ed1e787f3962c 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -8,7 +8,7 @@ use RibKind::*; -use crate::{path_names_to_string, BindingError, Finalize, LexicalScopeBinding}; +use crate::{path_names_to_string, rustdoc, BindingError, Finalize, LexicalScopeBinding}; use crate::{Module, ModuleOrUniformRoot, NameBinding, ParentScope, PathResult}; use crate::{ResolutionError, Resolver, Segment, UseError}; @@ -24,9 +24,10 @@ use rustc_hir::{BindingAnnotation, PrimTy, TraitCandidate}; use rustc_middle::middle::resolve_lifetime::Set1; use rustc_middle::ty::DefIdTree; use rustc_middle::{bug, span_bug}; +use rustc_session::config::CrateType; use rustc_session::lint; use rustc_span::symbol::{kw, sym, Ident, Symbol}; -use rustc_span::{BytePos, Span}; +use rustc_span::{BytePos, Span, SyntaxContext}; use smallvec::{smallvec, SmallVec}; use rustc_span::source_map::{respan, Spanned}; @@ -620,7 +621,9 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> { self.resolve_arm(arm); } fn visit_block(&mut self, block: &'ast Block) { + let old_macro_rules = self.parent_scope.macro_rules; self.resolve_block(block); + self.parent_scope.macro_rules = old_macro_rules; } fn visit_anon_const(&mut self, constant: &'ast AnonConst) { // We deal with repeat expressions explicitly in `resolve_expr`. @@ -771,6 +774,7 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> { ); } fn visit_foreign_item(&mut self, foreign_item: &'ast ForeignItem) { + self.resolve_doc_links(&foreign_item.attrs); match foreign_item.kind { ForeignItemKind::TyAlias(box TyAlias { ref generics, .. }) => { self.with_generic_param_rib( @@ -1159,6 +1163,16 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> { }) }); } + + fn visit_variant(&mut self, v: &'ast Variant) { + self.resolve_doc_links(&v.attrs); + visit::walk_variant(self, v) + } + + fn visit_field_def(&mut self, f: &'ast FieldDef) { + self.resolve_doc_links(&f.attrs); + visit::walk_field_def(self, f) + } } impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { @@ -2185,6 +2199,8 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { } fn resolve_item(&mut self, item: &'ast Item) { + self.resolve_doc_links(&item.attrs); + let name = item.ident.name; debug!("(resolving item) resolving {} ({:?})", name, item.kind); @@ -2274,9 +2290,18 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { ); } - ItemKind::Mod(..) | ItemKind::ForeignMod(_) => { + ItemKind::Mod(..) => { self.with_scope(item.id, |this| { + this.resolve_doc_links(&item.attrs); + let old_macro_rules = this.parent_scope.macro_rules; visit::walk_item(this, item); + // Maintain macro_rules scopes in the same way as during early resolution + // for diagnostics and doc links. 
+ if item.attrs.iter().all(|attr| { + !attr.has_name(sym::macro_use) && !attr.has_name(sym::macro_escape) + }) { + this.parent_scope.macro_rules = old_macro_rules; + } }); } @@ -2309,14 +2334,22 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { self.future_proof_import(use_tree); } - ItemKind::ExternCrate(..) | ItemKind::MacroDef(..) => { - // do nothing, these are just around to be encoded + ItemKind::MacroDef(ref macro_def) => { + // Maintain macro_rules scopes in the same way as during early resolution + // for diagnostics and doc links. + if macro_def.macro_rules { + let (macro_rules_scope, _) = + self.r.macro_rules_scope(self.r.local_def_id(item.id)); + self.parent_scope.macro_rules = macro_rules_scope; + } } - ItemKind::GlobalAsm(_) => { + ItemKind::ForeignMod(_) | ItemKind::GlobalAsm(_) => { visit::walk_item(self, item); } + ItemKind::ExternCrate(..) => {} + ItemKind::MacCall(_) => panic!("unexpanded macro in resolve!"), } } @@ -2544,6 +2577,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { }; for item in trait_items { + self.resolve_doc_links(&item.attrs); match &item.kind { AssocItemKind::Const(_, ty, default) => { self.visit_ty(ty); @@ -2714,6 +2748,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { seen_trait_items: &mut FxHashMap, ) { use crate::ResolutionError::*; + self.resolve_doc_links(&item.attrs); match &item.kind { AssocItemKind::Const(_, ty, default) => { debug!("resolve_implementation AssocItemKind::Const"); @@ -4116,6 +4151,86 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { self.r.extra_lifetime_params_map.insert(async_node_id, extra_lifetime_params); } } + + fn resolve_and_cache_rustdoc_path(&mut self, path_str: &str, ns: Namespace) -> bool { + // FIXME: This caching may be incorrect in case of multiple `macro_rules` + // items with the same name in the same module. + // Also hygiene is not considered. + let mut doc_link_resolutions = std::mem::take(&mut self.r.doc_link_resolutions); + let res = doc_link_resolutions + .entry(self.parent_scope.module.nearest_parent_mod().expect_local()) + .or_default() + .entry((Symbol::intern(path_str), ns)) + .or_insert_with_key(|(path, ns)| { + let res = self.r.resolve_rustdoc_path(path.as_str(), *ns, self.parent_scope); + if let Some(res) = res + && let Some(def_id) = res.opt_def_id() + && !def_id.is_local() + && self.r.session.crate_types().contains(&CrateType::ProcMacro) { + // Encoding foreign def ids in proc macro crate metadata will ICE. + return None; + } + res + }) + .is_some(); + self.r.doc_link_resolutions = doc_link_resolutions; + res + } + + fn resolve_doc_links(&mut self, attrs: &[Attribute]) { + if !attrs.iter().any(|attr| attr.may_have_doc_links()) { + return; + } + + let mut need_traits_in_scope = false; + for path_str in rustdoc::attrs_to_preprocessed_links(attrs) { + // Resolve all namespaces due to no disambiguator or for diagnostics. + let mut any_resolved = false; + let mut need_assoc = false; + for ns in [TypeNS, ValueNS, MacroNS] { + if self.resolve_and_cache_rustdoc_path(&path_str, ns) { + any_resolved = true; + } else if ns != MacroNS { + need_assoc = true; + } + } + + // Resolve all prefixes for type-relative resolution or for diagnostics. 
+ if need_assoc || !any_resolved { + let mut path = &path_str[..]; + while let Some(idx) = path.rfind("::") { + path = &path[..idx]; + need_traits_in_scope = true; + for ns in [TypeNS, ValueNS, MacroNS] { + self.resolve_and_cache_rustdoc_path(path, ns); + } + } + } + } + + if need_traits_in_scope { + // FIXME: hygiene is not considered. + let mut doc_link_traits_in_scope = std::mem::take(&mut self.r.doc_link_traits_in_scope); + doc_link_traits_in_scope + .entry(self.parent_scope.module.nearest_parent_mod().expect_local()) + .or_insert_with(|| { + self.r + .traits_in_scope(None, &self.parent_scope, SyntaxContext::root(), None) + .into_iter() + .filter_map(|tr| { + if !tr.def_id.is_local() + && self.r.session.crate_types().contains(&CrateType::ProcMacro) + { + // Encoding foreign def ids in proc macro crate metadata will ICE. + return None; + } + Some(tr.def_id) + }) + .collect() + }); + self.r.doc_link_traits_in_scope = doc_link_traits_in_scope; + } + } } struct LifetimeCountVisitor<'a, 'b> { @@ -4162,6 +4277,7 @@ impl<'a> Resolver<'a> { pub(crate) fn late_resolve_crate(&mut self, krate: &Crate) { visit::walk_crate(&mut LifetimeCountVisitor { r: self }, krate); let mut late_resolution_visitor = LateResolutionVisitor::new(self); + late_resolution_visitor.resolve_doc_links(&krate.attrs); visit::walk_crate(&mut late_resolution_visitor, krate); for (id, span) in late_resolution_visitor.diagnostic_metadata.unused_labels.iter() { self.lint_buffer.buffer_lint(lint::builtin::UNUSED_LABELS, *id, *span, "unused label"); diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index 1b181b714005b..e61e83189c384 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -33,7 +33,7 @@ use rustc_data_structures::sync::{Lrc, RwLock}; use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed}; use rustc_expand::base::{DeriveResolutions, SyntaxExtension, SyntaxExtensionKind}; use rustc_hir::def::Namespace::*; -use rustc_hir::def::{self, CtorOf, DefKind, LifetimeRes, PartialRes}; +use rustc_hir::def::{self, CtorOf, DefKind, DocLinkResMap, LifetimeRes, PartialRes}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId}; use rustc_hir::def_id::{CRATE_DEF_ID, LOCAL_CRATE}; use rustc_hir::definitions::{DefPathData, Definitions}; @@ -78,6 +78,7 @@ mod ident; mod imports; mod late; mod macros; +pub mod rustdoc; enum Weak { Yes, @@ -138,17 +139,17 @@ enum ScopeSet<'a> { /// This struct is currently used only for early resolution (imports and macros), /// but not for late resolution yet. #[derive(Clone, Copy, Debug)] -pub struct ParentScope<'a> { - pub module: Module<'a>, +struct ParentScope<'a> { + module: Module<'a>, expansion: LocalExpnId, - pub macro_rules: MacroRulesScopeRef<'a>, + macro_rules: MacroRulesScopeRef<'a>, derives: &'a [ast::Path], } impl<'a> ParentScope<'a> { /// Creates a parent scope with the passed argument used as the module scope component, /// and other scope components set to default empty values. - pub fn module(module: Module<'a>, resolver: &Resolver<'a>) -> ParentScope<'a> { + fn module(module: Module<'a>, resolver: &Resolver<'a>) -> ParentScope<'a> { ParentScope { module, expansion: LocalExpnId::ROOT, @@ -1046,6 +1047,8 @@ pub struct Resolver<'a> { lifetime_elision_allowed: FxHashSet, effective_visibilities: EffectiveVisibilities, + doc_link_resolutions: FxHashMap, + doc_link_traits_in_scope: FxHashMap>, } /// Nothing really interesting here; it just provides memory for the rest of the crate. 
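(Aside: `resolve_and_cache_rustdoc_path` above memoizes one resolution per `(module, path, namespace)` triple, temporarily `mem::take`-ing the map to avoid a double borrow of the resolver. A minimal runnable sketch of that caching shape, with plain integers standing in for `LocalDefId` and `Res` — all names here are illustrative, not the patch's API:

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Ns { Type, Value, Macro }

// Simplified stand-ins: module ids and resolutions are plain integers here,
// where the patch uses `LocalDefId` and `Option<Res<ast::NodeId>>`.
type ModuleId = u32;
type Resolution = Option<u32>;
type DocLinkResMap = HashMap<(String, Ns), Resolution>;

struct Resolver {
    doc_link_resolutions: HashMap<ModuleId, DocLinkResMap>,
}

impl Resolver {
    // Pretend name resolution; the real code calls `resolve_rustdoc_path`.
    fn resolve(&self, path: &str, ns: Ns) -> Resolution {
        (path == "Vec" && ns == Ns::Type).then_some(0)
    }

    // Mirrors the memoization shape of `resolve_and_cache_rustdoc_path`:
    // one cache entry per (module, path, namespace), filled at most once.
    fn resolve_and_cache(&mut self, module: ModuleId, path: &str, ns: Ns) -> bool {
        // Take the map out to avoid borrowing `self` twice, as the patch does
        // with `std::mem::take`.
        let mut cache = std::mem::take(&mut self.doc_link_resolutions);
        let res = *cache
            .entry(module)
            .or_default()
            .entry((path.to_string(), ns))
            .or_insert_with(|| self.resolve(path, ns));
        self.doc_link_resolutions = cache;
        res.is_some()
    }
}

fn main() {
    let mut r = Resolver { doc_link_resolutions: HashMap::new() };
    assert!(r.resolve_and_cache(1, "Vec", Ns::Type));   // resolved and cached
    assert!(!r.resolve_and_cache(1, "Vec", Ns::Value)); // cached miss
}

As in the patch, a cached `None` is also meaningful: it records that the path does not resolve in that namespace, so the failure is not recomputed.)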
@@ -1374,6 +1377,8 @@ impl<'a> Resolver<'a> { confused_type_with_std_module: Default::default(), lifetime_elision_allowed: Default::default(), effective_visibilities: Default::default(), + doc_link_resolutions: Default::default(), + doc_link_traits_in_scope: Default::default(), }; let root_parent_scope = ParentScope::module(graph_root, &resolver); @@ -1450,6 +1455,8 @@ impl<'a> Resolver<'a> { proc_macros, confused_type_with_std_module, registered_tools: self.registered_tools, + doc_link_resolutions: self.doc_link_resolutions, + doc_link_traits_in_scope: self.doc_link_traits_in_scope, }; let ast_lowering = ty::ResolverAstLowering { legacy_const_generic_args: self.legacy_const_generic_args, @@ -1494,6 +1501,8 @@ impl<'a> Resolver<'a> { confused_type_with_std_module: self.confused_type_with_std_module.clone(), registered_tools: self.registered_tools.clone(), effective_visibilities: self.effective_visibilities.clone(), + doc_link_resolutions: self.doc_link_resolutions.clone(), + doc_link_traits_in_scope: self.doc_link_traits_in_scope.clone(), }; let ast_lowering = ty::ResolverAstLowering { legacy_const_generic_args: self.legacy_const_generic_args.clone(), @@ -1575,7 +1584,7 @@ impl<'a> Resolver<'a> { }); } - pub fn traits_in_scope( + fn traits_in_scope( &mut self, current_trait: Option>, parent_scope: &ParentScope<'a>, @@ -1927,7 +1936,7 @@ impl<'a> Resolver<'a> { /// isn't something that can be returned because it can't be made to live that long, /// and also it's a private type. Fortunately rustdoc doesn't need to know the error, /// just that an error occurred. - pub fn resolve_rustdoc_path( + fn resolve_rustdoc_path( &mut self, path_str: &str, ns: Namespace, @@ -1959,16 +1968,6 @@ impl<'a> Resolver<'a> { } } - /// For rustdoc. - /// For local modules returns only reexports, for external modules returns all children. - pub fn module_children_or_reexports(&self, def_id: DefId) -> Vec { - if let Some(def_id) = def_id.as_local() { - self.reexport_map.get(&def_id).cloned().unwrap_or_default() - } else { - self.cstore().module_children_untracked(def_id, self.session).collect() - } - } - /// For rustdoc. pub fn macro_rules_scope(&self, def_id: LocalDefId) -> (MacroRulesScopeRef<'a>, Res) { let scope = *self.macro_rules_scopes.get(&def_id).expect("not a `macro_rules` item"); @@ -1978,11 +1977,6 @@ impl<'a> Resolver<'a> { } } - /// For rustdoc. - pub fn get_partial_res(&self, node_id: NodeId) -> Option { - self.partial_res_map.get(&node_id).copied() - } - /// Retrieves the span of the given `DefId` if `DefId` is in the local crate. 
#[inline] pub fn opt_span(&self, def_id: DefId) -> Option { diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs index b5b1602c5e0d3..0c2e8be049884 100644 --- a/compiler/rustc_resolve/src/macros.rs +++ b/compiler/rustc_resolve/src/macros.rs @@ -568,7 +568,7 @@ impl<'a> Resolver<'a> { Ok((ext, res)) } - pub fn resolve_macro_path( + pub(crate) fn resolve_macro_path( &mut self, path: &ast::Path, kind: Option, diff --git a/compiler/rustc_resolve/src/rustdoc.rs b/compiler/rustc_resolve/src/rustdoc.rs new file mode 100644 index 0000000000000..86172afb87693 --- /dev/null +++ b/compiler/rustc_resolve/src/rustdoc.rs @@ -0,0 +1,361 @@ +use pulldown_cmark::{BrokenLink, Event, Options, Parser, Tag}; +use rustc_ast as ast; +use rustc_ast::util::comments::beautify_doc_string; +use rustc_data_structures::fx::FxHashMap; +use rustc_span::def_id::DefId; +use rustc_span::symbol::{kw, Symbol}; +use rustc_span::Span; +use std::cell::RefCell; +use std::{cmp, mem}; + +#[derive(Clone, Copy, PartialEq, Eq, Debug)] +pub enum DocFragmentKind { + /// A doc fragment created from a `///` or `//!` doc comment. + SugaredDoc, + /// A doc fragment created from a "raw" `#[doc=""]` attribute. + RawDoc, +} + +/// A portion of documentation, extracted from a `#[doc]` attribute. +/// +/// Each variant contains the line number within the complete doc-comment where the fragment +/// starts, as well as the Span where the corresponding doc comment or attribute is located. +/// +/// Included files are kept separate from inline doc comments so that proper line-number +/// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are +/// kept separate because of issue #42760. +#[derive(Clone, PartialEq, Eq, Debug)] +pub struct DocFragment { + pub span: Span, + /// The module this doc-comment came from. + /// + /// This allows distinguishing between the original documentation and a pub re-export. + /// If it is `None`, the item was not re-exported. + pub parent_module: Option, + pub doc: Symbol, + pub kind: DocFragmentKind, + pub indent: usize, +} + +#[derive(Clone, Copy, Debug)] +pub enum MalformedGenerics { + /// This link has unbalanced angle brackets. + /// + /// For example, `Vec>`. + UnbalancedAngleBrackets, + /// The generics are not attached to a type. + /// + /// For example, `` should trigger this. + /// + /// This is detected by checking if the path is empty after the generics are stripped. + MissingType, + /// The link uses fully-qualified syntax, which is currently unsupported. + /// + /// For example, `::into_iter` should trigger this. + /// + /// This is detected by checking if ` as ` (the keyword `as` with spaces around it) is inside + /// angle brackets. + HasFullyQualifiedSyntax, + /// The link has an invalid path separator. + /// + /// For example, `Vec::new()` should trigger this. Note that `Vec:new()` will **not** + /// trigger this because it has no generics and thus [`strip_generics_from_path`] will not be + /// called. + /// + /// Note that this will also **not** be triggered if the invalid path separator is inside angle + /// brackets because rustdoc mostly ignores what's inside angle brackets (except for + /// [`HasFullyQualifiedSyntax`](MalformedGenerics::HasFullyQualifiedSyntax)). + /// + /// This is detected by checking if there is a colon followed by a non-colon in the link. + InvalidPathSeparator, + /// The link has too many angle brackets. + /// + /// For example, `Vec<>` should trigger this. 
+ TooManyAngleBrackets, + /// The link has empty angle brackets. + /// + /// For example, `Vec<>` should trigger this. + EmptyAngleBrackets, +} + +/// Removes excess indentation on comments in order for the Markdown +/// to be parsed correctly. This is necessary because the convention for +/// writing documentation is to provide a space between the /// or //! marker +/// and the doc text, but Markdown is whitespace-sensitive. For example, +/// a block of text with four-space indentation is parsed as a code block, +/// so if we didn't unindent comments, these list items +/// +/// /// A list: +/// /// +/// /// - Foo +/// /// - Bar +/// +/// would be parsed as if they were in a code block, which is likely not what the user intended. +pub fn unindent_doc_fragments(docs: &mut [DocFragment]) { + // `add` is used in case the most common sugared doc syntax is used ("/// "). The other + // fragments kind's lines are never starting with a whitespace unless they are using some + // markdown formatting requiring it. Therefore, if the doc block have a mix between the two, + // we need to take into account the fact that the minimum indent minus one (to take this + // whitespace into account). + // + // For example: + // + // /// hello! + // #[doc = "another"] + // + // In this case, you want "hello! another" and not "hello! another". + let add = if docs.windows(2).any(|arr| arr[0].kind != arr[1].kind) + && docs.iter().any(|d| d.kind == DocFragmentKind::SugaredDoc) + { + // In case we have a mix of sugared doc comments and "raw" ones, we want the sugared one to + // "decide" how much the minimum indent will be. + 1 + } else { + 0 + }; + + // `min_indent` is used to know how much whitespaces from the start of each lines must be + // removed. Example: + // + // /// hello! + // #[doc = "another"] + // + // In here, the `min_indent` is 1 (because non-sugared fragment are always counted with minimum + // 1 whitespace), meaning that "hello!" will be considered a codeblock because it starts with 4 + // (5 - 1) whitespaces. + let Some(min_indent) = docs + .iter() + .map(|fragment| { + fragment.doc.as_str().lines().fold(usize::MAX, |min_indent, line| { + if line.chars().all(|c| c.is_whitespace()) { + min_indent + } else { + // Compare against either space or tab, ignoring whether they are + // mixed or not. + let whitespace = line.chars().take_while(|c| *c == ' ' || *c == '\t').count(); + cmp::min(min_indent, whitespace) + + if fragment.kind == DocFragmentKind::SugaredDoc { 0 } else { add } + } + }) + }) + .min() + else { + return; + }; + + for fragment in docs { + if fragment.doc == kw::Empty { + continue; + } + + let min_indent = if fragment.kind != DocFragmentKind::SugaredDoc && min_indent > 0 { + min_indent - add + } else { + min_indent + }; + + fragment.indent = min_indent; + } +} + +/// The goal of this function is to apply the `DocFragment` transformation that is required when +/// transforming into the final Markdown, which is applying the computed indent to each line in +/// each doc fragment (a `DocFragment` can contain multiple lines in case of `#[doc = ""]`). 
+/// +/// Note: remove the trailing newline where appropriate +pub fn add_doc_fragment(out: &mut String, frag: &DocFragment) { + let s = frag.doc.as_str(); + let mut iter = s.lines(); + if s.is_empty() { + out.push('\n'); + return; + } + while let Some(line) = iter.next() { + if line.chars().any(|c| !c.is_whitespace()) { + assert!(line.len() >= frag.indent); + out.push_str(&line[frag.indent..]); + } else { + out.push_str(line); + } + out.push('\n'); + } +} + +pub fn attrs_to_doc_fragments<'a>( + attrs: impl Iterator)>, + doc_only: bool, +) -> (Vec, ast::AttrVec) { + let mut doc_fragments = Vec::new(); + let mut other_attrs = ast::AttrVec::new(); + for (attr, parent_module) in attrs { + if let Some((doc_str, comment_kind)) = attr.doc_str_and_comment_kind() { + let doc = beautify_doc_string(doc_str, comment_kind); + let kind = if attr.is_doc_comment() { + DocFragmentKind::SugaredDoc + } else { + DocFragmentKind::RawDoc + }; + let fragment = DocFragment { span: attr.span, doc, kind, parent_module, indent: 0 }; + doc_fragments.push(fragment); + } else if !doc_only { + other_attrs.push(attr.clone()); + } + } + + unindent_doc_fragments(&mut doc_fragments); + + (doc_fragments, other_attrs) +} + +/// Return the doc-comments on this item, grouped by the module they came from. +/// The module can be different if this is a re-export with added documentation. +/// +/// The last newline is not trimmed so the produced strings are reusable between +/// early and late doc link resolution regardless of their position. +pub fn prepare_to_doc_link_resolution( + doc_fragments: &[DocFragment], +) -> FxHashMap, String> { + let mut res = FxHashMap::default(); + for fragment in doc_fragments { + let out_str = res.entry(fragment.parent_module).or_default(); + add_doc_fragment(out_str, fragment); + } + res +} + +/// Options for rendering Markdown in the main body of documentation. +pub fn main_body_opts() -> Options { + Options::ENABLE_TABLES + | Options::ENABLE_FOOTNOTES + | Options::ENABLE_STRIKETHROUGH + | Options::ENABLE_TASKLISTS + | Options::ENABLE_SMART_PUNCTUATION +} + +fn strip_generics_from_path_segment(segment: Vec) -> Result { + let mut stripped_segment = String::new(); + let mut param_depth = 0; + + let mut latest_generics_chunk = String::new(); + + for c in segment { + if c == '<' { + param_depth += 1; + latest_generics_chunk.clear(); + } else if c == '>' { + param_depth -= 1; + if latest_generics_chunk.contains(" as ") { + // The segment tries to use fully-qualified syntax, which is currently unsupported. + // Give a helpful error message instead of completely ignoring the angle brackets. + return Err(MalformedGenerics::HasFullyQualifiedSyntax); + } + } else { + if param_depth == 0 { + stripped_segment.push(c); + } else { + latest_generics_chunk.push(c); + } + } + } + + if param_depth == 0 { + Ok(stripped_segment) + } else { + // The segment has unbalanced angle brackets, e.g. 
`Vec>` + Err(MalformedGenerics::UnbalancedAngleBrackets) + } +} + +pub fn strip_generics_from_path(path_str: &str) -> Result { + if !path_str.contains(['<', '>']) { + return Ok(path_str.to_string()); + } + let mut stripped_segments = vec![]; + let mut path = path_str.chars().peekable(); + let mut segment = Vec::new(); + + while let Some(chr) = path.next() { + match chr { + ':' => { + if path.next_if_eq(&':').is_some() { + let stripped_segment = + strip_generics_from_path_segment(mem::take(&mut segment))?; + if !stripped_segment.is_empty() { + stripped_segments.push(stripped_segment); + } + } else { + return Err(MalformedGenerics::InvalidPathSeparator); + } + } + '<' => { + segment.push(chr); + + match path.next() { + Some('<') => { + return Err(MalformedGenerics::TooManyAngleBrackets); + } + Some('>') => { + return Err(MalformedGenerics::EmptyAngleBrackets); + } + Some(chr) => { + segment.push(chr); + + while let Some(chr) = path.next_if(|c| *c != '>') { + segment.push(chr); + } + } + None => break, + } + } + _ => segment.push(chr), + } + trace!("raw segment: {:?}", segment); + } + + if !segment.is_empty() { + let stripped_segment = strip_generics_from_path_segment(segment)?; + if !stripped_segment.is_empty() { + stripped_segments.push(stripped_segment); + } + } + + debug!("path_str: {:?}\nstripped segments: {:?}", path_str, &stripped_segments); + + let stripped_path = stripped_segments.join("::"); + + if !stripped_path.is_empty() { Ok(stripped_path) } else { Err(MalformedGenerics::MissingType) } +} + +/// Simplified version of the corresponding function in rustdoc. +/// If the rustdoc version returns a successful result, this function must return the same result. +/// Otherwise this function may return anything. +fn preprocess_link(link: &str) -> String { + let link = link.replace('`', ""); + let link = link.split('#').next().unwrap(); + let link = link.rsplit('@').next().unwrap(); + let link = link.strip_suffix("()").unwrap_or(link); + let link = link.strip_suffix("{}").unwrap_or(link); + let link = link.strip_suffix("[]").unwrap_or(link); + let link = if link != "!" { link.strip_suffix("!").unwrap_or(link) } else { link }; + strip_generics_from_path(link).unwrap_or_else(|_| link.to_string()) +} + +/// Simplified version of `preprocessed_markdown_links` from rustdoc. +/// Must return at least the same links as it, but may add some more links on top of that. 
+pub(crate) fn attrs_to_preprocessed_links(attrs: &[ast::Attribute]) -> Vec { + let (doc_fragments, _) = attrs_to_doc_fragments(attrs.iter().map(|attr| (attr, None)), true); + let doc = prepare_to_doc_link_resolution(&doc_fragments).into_values().next().unwrap(); + + let links = RefCell::new(Vec::new()); + let mut callback = |link: BrokenLink<'_>| { + links.borrow_mut().push(preprocess_link(&link.reference)); + None + }; + for event in Parser::new_with_broken_link_callback(&doc, main_body_opts(), Some(&mut callback)) + { + if let Event::Start(Tag::Link(_, dest, _)) = event { + links.borrow_mut().push(preprocess_link(&dest)); + } + } + links.into_inner() +} diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml index 0271c27b4f522..5e592227d49c5 100644 --- a/src/librustdoc/Cargo.toml +++ b/src/librustdoc/Cargo.toml @@ -12,7 +12,6 @@ askama = { version = "0.11", default-features = false, features = ["config"] } itertools = "0.10.1" minifier = "0.2.2" once_cell = "1.10.0" -pulldown-cmark = { version = "0.9.2", default-features = false } regex = "1" rustdoc-json-types = { path = "../rustdoc-json-types" } serde_json = "1.0" diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index 85dd3881593ac..de350af515948 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -5,12 +5,11 @@ use std::path::PathBuf; use std::rc::Rc; use std::sync::Arc; use std::sync::OnceLock as OnceCell; -use std::{cmp, fmt, iter}; +use std::{fmt, iter}; use arrayvec::ArrayVec; use thin_vec::ThinVec; -use rustc_ast::util::comments::beautify_doc_string; use rustc_ast::{self as ast, AttrStyle}; use rustc_attr::{ConstStability, Deprecation, Stability, StabilityLevel}; use rustc_const_eval::const_eval::is_unstable_const_fn; @@ -24,6 +23,7 @@ use rustc_hir_analysis::check::intrinsic::intrinsic_operation_unsafety; use rustc_index::vec::IndexVec; use rustc_middle::ty::fast_reject::SimplifiedType; use rustc_middle::ty::{self, DefIdTree, TyCtxt, Visibility}; +use rustc_resolve::rustdoc::{add_doc_fragment, attrs_to_doc_fragments, DocFragment}; use rustc_session::Session; use rustc_span::hygiene::MacroKind; use rustc_span::symbol::{kw, sym, Ident, Symbol}; @@ -1010,58 +1010,6 @@ impl> NestedAttributesExt for I { } } -/// A portion of documentation, extracted from a `#[doc]` attribute. -/// -/// Each variant contains the line number within the complete doc-comment where the fragment -/// starts, as well as the Span where the corresponding doc comment or attribute is located. -/// -/// Included files are kept separate from inline doc comments so that proper line-number -/// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are -/// kept separate because of issue #42760. -#[derive(Clone, PartialEq, Eq, Debug)] -pub(crate) struct DocFragment { - pub(crate) span: rustc_span::Span, - /// The module this doc-comment came from. - /// - /// This allows distinguishing between the original documentation and a pub re-export. - /// If it is `None`, the item was not re-exported. - pub(crate) parent_module: Option, - pub(crate) doc: Symbol, - pub(crate) kind: DocFragmentKind, - pub(crate) indent: usize, -} - -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub(crate) enum DocFragmentKind { - /// A doc fragment created from a `///` or `//!` doc comment. - SugaredDoc, - /// A doc fragment created from a "raw" `#[doc=""]` attribute. 
- RawDoc, -} - -/// The goal of this function is to apply the `DocFragment` transformation that is required when -/// transforming into the final Markdown, which is applying the computed indent to each line in -/// each doc fragment (a `DocFragment` can contain multiple lines in case of `#[doc = ""]`). -/// -/// Note: remove the trailing newline where appropriate -fn add_doc_fragment(out: &mut String, frag: &DocFragment) { - let s = frag.doc.as_str(); - let mut iter = s.lines(); - if s.is_empty() { - out.push('\n'); - return; - } - while let Some(line) = iter.next() { - if line.chars().any(|c| !c.is_whitespace()) { - assert!(line.len() >= frag.indent); - out.push_str(&line[frag.indent..]); - } else { - out.push_str(line); - } - out.push('\n'); - } -} - /// Collapse a collection of [`DocFragment`]s into one string, /// handling indentation and newlines as needed. pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String { @@ -1073,86 +1021,6 @@ pub(crate) fn collapse_doc_fragments(doc_strings: &[DocFragment]) -> String { acc } -/// Removes excess indentation on comments in order for the Markdown -/// to be parsed correctly. This is necessary because the convention for -/// writing documentation is to provide a space between the /// or //! marker -/// and the doc text, but Markdown is whitespace-sensitive. For example, -/// a block of text with four-space indentation is parsed as a code block, -/// so if we didn't unindent comments, these list items -/// -/// /// A list: -/// /// -/// /// - Foo -/// /// - Bar -/// -/// would be parsed as if they were in a code block, which is likely not what the user intended. -fn unindent_doc_fragments(docs: &mut Vec) { - // `add` is used in case the most common sugared doc syntax is used ("/// "). The other - // fragments kind's lines are never starting with a whitespace unless they are using some - // markdown formatting requiring it. Therefore, if the doc block have a mix between the two, - // we need to take into account the fact that the minimum indent minus one (to take this - // whitespace into account). - // - // For example: - // - // /// hello! - // #[doc = "another"] - // - // In this case, you want "hello! another" and not "hello! another". - let add = if docs.windows(2).any(|arr| arr[0].kind != arr[1].kind) - && docs.iter().any(|d| d.kind == DocFragmentKind::SugaredDoc) - { - // In case we have a mix of sugared doc comments and "raw" ones, we want the sugared one to - // "decide" how much the minimum indent will be. - 1 - } else { - 0 - }; - - // `min_indent` is used to know how much whitespaces from the start of each lines must be - // removed. Example: - // - // /// hello! - // #[doc = "another"] - // - // In here, the `min_indent` is 1 (because non-sugared fragment are always counted with minimum - // 1 whitespace), meaning that "hello!" will be considered a codeblock because it starts with 4 - // (5 - 1) whitespaces. - let Some(min_indent) = docs - .iter() - .map(|fragment| { - fragment.doc.as_str().lines().fold(usize::MAX, |min_indent, line| { - if line.chars().all(|c| c.is_whitespace()) { - min_indent - } else { - // Compare against either space or tab, ignoring whether they are - // mixed or not. 
- let whitespace = line.chars().take_while(|c| *c == ' ' || *c == '\t').count(); - cmp::min(min_indent, whitespace) - + if fragment.kind == DocFragmentKind::SugaredDoc { 0 } else { add } - } - }) - }) - .min() - else { - return; - }; - - for fragment in docs { - if fragment.doc == kw::Empty { - continue; - } - - let min_indent = if fragment.kind != DocFragmentKind::SugaredDoc && min_indent > 0 { - min_indent - add - } else { - min_indent - }; - - fragment.indent = min_indent; - } -} - /// A link that has not yet been rendered. /// /// This link will be turned into a rendered link by [`Item::links`]. @@ -1231,26 +1099,7 @@ impl Attributes { attrs: impl Iterator)>, doc_only: bool, ) -> Attributes { - let mut doc_strings = Vec::new(); - let mut other_attrs = ast::AttrVec::new(); - for (attr, parent_module) in attrs { - if let Some((doc_str, comment_kind)) = attr.doc_str_and_comment_kind() { - trace!("got doc_str={doc_str:?}"); - let doc = beautify_doc_string(doc_str, comment_kind); - let kind = if attr.is_doc_comment() { - DocFragmentKind::SugaredDoc - } else { - DocFragmentKind::RawDoc - }; - let fragment = DocFragment { span: attr.span, doc, kind, parent_module, indent: 0 }; - doc_strings.push(fragment); - } else if !doc_only { - other_attrs.push(attr.clone()); - } - } - - unindent_doc_fragments(&mut doc_strings); - + let (doc_strings, other_attrs) = attrs_to_doc_fragments(attrs, doc_only); Attributes { doc_strings, other_attrs } } @@ -1269,20 +1118,6 @@ impl Attributes { if out.is_empty() { None } else { Some(out) } } - /// Return the doc-comments on this item, grouped by the module they came from. - /// The module can be different if this is a re-export with added documentation. - /// - /// The last newline is not trimmed so the produced strings are reusable between - /// early and late doc link resolution regardless of their position. - pub(crate) fn prepare_to_doc_link_resolution(&self) -> FxHashMap, String> { - let mut res = FxHashMap::default(); - for fragment in &self.doc_strings { - let out_str = res.entry(fragment.parent_module).or_default(); - add_doc_fragment(out_str, fragment); - } - res - } - /// Finds all `doc` attributes as NameValues and returns their corresponding values, joined /// with newlines. 
pub(crate) fn collapsed_doc_value(&self) -> Option { diff --git a/src/librustdoc/clean/types/tests.rs b/src/librustdoc/clean/types/tests.rs index 71eddf4348f1e..20627c2cfc164 100644 --- a/src/librustdoc/clean/types/tests.rs +++ b/src/librustdoc/clean/types/tests.rs @@ -2,6 +2,7 @@ use super::*; use crate::clean::collapse_doc_fragments; +use rustc_resolve::rustdoc::{unindent_doc_fragments, DocFragment, DocFragmentKind}; use rustc_span::create_default_session_globals_then; use rustc_span::source_map::DUMMY_SP; use rustc_span::symbol::Symbol; diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 0ce43f7db8e8b..e8316ac976b5f 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -5,14 +5,13 @@ use rustc_data_structures::unord::UnordSet; use rustc_errors::emitter::{Emitter, EmitterWriter}; use rustc_errors::json::JsonEmitter; use rustc_feature::UnstableFeatures; -use rustc_hir::def::{Namespace, Res}; +use rustc_hir::def::Res; use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LocalDefId}; use rustc_hir::intravisit::{self, Visitor}; -use rustc_hir::{HirId, Path, TraitCandidate}; +use rustc_hir::{HirId, Path}; use rustc_interface::interface; use rustc_middle::hir::nested_filter; use rustc_middle::ty::{ParamEnv, Ty, TyCtxt}; -use rustc_resolve as resolve; use rustc_session::config::{self, CrateType, ErrorOutputType}; use rustc_session::lint; use rustc_session::Session; @@ -35,10 +34,6 @@ pub(crate) use rustc_session::config::{Input, Options, UnstableOptions}; pub(crate) struct ResolverCaches { pub(crate) markdown_links: Option>>, - pub(crate) doc_link_resolutions: FxHashMap<(Symbol, Namespace, DefId), Option>>, - /// Traits in scope for a given module. - /// See `collect_intra_doc_links::traits_implemented_by` for more details. - pub(crate) traits_in_scope: DefIdMap>, pub(crate) all_trait_impls: Option>, pub(crate) all_macro_rules: FxHashMap>, pub(crate) extern_doc_reachable: DefIdSet, @@ -46,12 +41,6 @@ pub(crate) struct ResolverCaches { pub(crate) struct DocContext<'tcx> { pub(crate) tcx: TyCtxt<'tcx>, - /// Name resolver. Used for intra-doc links. - /// - /// The `Rc>` wrapping is needed because that is what's returned by - /// [`rustc_interface::Queries::expansion()`]. - // FIXME: see if we can get rid of this RefCell somehow - pub(crate) resolver: Rc>, pub(crate) resolver_caches: ResolverCaches, /// Used for normalization. /// @@ -100,13 +89,6 @@ impl<'tcx> DocContext<'tcx> { ret } - pub(crate) fn enter_resolver(&self, f: F) -> R - where - F: FnOnce(&mut resolve::Resolver<'_>) -> R, - { - self.resolver.borrow_mut().access(f) - } - /// Call the closure with the given parameters set as /// the substitutions for a type alias' RHS. 
pub(crate) fn enter_alias(&mut self, substs: DefIdMap, f: F) -> R @@ -313,7 +295,6 @@ pub(crate) fn create_config( pub(crate) fn run_global_ctxt( tcx: TyCtxt<'_>, - resolver: Rc>, resolver_caches: ResolverCaches, show_coverage: bool, render_options: RenderOptions, @@ -348,7 +329,6 @@ pub(crate) fn run_global_ctxt( let mut ctxt = DocContext { tcx, - resolver, resolver_caches, param_env: ParamEnv::empty(), external_traits: Default::default(), diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index 94de93e7a9916..dee0a01a65413 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -28,6 +28,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::DefId; use rustc_middle::ty::TyCtxt; +pub(crate) use rustc_resolve::rustdoc::main_body_opts; use rustc_span::edition::Edition; use rustc_span::{Span, Symbol}; @@ -58,15 +59,6 @@ mod tests; const MAX_HEADER_LEVEL: u32 = 6; -/// Options for rendering Markdown in the main body of documentation. -pub(crate) fn main_body_opts() -> Options { - Options::ENABLE_TABLES - | Options::ENABLE_FOOTNOTES - | Options::ENABLE_STRIKETHROUGH - | Options::ENABLE_TASKLISTS - | Options::ENABLE_SMART_PUNCTUATION -} - /// Options for rendering Markdown in summaries (e.g., in search results). pub(crate) fn summary_opts() -> Options { Options::ENABLE_TABLES diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 64108c8828518..0d4a87eb4fe3a 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -31,6 +31,7 @@ extern crate tracing; // // Dependencies listed in Cargo.toml do not need `extern crate`. +extern crate pulldown_cmark; extern crate rustc_ast; extern crate rustc_ast_pretty; extern crate rustc_attr; @@ -792,22 +793,13 @@ fn main_args(at_args: &[String]) -> MainResult { } compiler.enter(|queries| { - // We need to hold on to the complete resolver, so we cause everything to be - // cloned for the analysis passes to use. Suboptimal, but necessary in the - // current architecture. - // FIXME(#83761): Resolver cloning can lead to inconsistencies between data in the - // two copies because one of the copies can be modified after `TyCtxt` construction. 
- let (resolver, resolver_caches) = { + let resolver_caches = { let expansion = abort_on_err(queries.expansion(), sess); let (krate, resolver, _) = &*expansion.borrow(); let resolver_caches = resolver.borrow_mut().access(|resolver| { - collect_intra_doc_links::early_resolve_intra_doc_links( - resolver, - krate, - render_options.document_private, - ) + collect_intra_doc_links::early_resolve_intra_doc_links(resolver, krate) }); - (resolver.clone(), resolver_caches) + resolver_caches }; if sess.diagnostic().has_errors_or_lint_errors().is_some() { @@ -820,7 +812,6 @@ fn main_args(at_args: &[String]) -> MainResult { let (krate, render_opts, mut cache) = sess.time("run_global_ctxt", || { core::run_global_ctxt( tcx, - resolver, resolver_caches, show_coverage, render_options, diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index 8435972bb11f2..692adcf0a8091 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -15,7 +15,8 @@ use rustc_hir::def_id::{DefId, CRATE_DEF_ID}; use rustc_hir::Mutability; use rustc_middle::ty::{DefIdTree, Ty, TyCtxt}; use rustc_middle::{bug, ty}; -use rustc_resolve::ParentScope; +use rustc_resolve::rustdoc::MalformedGenerics; +use rustc_resolve::rustdoc::{prepare_to_doc_link_resolution, strip_generics_from_path}; use rustc_session::lint::Lint; use rustc_span::hygiene::MacroKind; use rustc_span::symbol::{sym, Ident, Symbol}; @@ -23,7 +24,6 @@ use rustc_span::BytePos; use smallvec::{smallvec, SmallVec}; use std::borrow::Cow; -use std::mem; use std::ops::Range; use crate::clean::{self, utils::find_nearest_parent_module}; @@ -179,47 +179,6 @@ enum ResolutionFailure<'a> { NotResolved(UnresolvedPath<'a>), } -#[derive(Clone, Copy, Debug)] -enum MalformedGenerics { - /// This link has unbalanced angle brackets. - /// - /// For example, `Vec>`. - UnbalancedAngleBrackets, - /// The generics are not attached to a type. - /// - /// For example, `` should trigger this. - /// - /// This is detected by checking if the path is empty after the generics are stripped. - MissingType, - /// The link uses fully-qualified syntax, which is currently unsupported. - /// - /// For example, `::into_iter` should trigger this. - /// - /// This is detected by checking if ` as ` (the keyword `as` with spaces around it) is inside - /// angle brackets. - HasFullyQualifiedSyntax, - /// The link has an invalid path separator. - /// - /// For example, `Vec::new()` should trigger this. Note that `Vec:new()` will **not** - /// trigger this because it has no generics and thus [`strip_generics_from_path`] will not be - /// called. - /// - /// Note that this will also **not** be triggered if the invalid path separator is inside angle - /// brackets because rustdoc mostly ignores what's inside angle brackets (except for - /// [`HasFullyQualifiedSyntax`](MalformedGenerics::HasFullyQualifiedSyntax)). - /// - /// This is detected by checking if there is a colon followed by a non-colon in the link. - InvalidPathSeparator, - /// The link has too many angle brackets. - /// - /// For example, `Vec<>` should trigger this. - TooManyAngleBrackets, - /// The link has empty angle brackets. - /// - /// For example, `Vec<>` should trigger this. - EmptyAngleBrackets, -} - #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub(crate) enum UrlFragment { Item(DefId), @@ -407,10 +366,10 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> { }) } - /// Convenience wrapper around `resolve_rustdoc_path`. 
+ /// Convenience wrapper around `doc_link_resolutions`. /// /// This also handles resolving `true` and `false` as booleans. - /// NOTE: `resolve_rustdoc_path` knows only about paths, not about types. + /// NOTE: `doc_link_resolutions` knows only about paths, not about types. /// Associated items will never be resolved by this function. fn resolve_path( &self, @@ -426,17 +385,11 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> { // Resolver doesn't know about true, false, and types that aren't paths (e.g. `()`). let result = self .cx - .resolver_caches - .doc_link_resolutions - .get(&(Symbol::intern(path_str), ns, module_id)) + .tcx + .doc_link_resolutions(module_id) + .get(&(Symbol::intern(path_str), ns)) .copied() - .unwrap_or_else(|| { - self.cx.enter_resolver(|resolver| { - let parent_scope = - ParentScope::module(resolver.expect_module(module_id), resolver); - resolver.resolve_rustdoc_path(path_str, ns, parent_scope) - }) - }) + .unwrap_or_else(|| panic!("no resolution for {:?} {:?} {:?}", path_str, ns, module_id)) .and_then(|res| res.try_into().ok()) .or_else(|| resolve_primitive(path_str, ns)); debug!("{} resolved to {:?} in namespace {:?}", path_str, result, ns); @@ -779,8 +732,7 @@ fn trait_impls_for<'a>( module: DefId, ) -> FxHashSet<(DefId, DefId)> { let tcx = cx.tcx; - let iter = cx.resolver_caches.traits_in_scope[&module].iter().flat_map(|trait_candidate| { - let trait_ = trait_candidate.def_id; + let iter = tcx.doc_link_traits_in_scope(module).iter().flat_map(|&trait_| { trace!("considering explicit impl for trait {:?}", trait_); // Look at each trait implementation to see if it's an impl for `did` @@ -846,7 +798,7 @@ impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> { // In the presence of re-exports, this is not the same as the module of the item. // Rather than merging all documentation into one, resolve it one attribute at a time // so we know which module it came from. - for (parent_module, doc) in item.attrs.prepare_to_doc_link_resolution() { + for (parent_module, doc) in prepare_to_doc_link_resolution(&item.attrs.doc_strings) { if !may_have_doc_links(&doc) { continue; } @@ -975,16 +927,12 @@ fn preprocess_link( } // Strip generics from the path. - let path_str = if path_str.contains(['<', '>'].as_slice()) { - match strip_generics_from_path(path_str) { - Ok(path) => path, - Err(err) => { - debug!("link has malformed generics: {}", path_str); - return Some(Err(PreprocessingError::MalformedGenerics(err, path_str.to_owned()))); - } + let path_str = match strip_generics_from_path(path_str) { + Ok(path) => path, + Err(err) => { + debug!("link has malformed generics: {}", path_str); + return Some(Err(PreprocessingError::MalformedGenerics(err, path_str.to_owned()))); } - } else { - path_str.to_owned() }; // Sanity check to make sure we don't have any angle brackets after stripping generics. 
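(Aside: before resolution, doc links are normalized — backticks, `#` fragments, `@` disambiguators, and trailing `()`/`{}`/`[]`/`!` are stripped, and generic arguments are removed so that e.g. `Vec<T>::new` resolves as `Vec::new`. A simplified standalone analogue of the generics stripping; the real `strip_generics_from_path` additionally reports `MalformedGenerics` errors for unbalanced brackets, fully-qualified syntax, and bad separators:

// Simplified analogue of the generics stripping used for doc links: drop
// everything inside angle brackets, keep the rest of the path untouched.
fn strip_generics(path: &str) -> String {
    let mut out = String::new();
    let mut depth = 0usize;
    for c in path.chars() {
        match c {
            '<' => depth += 1,
            '>' => depth = depth.saturating_sub(1),
            _ if depth == 0 => out.push(c),
            _ => {} // inside angle brackets: ignored
        }
    }
    out
}

fn main() {
    // Generic arguments are irrelevant to path resolution, so they are removed.
    assert_eq!(strip_generics("Vec<T>::new"), "Vec::new");
    assert_eq!(strip_generics("HashMap<K, V>"), "HashMap");
    assert_eq!(strip_generics("Option"), "Option");
    println!("ok");
}

This keeps the early-resolution version in `rustc_resolve::rustdoc` and the late rustdoc pass agreeing on the same preprocessed path strings.)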
@@ -2064,94 +2012,3 @@ fn resolve_primitive(path_str: &str, ns: Namespace) -> Option { debug!("resolved primitives {:?}", prim); Some(Res::Primitive(prim)) } - -fn strip_generics_from_path(path_str: &str) -> Result { - let mut stripped_segments = vec![]; - let mut path = path_str.chars().peekable(); - let mut segment = Vec::new(); - - while let Some(chr) = path.next() { - match chr { - ':' => { - if path.next_if_eq(&':').is_some() { - let stripped_segment = - strip_generics_from_path_segment(mem::take(&mut segment))?; - if !stripped_segment.is_empty() { - stripped_segments.push(stripped_segment); - } - } else { - return Err(MalformedGenerics::InvalidPathSeparator); - } - } - '<' => { - segment.push(chr); - - match path.next() { - Some('<') => { - return Err(MalformedGenerics::TooManyAngleBrackets); - } - Some('>') => { - return Err(MalformedGenerics::EmptyAngleBrackets); - } - Some(chr) => { - segment.push(chr); - - while let Some(chr) = path.next_if(|c| *c != '>') { - segment.push(chr); - } - } - None => break, - } - } - _ => segment.push(chr), - } - trace!("raw segment: {:?}", segment); - } - - if !segment.is_empty() { - let stripped_segment = strip_generics_from_path_segment(segment)?; - if !stripped_segment.is_empty() { - stripped_segments.push(stripped_segment); - } - } - - debug!("path_str: {:?}\nstripped segments: {:?}", path_str, &stripped_segments); - - let stripped_path = stripped_segments.join("::"); - - if !stripped_path.is_empty() { Ok(stripped_path) } else { Err(MalformedGenerics::MissingType) } -} - -fn strip_generics_from_path_segment(segment: Vec) -> Result { - let mut stripped_segment = String::new(); - let mut param_depth = 0; - - let mut latest_generics_chunk = String::new(); - - for c in segment { - if c == '<' { - param_depth += 1; - latest_generics_chunk.clear(); - } else if c == '>' { - param_depth -= 1; - if latest_generics_chunk.contains(" as ") { - // The segment tries to use fully-qualified syntax, which is currently unsupported. - // Give a helpful error message instead of completely ignoring the angle brackets. - return Err(MalformedGenerics::HasFullyQualifiedSyntax); - } - } else { - if param_depth == 0 { - stripped_segment.push(c); - } else { - latest_generics_chunk.push(c); - } - } - } - - if param_depth == 0 { - Ok(stripped_segment) - } else { - // The segment has unbalanced angle brackets, e.g. 
`Vec>` - Err(MalformedGenerics::UnbalancedAngleBrackets) - } -} diff --git a/src/librustdoc/passes/collect_intra_doc_links/early.rs b/src/librustdoc/passes/collect_intra_doc_links/early.rs index f690c49005d9c..75c3380ee9bb9 100644 --- a/src/librustdoc/passes/collect_intra_doc_links/early.rs +++ b/src/librustdoc/passes/collect_intra_doc_links/early.rs @@ -1,357 +1,56 @@ -use crate::clean::Attributes; use crate::core::ResolverCaches; -use crate::passes::collect_intra_doc_links::preprocessed_markdown_links; -use crate::passes::collect_intra_doc_links::{Disambiguator, PreprocessedMarkdownLink}; use crate::visit_lib::early_lib_embargo_visit_item; -use rustc_ast::visit::{self, AssocCtxt, Visitor}; +use rustc_ast::visit::{self, Visitor}; use rustc_ast::{self as ast, ItemKind}; use rustc_data_structures::fx::FxHashMap; -use rustc_hir::def::Namespace::*; -use rustc_hir::def::{DefKind, Namespace, Res}; -use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, CRATE_DEF_ID}; -use rustc_hir::TraitCandidate; -use rustc_middle::ty::{DefIdTree, Visibility}; -use rustc_resolve::{ParentScope, Resolver}; -use rustc_span::symbol::sym; -use rustc_span::{Symbol, SyntaxContext}; - -use std::collections::hash_map::Entry; -use std::mem; +use rustc_hir::def::Res; +use rustc_hir::def_id::{DefId, DefIdSet}; +use rustc_resolve::Resolver; +use rustc_span::Symbol; pub(crate) fn early_resolve_intra_doc_links( resolver: &mut Resolver<'_>, krate: &ast::Crate, - document_private_items: bool, ) -> ResolverCaches { - let parent_scope = - ParentScope::module(resolver.expect_module(CRATE_DEF_ID.to_def_id()), resolver); let mut link_resolver = EarlyDocLinkResolver { resolver, - parent_scope, - visited_mods: Default::default(), - markdown_links: Default::default(), - doc_link_resolutions: Default::default(), - traits_in_scope: Default::default(), all_trait_impls: Default::default(), all_macro_rules: Default::default(), extern_doc_reachable: Default::default(), - local_doc_reachable: Default::default(), - document_private_items, }; - // Overridden `visit_item` below doesn't apply to the crate root, - // so we have to visit its attributes and reexports separately. - link_resolver.resolve_doc_links_local(&krate.attrs); - link_resolver.process_module_children_or_reexports(CRATE_DEF_ID.to_def_id()); visit::walk_crate(&mut link_resolver, krate); - - // FIXME: somehow rustdoc is still missing crates even though we loaded all - // the known necessary crates. Load them all unconditionally until we find a way to fix this. 
- // DO NOT REMOVE THIS without first testing on the reproducer in - // https://github.com/jyn514/objr/commit/edcee7b8124abf0e4c63873e8422ff81beb11ebb - for (extern_name, _) in - link_resolver.resolver.sess().opts.externs.iter().filter(|(_, entry)| entry.add_prelude) - { - link_resolver.resolver.resolve_rustdoc_path(extern_name, TypeNS, parent_scope); - } - link_resolver.process_extern_impls(); ResolverCaches { - markdown_links: Some(link_resolver.markdown_links), - doc_link_resolutions: link_resolver.doc_link_resolutions, - traits_in_scope: link_resolver.traits_in_scope, + markdown_links: Some(Default::default()), all_trait_impls: Some(link_resolver.all_trait_impls), all_macro_rules: link_resolver.all_macro_rules, extern_doc_reachable: link_resolver.extern_doc_reachable, } } -fn doc_attrs<'a>(attrs: impl Iterator) -> Attributes { - Attributes::from_ast_iter(attrs.map(|attr| (attr, None)), true) -} - struct EarlyDocLinkResolver<'r, 'ra> { resolver: &'r mut Resolver<'ra>, - parent_scope: ParentScope<'ra>, - visited_mods: DefIdSet, - markdown_links: FxHashMap>, - doc_link_resolutions: FxHashMap<(Symbol, Namespace, DefId), Option>>, - traits_in_scope: DefIdMap>, all_trait_impls: Vec, all_macro_rules: FxHashMap>, /// This set is used as a seed for `effective_visibilities`, which are then extended by some /// more items using `lib_embargo_visit_item` during doc inlining. extern_doc_reachable: DefIdSet, - /// This is an easily identifiable superset of items added to `effective_visibilities` - /// using `lib_embargo_visit_item` during doc inlining. - /// The union of `(extern,local)_doc_reachable` is therefore a superset of - /// `effective_visibilities` and can be used for pruning extern impls here - /// in early doc link resolution. - local_doc_reachable: DefIdSet, - document_private_items: bool, } impl<'ra> EarlyDocLinkResolver<'_, 'ra> { - fn add_traits_in_scope(&mut self, def_id: DefId) { - // Calls to `traits_in_scope` are expensive, so try to avoid them if only possible. - // Keys in the `traits_in_scope` cache are always module IDs. - if let Entry::Vacant(entry) = self.traits_in_scope.entry(def_id) { - let module = self.resolver.get_nearest_non_block_module(def_id); - let module_id = module.def_id(); - let entry = if module_id == def_id { - Some(entry) - } else if let Entry::Vacant(entry) = self.traits_in_scope.entry(module_id) { - Some(entry) - } else { - None - }; - if let Some(entry) = entry { - entry.insert(self.resolver.traits_in_scope( - None, - &ParentScope::module(module, self.resolver), - SyntaxContext::root(), - None, - )); - } - } - } - - fn is_doc_reachable(&self, def_id: DefId) -> bool { - self.extern_doc_reachable.contains(&def_id) || self.local_doc_reachable.contains(&def_id) - } - - /// Add traits in scope for links in impls collected by the `collect-intra-doc-links` pass. - /// That pass filters impls using type-based information, but we don't yet have such - /// information here, so we just conservatively calculate traits in scope for *all* modules - /// having impls in them. fn process_extern_impls(&mut self) { - // Resolving links in already existing crates may trigger loading of new crates. - let mut start_cnum = 0; - loop { - let crates = Vec::from_iter(self.resolver.cstore().crates_untracked()); - for cnum in &crates[start_cnum..] { - early_lib_embargo_visit_item( - self.resolver, - &mut self.extern_doc_reachable, - cnum.as_def_id(), - true, - ); - } - for &cnum in &crates[start_cnum..] 
{ - let all_trait_impls = - Vec::from_iter(self.resolver.cstore().trait_impls_in_crate_untracked(cnum)); - let all_inherent_impls = - Vec::from_iter(self.resolver.cstore().inherent_impls_in_crate_untracked(cnum)); - let all_incoherent_impls = Vec::from_iter( - self.resolver.cstore().incoherent_impls_in_crate_untracked(cnum), - ); - - // Querying traits in scope is expensive so we try to prune the impl lists using - // privacy, private traits and impls from other crates are never documented in - // the current crate, and links in their doc comments are not resolved. - for &(trait_def_id, impl_def_id, simplified_self_ty) in &all_trait_impls { - if self.is_doc_reachable(trait_def_id) - && simplified_self_ty - .and_then(|ty| ty.def()) - .map_or(true, |ty_def_id| self.is_doc_reachable(ty_def_id)) - { - if self.visited_mods.insert(trait_def_id) { - self.resolve_doc_links_extern_impl(trait_def_id, false); - } - self.resolve_doc_links_extern_impl(impl_def_id, false); - } - self.all_trait_impls.push(impl_def_id); - } - for (ty_def_id, impl_def_id) in all_inherent_impls { - if self.is_doc_reachable(ty_def_id) { - self.resolve_doc_links_extern_impl(impl_def_id, true); - } - } - for impl_def_id in all_incoherent_impls { - self.resolve_doc_links_extern_impl(impl_def_id, true); - } - } - - if crates.len() > start_cnum { - start_cnum = crates.len(); - } else { - break; - } - } - } - - fn resolve_doc_links_extern_impl(&mut self, def_id: DefId, is_inherent: bool) { - self.resolve_doc_links_extern_outer_fixme(def_id, def_id); - let assoc_item_def_ids = Vec::from_iter( - self.resolver.cstore().associated_item_def_ids_untracked(def_id, self.resolver.sess()), - ); - for assoc_def_id in assoc_item_def_ids { - if !is_inherent || self.resolver.cstore().visibility_untracked(assoc_def_id).is_public() - { - self.resolve_doc_links_extern_outer_fixme(assoc_def_id, def_id); - } - } - } - - // FIXME: replace all uses with `resolve_doc_links_extern_outer` to actually resolve links, not - // just add traits in scope. This may be expensive and require benchmarking and optimization. 
- fn resolve_doc_links_extern_outer_fixme(&mut self, def_id: DefId, scope_id: DefId) { - if !self.resolver.cstore().may_have_doc_links_untracked(def_id) { - return; - } - if let Some(parent_id) = self.resolver.opt_parent(scope_id) { - self.add_traits_in_scope(parent_id); - } - } - - fn resolve_doc_links_extern_outer(&mut self, def_id: DefId, scope_id: DefId) { - if !self.resolver.cstore().may_have_doc_links_untracked(def_id) { - return; - } - let attrs = Vec::from_iter( - self.resolver.cstore().item_attrs_untracked(def_id, self.resolver.sess()), - ); - let parent_scope = ParentScope::module( - self.resolver.get_nearest_non_block_module( - self.resolver.opt_parent(scope_id).unwrap_or(scope_id), - ), - self.resolver, - ); - self.resolve_doc_links(doc_attrs(attrs.iter()), parent_scope); - } - - fn resolve_doc_links_extern_inner(&mut self, def_id: DefId) { - if !self.resolver.cstore().may_have_doc_links_untracked(def_id) { - return; - } - let attrs = Vec::from_iter( - self.resolver.cstore().item_attrs_untracked(def_id, self.resolver.sess()), - ); - let parent_scope = ParentScope::module(self.resolver.expect_module(def_id), self.resolver); - self.resolve_doc_links(doc_attrs(attrs.iter()), parent_scope); - } - - fn resolve_doc_links_local(&mut self, attrs: &[ast::Attribute]) { - if !attrs.iter().any(|attr| attr.may_have_doc_links()) { - return; - } - self.resolve_doc_links(doc_attrs(attrs.iter()), self.parent_scope); - } - - fn resolve_and_cache( - &mut self, - path_str: &str, - ns: Namespace, - parent_scope: &ParentScope<'ra>, - ) -> bool { - // FIXME: This caching may be incorrect in case of multiple `macro_rules` - // items with the same name in the same module. - self.doc_link_resolutions - .entry((Symbol::intern(path_str), ns, parent_scope.module.def_id())) - .or_insert_with_key(|(path, ns, _)| { - self.resolver.resolve_rustdoc_path(path.as_str(), *ns, *parent_scope) - }) - .is_some() - } - - fn resolve_doc_links(&mut self, attrs: Attributes, parent_scope: ParentScope<'ra>) { - let mut need_traits_in_scope = false; - for (doc_module, doc) in attrs.prepare_to_doc_link_resolution() { - assert_eq!(doc_module, None); - let mut tmp_links = mem::take(&mut self.markdown_links); - let links = - tmp_links.entry(doc).or_insert_with_key(|doc| preprocessed_markdown_links(doc)); - for PreprocessedMarkdownLink(pp_link, _) in links { - if let Ok(pinfo) = pp_link { - // The logic here is a conservative approximation for path resolution in - // `resolve_with_disambiguator`. - if let Some(ns) = pinfo.disambiguator.map(Disambiguator::ns) { - if self.resolve_and_cache(&pinfo.path_str, ns, &parent_scope) { - continue; - } - } - - // Resolve all namespaces due to no disambiguator or for diagnostics. - let mut any_resolved = false; - let mut need_assoc = false; - for ns in [TypeNS, ValueNS, MacroNS] { - if self.resolve_and_cache(&pinfo.path_str, ns, &parent_scope) { - any_resolved = true; - } else if ns != MacroNS { - need_assoc = true; - } - } - - // Resolve all prefixes for type-relative resolution or for diagnostics. 
- if need_assoc || !any_resolved { - let mut path = &pinfo.path_str[..]; - while let Some(idx) = path.rfind("::") { - path = &path[..idx]; - need_traits_in_scope = true; - for ns in [TypeNS, ValueNS, MacroNS] { - self.resolve_and_cache(path, ns, &parent_scope); - } - } - } - } - } - self.markdown_links = tmp_links; - } - - if need_traits_in_scope { - self.add_traits_in_scope(parent_scope.module.def_id()); - } - } - - /// When reexports are inlined, they are replaced with item which they refer to, those items - /// may have links in their doc comments, those links are resolved at the item definition site, - /// so we need to know traits in scope at that definition site. - fn process_module_children_or_reexports(&mut self, module_id: DefId) { - if !self.visited_mods.insert(module_id) { - return; // avoid infinite recursion - } - - for child in self.resolver.module_children_or_reexports(module_id) { - // This condition should give a superset of `denied` from `fn clean_use_statement`. - if child.vis.is_public() - || self.document_private_items - && child.vis != Visibility::Restricted(module_id) - && module_id.is_local() - { - if let Some(def_id) = child.res.opt_def_id() && !def_id.is_local() { - self.local_doc_reachable.insert(def_id); - let scope_id = match child.res { - Res::Def( - DefKind::Variant - | DefKind::AssocTy - | DefKind::AssocFn - | DefKind::AssocConst, - .., - ) => self.resolver.parent(def_id), - _ => def_id, - }; - self.resolve_doc_links_extern_outer(def_id, scope_id); // Outer attribute scope - if let Res::Def(DefKind::Mod, ..) = child.res { - self.resolve_doc_links_extern_inner(def_id); // Inner attribute scope - } - if let Res::Def(DefKind::Mod | DefKind::Enum | DefKind::Trait, ..) = child.res { - self.process_module_children_or_reexports(def_id); - } - if let Res::Def(DefKind::Struct | DefKind::Union | DefKind::Variant, _) = - child.res - { - let field_def_ids = Vec::from_iter( - self.resolver - .cstore() - .associated_item_def_ids_untracked(def_id, self.resolver.sess()), - ); - for field_def_id in field_def_ids { - self.resolve_doc_links_extern_outer(field_def_id, scope_id); - } - } - } + for cnum in self.resolver.cstore().crates_untracked() { + early_lib_embargo_visit_item( + self.resolver, + &mut self.extern_doc_reachable, + cnum.as_def_id(), + true, + ); + for (_, impl_def_id, _) in self.resolver.cstore().trait_impls_in_crate_untracked(cnum) { + self.all_trait_impls.push(impl_def_id); } } } @@ -359,73 +58,16 @@ impl<'ra> EarlyDocLinkResolver<'_, 'ra> { impl Visitor<'_> for EarlyDocLinkResolver<'_, '_> { fn visit_item(&mut self, item: &ast::Item) { - self.resolve_doc_links_local(&item.attrs); // Outer attribute scope - if let ItemKind::Mod(..) 
= item.kind { - let module_def_id = self.resolver.local_def_id(item.id).to_def_id(); - let module = self.resolver.expect_module(module_def_id); - let old_module = mem::replace(&mut self.parent_scope.module, module); - let old_macro_rules = self.parent_scope.macro_rules; - self.resolve_doc_links_local(&item.attrs); // Inner attribute scope - self.process_module_children_or_reexports(module_def_id); - visit::walk_item(self, item); - if item - .attrs - .iter() - .all(|attr| !attr.has_name(sym::macro_use) && !attr.has_name(sym::macro_escape)) - { - self.parent_scope.macro_rules = old_macro_rules; + match &item.kind { + ItemKind::Impl(impl_) if impl_.of_trait.is_some() => { + self.all_trait_impls.push(self.resolver.local_def_id(item.id).to_def_id()); } - self.parent_scope.module = old_module; - } else { - match &item.kind { - ItemKind::Impl(box ast::Impl { of_trait: Some(trait_ref), .. }) => { - if let Some(partial_res) = self.resolver.get_partial_res(trait_ref.ref_id) - && let Some(res) = partial_res.full_res() - && let Some(trait_def_id) = res.opt_def_id() - && !trait_def_id.is_local() - && self.visited_mods.insert(trait_def_id) { - self.resolve_doc_links_extern_impl(trait_def_id, false); - } - self.all_trait_impls.push(self.resolver.local_def_id(item.id).to_def_id()); - } - ItemKind::MacroDef(macro_def) if macro_def.macro_rules => { - let (macro_rules_scope, res) = - self.resolver.macro_rules_scope(self.resolver.local_def_id(item.id)); - self.parent_scope.macro_rules = macro_rules_scope; - self.all_macro_rules.insert(item.ident.name, res); - } - _ => {} + ItemKind::MacroDef(macro_def) if macro_def.macro_rules => { + let (_, res) = self.resolver.macro_rules_scope(self.resolver.local_def_id(item.id)); + self.all_macro_rules.insert(item.ident.name, res); } - visit::walk_item(self, item); + _ => {} } + visit::walk_item(self, item); } - - fn visit_assoc_item(&mut self, item: &ast::AssocItem, ctxt: AssocCtxt) { - self.resolve_doc_links_local(&item.attrs); - visit::walk_assoc_item(self, item, ctxt) - } - - fn visit_foreign_item(&mut self, item: &ast::ForeignItem) { - self.resolve_doc_links_local(&item.attrs); - visit::walk_foreign_item(self, item) - } - - fn visit_variant(&mut self, v: &ast::Variant) { - self.resolve_doc_links_local(&v.attrs); - visit::walk_variant(self, v) - } - - fn visit_field_def(&mut self, field: &ast::FieldDef) { - self.resolve_doc_links_local(&field.attrs); - visit::walk_field_def(self, field) - } - - fn visit_block(&mut self, block: &ast::Block) { - let old_macro_rules = self.parent_scope.macro_rules; - visit::walk_block(self, block); - self.parent_scope.macro_rules = old_macro_rules; - } - - // NOTE: if doc-comments are ever allowed on other nodes (e.g. function parameters), - // then this will have to implement other visitor methods too. } diff --git a/src/librustdoc/passes/mod.rs b/src/librustdoc/passes/mod.rs index 634e70ec97a0d..4b1ff68df502f 100644 --- a/src/librustdoc/passes/mod.rs +++ b/src/librustdoc/passes/mod.rs @@ -2,11 +2,12 @@ //! process. 
use rustc_middle::ty::TyCtxt; +use rustc_resolve::rustdoc::DocFragmentKind; use rustc_span::{InnerSpan, Span, DUMMY_SP}; use std::ops::Range; use self::Condition::*; -use crate::clean::{self, DocFragmentKind}; +use crate::clean; use crate::core::DocContext; mod stripper; diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs index 8ce19c8b5145b..c4b994af13bfe 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -178,6 +178,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[ "ppv-lite86", "proc-macro-hack", "proc-macro2", + "pulldown-cmark", "psm", "punycode", "quote", @@ -246,6 +247,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[ "unic-langid-macros", "unic-langid-macros-impl", "unic-ucd-version", + "unicase", "unicode-ident", "unicode-normalization", "unicode-script", From 3b0866272a2f3e8c4211b76bc314fb7b878ed823 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sun, 5 Feb 2023 15:10:02 +0400 Subject: [PATCH 373/501] Stop resolving doc links on `mod` items twice --- compiler/rustc_resolve/src/late.rs | 10 ++++++++-- compiler/rustc_resolve/src/rustdoc.rs | 8 ++++++++ src/librustdoc/clean/types.rs | 16 +++------------- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index ed1e787f3962c..c767bcd464c0c 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -2199,7 +2199,11 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { } fn resolve_item(&mut self, item: &'ast Item) { - self.resolve_doc_links(&item.attrs); + let mod_inner_docs = + matches!(item.kind, ItemKind::Mod(..)) && rustdoc::inner_docs(&item.attrs); + if !mod_inner_docs { + self.resolve_doc_links(&item.attrs); + } let name = item.ident.name; debug!("(resolving item) resolving {} ({:?})", name, item.kind); @@ -2292,7 +2296,9 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { ItemKind::Mod(..) => { self.with_scope(item.id, |this| { - this.resolve_doc_links(&item.attrs); + if mod_inner_docs { + this.resolve_doc_links(&item.attrs); + } let old_macro_rules = this.parent_scope.macro_rules; visit::walk_item(this, item); // Maintain macro_rules scopes in the same way as during early resolution diff --git a/compiler/rustc_resolve/src/rustdoc.rs b/compiler/rustc_resolve/src/rustdoc.rs index 86172afb87693..a967f4b940c80 100644 --- a/compiler/rustc_resolve/src/rustdoc.rs +++ b/compiler/rustc_resolve/src/rustdoc.rs @@ -326,6 +326,14 @@ pub fn strip_generics_from_path(path_str: &str) -> Result bool { + attrs.iter().find(|a| a.doc_str().is_some()).map_or(true, |a| a.style == ast::AttrStyle::Inner) +} + /// Simplified version of the corresponding function in rustdoc. /// If the rustdoc version returns a successful result, this function must return the same result. /// Otherwise this function may return anything. 
diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index de350af515948..ffe6fea7ea447 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -10,7 +10,7 @@ use std::{fmt, iter}; use arrayvec::ArrayVec; use thin_vec::ThinVec; -use rustc_ast::{self as ast, AttrStyle}; +use rustc_ast as ast; use rustc_attr::{ConstStability, Deprecation, Stability, StabilityLevel}; use rustc_const_eval::const_eval::is_unstable_const_fn; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; @@ -23,7 +23,7 @@ use rustc_hir_analysis::check::intrinsic::intrinsic_operation_unsafety; use rustc_index::vec::IndexVec; use rustc_middle::ty::fast_reject::SimplifiedType; use rustc_middle::ty::{self, DefIdTree, TyCtxt, Visibility}; -use rustc_resolve::rustdoc::{add_doc_fragment, attrs_to_doc_fragments, DocFragment}; +use rustc_resolve::rustdoc::{add_doc_fragment, attrs_to_doc_fragments, inner_docs, DocFragment}; use rustc_session::Session; use rustc_span::hygiene::MacroKind; use rustc_span::symbol::{kw, sym, Ident, Symbol}; @@ -405,7 +405,7 @@ impl Item { pub(crate) fn inner_docs(&self, tcx: TyCtxt<'_>) -> bool { self.item_id .as_def_id() - .map(|did| tcx.get_attrs_unchecked(did).inner_docs()) + .map(|did| inner_docs(tcx.get_attrs_unchecked(did))) .unwrap_or(false) } @@ -874,8 +874,6 @@ pub(crate) trait AttributesExt { fn span(&self) -> Option; - fn inner_docs(&self) -> bool; - fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet) -> Option>; } @@ -894,14 +892,6 @@ impl AttributesExt for [ast::Attribute] { self.iter().find(|attr| attr.doc_str().is_some()).map(|attr| attr.span) } - /// Returns whether the first doc-comment is an inner attribute. - /// - //// If there are no doc-comments, return true. - /// FIXME(#78591): Support both inner and outer attributes on the same item. 
- fn inner_docs(&self) -> bool { - self.iter().find(|a| a.doc_str().is_some()).map_or(true, |a| a.style == AttrStyle::Inner) - } - fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet) -> Option> { let sess = tcx.sess; let doc_cfg_active = tcx.features().doc_cfg; From da4ce6b41e4465917ec160c914a857fe18608a42 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Mon, 6 Feb 2023 21:57:45 +0400 Subject: [PATCH 374/501] Skip doc link resolution for some crate types and non-exported items --- compiler/rustc_resolve/src/late.rs | 79 ++++++++++++++++++++++----- compiler/rustc_session/src/config.rs | 29 +++++++++- compiler/rustc_session/src/options.rs | 2 + src/librustdoc/core.rs | 11 +++- src/librustdoc/lib.rs | 4 +- 5 files changed, 106 insertions(+), 19 deletions(-) diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index c767bcd464c0c..ca2a99971c316 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -24,13 +24,13 @@ use rustc_hir::{BindingAnnotation, PrimTy, TraitCandidate}; use rustc_middle::middle::resolve_lifetime::Set1; use rustc_middle::ty::DefIdTree; use rustc_middle::{bug, span_bug}; -use rustc_session::config::CrateType; +use rustc_session::config::{CrateType, ResolveDocLinks}; use rustc_session::lint; +use rustc_span::source_map::{respan, Spanned}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; use rustc_span::{BytePos, Span, SyntaxContext}; use smallvec::{smallvec, SmallVec}; -use rustc_span::source_map::{respan, Spanned}; use std::assert_matches::debug_assert_matches; use std::borrow::Cow; use std::collections::{hash_map::Entry, BTreeSet}; @@ -494,6 +494,30 @@ impl<'a> PathSource<'a> { } } +/// At this point for most items we can answer whether that item is exported or not, +/// but some items like impls require type information to determine exported-ness, so we make a +/// conservative estimate for them (e.g. based on nominal visibility). +#[derive(Clone, Copy)] +enum MaybeExported<'a> { + Ok(NodeId), + Impl(Option), + ImplItem(Result), +} + +impl MaybeExported<'_> { + fn eval(self, r: &Resolver<'_>) -> bool { + let def_id = match self { + MaybeExported::Ok(node_id) => Some(r.local_def_id(node_id)), + MaybeExported::Impl(Some(trait_def_id)) | MaybeExported::ImplItem(Ok(trait_def_id)) => { + trait_def_id.as_local() + } + MaybeExported::Impl(None) => return true, + MaybeExported::ImplItem(Err(vis)) => return vis.kind.is_pub(), + }; + def_id.map_or(true, |def_id| r.effective_visibilities.is_exported(def_id)) + } +} + #[derive(Default)] struct DiagnosticMetadata<'ast> { /// The current trait's associated items' ident, used for diagnostic suggestions. @@ -774,7 +798,7 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> { ); } fn visit_foreign_item(&mut self, foreign_item: &'ast ForeignItem) { - self.resolve_doc_links(&foreign_item.attrs); + self.resolve_doc_links(&foreign_item.attrs, MaybeExported::Ok(foreign_item.id)); match foreign_item.kind { ForeignItemKind::TyAlias(box TyAlias { ref generics, .. 
}) => { self.with_generic_param_rib( @@ -1165,12 +1189,12 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> { } fn visit_variant(&mut self, v: &'ast Variant) { - self.resolve_doc_links(&v.attrs); + self.resolve_doc_links(&v.attrs, MaybeExported::Ok(v.id)); visit::walk_variant(self, v) } fn visit_field_def(&mut self, f: &'ast FieldDef) { - self.resolve_doc_links(&f.attrs); + self.resolve_doc_links(&f.attrs, MaybeExported::Ok(f.id)); visit::walk_field_def(self, f) } } @@ -2201,8 +2225,8 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { fn resolve_item(&mut self, item: &'ast Item) { let mod_inner_docs = matches!(item.kind, ItemKind::Mod(..)) && rustdoc::inner_docs(&item.attrs); - if !mod_inner_docs { - self.resolve_doc_links(&item.attrs); + if !mod_inner_docs && !matches!(item.kind, ItemKind::Impl(..)) { + self.resolve_doc_links(&item.attrs, MaybeExported::Ok(item.id)); } let name = item.ident.name; @@ -2249,7 +2273,14 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { .. }) => { self.diagnostic_metadata.current_impl_items = Some(impl_items); - self.resolve_implementation(generics, of_trait, &self_ty, item.id, impl_items); + self.resolve_implementation( + &item.attrs, + generics, + of_trait, + &self_ty, + item.id, + impl_items, + ); self.diagnostic_metadata.current_impl_items = None; } @@ -2297,7 +2328,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { ItemKind::Mod(..) => { self.with_scope(item.id, |this| { if mod_inner_docs { - this.resolve_doc_links(&item.attrs); + this.resolve_doc_links(&item.attrs, MaybeExported::Ok(item.id)); } let old_macro_rules = this.parent_scope.macro_rules; visit::walk_item(this, item); @@ -2583,7 +2614,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { }; for item in trait_items { - self.resolve_doc_links(&item.attrs); + self.resolve_doc_links(&item.attrs, MaybeExported::Ok(item.id)); match &item.kind { AssocItemKind::Const(_, ty, default) => { self.visit_ty(ty); @@ -2671,6 +2702,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { fn resolve_implementation( &mut self, + attrs: &[ast::Attribute], generics: &'ast Generics, opt_trait_reference: &'ast Option, self_type: &'ast Ty, @@ -2701,6 +2733,8 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { opt_trait_reference.as_ref(), self_type, |this, trait_id| { + this.resolve_doc_links(attrs, MaybeExported::Impl(trait_id)); + let item_def_id = this.r.local_def_id(item_id); // Register the trait definitions from here. 
@@ -2734,7 +2768,7 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { debug!("resolve_implementation with_self_rib_ns(ValueNS, ...)"); let mut seen_trait_items = Default::default(); for item in impl_items { - this.resolve_impl_item(&**item, &mut seen_trait_items); + this.resolve_impl_item(&**item, &mut seen_trait_items, trait_id); } }); }); @@ -2752,9 +2786,10 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { &mut self, item: &'ast AssocItem, seen_trait_items: &mut FxHashMap, + trait_id: Option, ) { use crate::ResolutionError::*; - self.resolve_doc_links(&item.attrs); + self.resolve_doc_links(&item.attrs, MaybeExported::ImplItem(trait_id.ok_or(&item.vis))); match &item.kind { AssocItemKind::Const(_, ty, default) => { debug!("resolve_implementation AssocItemKind::Const"); @@ -4183,7 +4218,23 @@ impl<'a: 'ast, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> { res } - fn resolve_doc_links(&mut self, attrs: &[Attribute]) { + fn resolve_doc_links(&mut self, attrs: &[Attribute], maybe_exported: MaybeExported<'_>) { + match self.r.session.opts.resolve_doc_links { + ResolveDocLinks::None => return, + ResolveDocLinks::ExportedMetadata + if !self.r.session.crate_types().iter().copied().any(CrateType::has_metadata) + || !maybe_exported.eval(self.r) => + { + return; + } + ResolveDocLinks::Exported if !maybe_exported.eval(self.r) => { + return; + } + ResolveDocLinks::ExportedMetadata + | ResolveDocLinks::Exported + | ResolveDocLinks::All => {} + } + if !attrs.iter().any(|attr| attr.may_have_doc_links()) { return; } @@ -4283,7 +4334,7 @@ impl<'a> Resolver<'a> { pub(crate) fn late_resolve_crate(&mut self, krate: &Crate) { visit::walk_crate(&mut LifetimeCountVisitor { r: self }, krate); let mut late_resolution_visitor = LateResolutionVisitor::new(self); - late_resolution_visitor.resolve_doc_links(&krate.attrs); + late_resolution_visitor.resolve_doc_links(&krate.attrs, MaybeExported::Ok(CRATE_NODE_ID)); visit::walk_crate(&mut late_resolution_visitor, krate); for (id, span) in late_resolution_visitor.diagnostic_metadata.unused_labels.iter() { self.lint_buffer.buffer_lint(lint::builtin::UNUSED_LABELS, *id, *span, "unused label"); diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 7d2fdf94baa36..e8bc19f88e3e3 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -419,6 +419,18 @@ pub enum TrimmedDefPaths { GoodPath, } +#[derive(Clone, Hash)] +pub enum ResolveDocLinks { + /// Do not resolve doc links. + None, + /// Resolve doc links on exported items only for crate types that have metadata. + ExportedMetadata, + /// Resolve doc links on exported items. + Exported, + /// Resolve doc links on all items. + All, +} + /// Use tree-based collections to cheaply get a deterministic `Hash` implementation. /// *Do not* switch `BTreeMap` out for an unsorted container type! That would break /// dependency tracking for command-line arguments. 
Also only hash keys, since tracking @@ -788,6 +800,7 @@ impl Default for Options { unstable_features: UnstableFeatures::Disallow, debug_assertions: true, actually_rustdoc: false, + resolve_doc_links: ResolveDocLinks::None, trimmed_def_paths: TrimmedDefPaths::default(), cli_forced_codegen_units: None, cli_forced_local_thinlto_off: false, @@ -883,6 +896,15 @@ pub enum CrateType { ProcMacro, } +impl CrateType { + pub fn has_metadata(self) -> bool { + match self { + CrateType::Rlib | CrateType::Dylib | CrateType::ProcMacro => true, + CrateType::Executable | CrateType::Cdylib | CrateType::Staticlib => false, + } + } +} + #[derive(Clone, Hash, Debug, PartialEq, Eq)] pub enum Passes { Some(Vec), @@ -2562,6 +2584,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { libs, debug_assertions, actually_rustdoc: false, + resolve_doc_links: ResolveDocLinks::ExportedMetadata, trimmed_def_paths: TrimmedDefPaths::default(), cli_forced_codegen_units: codegen_units, cli_forced_local_thinlto_off: disable_local_thinlto, @@ -2825,8 +2848,9 @@ pub(crate) mod dep_tracking { use super::{ BranchProtection, CFGuard, CFProtection, CrateType, DebugInfo, ErrorOutputType, InstrumentCoverage, InstrumentXRay, LdImpl, LinkerPluginLto, LocationDetail, LtoCli, - OomStrategy, OptLevel, OutputType, OutputTypes, Passes, SourceFileHashAlgorithm, - SplitDwarfKind, SwitchWithOptPath, SymbolManglingVersion, TraitSolver, TrimmedDefPaths, + OomStrategy, OptLevel, OutputType, OutputTypes, Passes, ResolveDocLinks, + SourceFileHashAlgorithm, SplitDwarfKind, SwitchWithOptPath, SymbolManglingVersion, + TraitSolver, TrimmedDefPaths, }; use crate::lint; use crate::options::WasiExecModel; @@ -2913,6 +2937,7 @@ pub(crate) mod dep_tracking { TargetTriple, Edition, LinkerPluginLto, + ResolveDocLinks, SplitDebuginfo, SplitDwarfKind, StackProtector, diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index b9723d35e5891..2305ac19a331f 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -169,6 +169,8 @@ top_level_options!( /// is currently just a hack and will be removed eventually, so please /// try to not rely on this too much. actually_rustdoc: bool [TRACKED], + /// Whether name resolver should resolve documentation links. + resolve_doc_links: ResolveDocLinks [TRACKED], /// Control path trimming. trimmed_def_paths: TrimmedDefPaths [TRACKED], diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index e8316ac976b5f..d85749cadbd76 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -12,7 +12,7 @@ use rustc_hir::{HirId, Path}; use rustc_interface::interface; use rustc_middle::hir::nested_filter; use rustc_middle::ty::{ParamEnv, Ty, TyCtxt}; -use rustc_session::config::{self, CrateType, ErrorOutputType}; +use rustc_session::config::{self, CrateType, ErrorOutputType, ResolveDocLinks}; use rustc_session::lint; use rustc_session::Session; use rustc_span::symbol::sym; @@ -200,6 +200,7 @@ pub(crate) fn create_config( scrape_examples_options, .. }: RustdocOptions, + RenderOptions { document_private, .. }: &RenderOptions, ) -> rustc_interface::Config { // Add the doc cfg into the doc build. 
cfgs.push("doc".to_string()); @@ -227,6 +228,13 @@ pub(crate) fn create_config( let crate_types = if proc_macro_crate { vec![CrateType::ProcMacro] } else { vec![CrateType::Rlib] }; + let resolve_doc_links = if *document_private { + ResolveDocLinks::All + } else { + // Should be `ResolveDocLinks::Exported` in theory, but for some reason rustdoc + // still tries to request resolutions for links on private items. + ResolveDocLinks::All + }; let test = scrape_examples_options.map(|opts| opts.scrape_tests).unwrap_or(false); // plays with error output here! let sessopts = config::Options { @@ -240,6 +248,7 @@ pub(crate) fn create_config( target_triple: target, unstable_features: UnstableFeatures::from_environment(crate_name.as_deref()), actually_rustdoc: true, + resolve_doc_links, unstable_opts, error_format, diagnostic_width, diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index 0d4a87eb4fe3a..b22c12fa81054 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -742,7 +742,7 @@ fn main_args(at_args: &[String]) -> MainResult { (false, true) => { let input = options.input.clone(); let edition = options.edition; - let config = core::create_config(options); + let config = core::create_config(options, &render_options); // `markdown::render` can invoke `doctest::make_test`, which // requires session globals and a thread pool, so we use @@ -775,7 +775,7 @@ fn main_args(at_args: &[String]) -> MainResult { let scrape_examples_options = options.scrape_examples_options.clone(); let bin_crate = options.bin_crate; - let config = core::create_config(options); + let config = core::create_config(options, &render_options); interface::run_compiler(config, |compiler| { let sess = compiler.session(); From 626dc651ff41e718abc5c21261cf2b95801a2b90 Mon Sep 17 00:00:00 2001 From: lh123 <1585086582@qq.com> Date: Fri, 10 Feb 2023 14:22:56 +0800 Subject: [PATCH 375/501] fix: Insert spaces when inlining macros --- .../ide-assists/src/handlers/inline_macro.rs | 26 ++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/crates/ide-assists/src/handlers/inline_macro.rs b/crates/ide-assists/src/handlers/inline_macro.rs index 9d03f03d201a9..3fc552306a6ff 100644 --- a/crates/ide-assists/src/handlers/inline_macro.rs +++ b/crates/ide-assists/src/handlers/inline_macro.rs @@ -1,3 +1,4 @@ +use ide_db::syntax_helpers::insert_whitespace_into_node::insert_ws_into; use syntax::ast::{self, AstNode}; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -35,7 +36,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; // ``` pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let unexpanded = ctx.find_node_at_offset::()?; - let expanded = ctx.sema.expand(&unexpanded)?.clone_for_update(); + let expanded = insert_ws_into(ctx.sema.expand(&unexpanded)?.clone_for_update()); let text_range = unexpanded.syntax().text_range(); @@ -230,4 +231,27 @@ fn f() { let result = foo$0(); } "#, ); } + + #[test] + fn inline_macro_with_whitespace() { + check_assist( + inline_macro, + r#" +macro_rules! whitespace { + () => { + if true {} + }; +} +fn f() { whitespace$0!(); } +"#, + r#" +macro_rules! 
whitespace { + () => { + if true {} + }; +} +fn f() { if true{}; } +"#, + ) + } } From d560574c4bd621cfabaa413c79e76b1da1abf279 Mon Sep 17 00:00:00 2001 From: Zephaniah Ong Date: Fri, 10 Feb 2023 15:25:01 +0800 Subject: [PATCH 376/501] create symlink only for non-windows operating systems --- src/bootstrap/download.rs | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs index 5c863015adb27..7c5e7fb6ebc92 100644 --- a/src/bootstrap/download.rs +++ b/src/bootstrap/download.rs @@ -340,9 +340,12 @@ impl Config { let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); let rustfmt_stamp = bin_root.join(".rustfmt-stamp"); - let legacy_rustfmt = self.initial_rustc.with_file_name(exe("rustfmt", host)); - if !legacy_rustfmt.exists() { - t!(self.symlink_file(&rustfmt_path, &legacy_rustfmt)); + #[cfg(not(windows))] + { + let legacy_rustfmt = self.initial_rustc.with_file_name(exe("rustfmt", host)); + if !legacy_rustfmt.exists() { + t!(self.symlink_file(&rustfmt_path, &legacy_rustfmt)); + } } if rustfmt_path.exists() && !program_out_of_date(&rustfmt_stamp, &channel) { From 1af9b4f347c1ca725e43444e0068b5e520dde7e2 Mon Sep 17 00:00:00 2001 From: Jubilee <46493976+workingjubilee@users.noreply.github.com> Date: Thu, 9 Feb 2023 23:56:20 -0800 Subject: [PATCH 377/501] Clarify `new_size` for realloc means bytes --- library/core/src/alloc/global.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/library/core/src/alloc/global.rs b/library/core/src/alloc/global.rs index 1d80b8bf9ec76..18da70451f299 100644 --- a/library/core/src/alloc/global.rs +++ b/library/core/src/alloc/global.rs @@ -203,7 +203,7 @@ pub unsafe trait GlobalAlloc { ptr } - /// Shrink or grow a block of memory to the given `new_size`. + /// Shrink or grow a block of memory to the given `new_size` in bytes. /// The block is described by the given `ptr` pointer and `layout`. /// /// If this returns a non-null pointer, then ownership of the memory block @@ -211,10 +211,11 @@ pub unsafe trait GlobalAlloc { /// Any access to the old `ptr` is Undefined Behavior, even if the /// allocation remained in-place. The newly returned pointer is the only valid pointer /// for accessing this memory now. + /// /// The new memory block is allocated with `layout`, - /// but with the `size` updated to `new_size`. This new layout must be - /// used when deallocating the new memory block with `dealloc`. The range - /// `0..min(layout.size(), new_size)` of the new memory block is + /// but with the `size` updated to `new_size` in bytes. + /// This new layout must be used when deallocating the new memory block with `dealloc`. + /// The range `0..min(layout.size(), new_size)` of the new memory block is /// guaranteed to have the same values as the original block. 
/// /// If this method returns null, then ownership of the memory From 257389882de113945d3f9f1abb9d0a431ca3cf92 Mon Sep 17 00:00:00 2001 From: yukang Date: Fri, 10 Feb 2023 08:01:50 +0000 Subject: [PATCH 378/501] add test for no input file --- compiler/rustc_driver_impl/src/lib.rs | 5 ++++- tests/run-make/no-input-file/Makefile | 4 ++++ tests/run-make/no-input-file/no-input-file.stderr | 2 ++ 3 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 tests/run-make/no-input-file/Makefile create mode 100644 tests/run-make/no-input-file/no-input-file.stderr diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index 5e403d846e1ef..e5104e2d3f9d1 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -628,7 +628,10 @@ fn print_crate_info( println!("{}", serde_json::to_string_pretty(&sess.target.to_json()).unwrap()); } FileNames | CrateName => { - let Some(attrs) = attrs.as_ref() else { return Compilation::Continue; }; + let Some(attrs) = attrs.as_ref() else { + // no crate attributes, print out an error and exit + return Compilation::Continue; + }; let t_outputs = rustc_interface::util::build_output_filenames(attrs, sess); let id = rustc_session::output::find_crate_name(sess, attrs); if *req == PrintRequest::CrateName { diff --git a/tests/run-make/no-input-file/Makefile b/tests/run-make/no-input-file/Makefile new file mode 100644 index 0000000000000..2f02159229d3c --- /dev/null +++ b/tests/run-make/no-input-file/Makefile @@ -0,0 +1,4 @@ +include ../../run-make-fulldeps/tools.mk + +all: + $(RUSTC) --print crate-name 2>&1 | diff - no-input-file.stderr diff --git a/tests/run-make/no-input-file/no-input-file.stderr b/tests/run-make/no-input-file/no-input-file.stderr new file mode 100644 index 0000000000000..b843eb524f3d2 --- /dev/null +++ b/tests/run-make/no-input-file/no-input-file.stderr @@ -0,0 +1,2 @@ +error: no input filename given + From 557aa1e37859dfa1d8de29b24183e18a19aae563 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 10 Feb 2023 09:57:03 +0100 Subject: [PATCH 379/501] Render discriminant inlay hints for mixed variants if at least one discriminant is specified --- crates/ide/src/inlay_hints/discriminant.rs | 30 ++++++++++++++++++---- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/crates/ide/src/inlay_hints/discriminant.rs b/crates/ide/src/inlay_hints/discriminant.rs index c5c947150b377..5dd51ad11f441 100644 --- a/crates/ide/src/inlay_hints/discriminant.rs +++ b/crates/ide/src/inlay_hints/discriminant.rs @@ -19,12 +19,15 @@ pub(super) fn enum_hints( _: FileId, enum_: ast::Enum, ) -> Option<()> { - let disabled = match config.discriminant_hints { - DiscriminantHints::Always => false, - DiscriminantHints::Fieldless => sema.to_def(&enum_)?.is_data_carrying(sema.db), - DiscriminantHints::Never => true, + let enabled = match config.discriminant_hints { + DiscriminantHints::Always => true, + DiscriminantHints::Fieldless => { + !sema.to_def(&enum_)?.is_data_carrying(sema.db) + || enum_.variant_list()?.variants().any(|v| v.expr().is_some()) + } + DiscriminantHints::Never => false, }; - if disabled { + if !enabled { return None; } for variant in enum_.variant_list()?.variants() { @@ -161,8 +164,25 @@ enum Enum { Variant1, Variant2 {}, Variant3, + Variant5, + Variant6, +} +"#, + ); + check_discriminants_fieldless( + r#" +enum Enum { + Variant(), + //^^^^^^^^^0 + Variant1, + //^^^^^^^^1 + Variant2 {}, + //^^^^^^^^^^^2 + Variant3, + //^^^^^^^^3 Variant5 = 5, Variant6, + //^^^^^^^^6 } "#, 
); From 1f76cea515cf8155a880951e99797c55289ccd97 Mon Sep 17 00:00:00 2001 From: Jubilee <46493976+workingjubilee@users.noreply.github.com> Date: Fri, 10 Feb 2023 01:48:45 -0800 Subject: [PATCH 380/501] Cleanup typos in en_US/borrowck.ftl --- compiler/rustc_error_messages/locales/en-US/borrowck.ftl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/rustc_error_messages/locales/en-US/borrowck.ftl b/compiler/rustc_error_messages/locales/en-US/borrowck.ftl index fe77cf23e8f94..a3b6b5e8138b6 100644 --- a/compiler/rustc_error_messages/locales/en-US/borrowck.ftl +++ b/compiler/rustc_error_messages/locales/en-US/borrowck.ftl @@ -18,7 +18,7 @@ borrowck_generic_does_not_live_long_enough = `{$kind}` does not live long enough borrowck_move_borrowed = - cannot move out of `{$desc}` beacause it is borrowed + cannot move out of `{$desc}` because it is borrowed borrowck_var_does_not_need_mut = variable does not need to be mutable @@ -87,10 +87,10 @@ borrowck_use_due_to_use_closure = use occurs due to use in closure borrowck_assign_due_to_use_closure = - assign occurs due to use in closure + assignment occurs due to use in closure borrowck_assign_part_due_to_use_closure = - assign to part occurs due to use in closure + assignment to part occurs due to use in closure borrowck_capture_immute = capture is immutable because of use here From 19c07f88ee6ecbbe5086e8435941aa68d58f35db Mon Sep 17 00:00:00 2001 From: Philipp Krones Date: Fri, 10 Feb 2023 11:34:02 +0100 Subject: [PATCH 381/501] Bump nightly version -> 2023-02-10 --- rust-toolchain | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust-toolchain b/rust-toolchain index 4e7fc565a322a..adea8c53df278 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2023-01-27" +channel = "nightly-2023-02-10" components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"] From fabada0c816de9bc2780afad3a1f1bcf50159b08 Mon Sep 17 00:00:00 2001 From: Philipp Krones Date: Fri, 10 Feb 2023 11:38:56 +0100 Subject: [PATCH 382/501] Fix CLI of clippy_dev Clap was updated in rust-lang/rust-clippy#10270, which broke the command line of clippy_dev. This swaps out contains_id, which now returns always true in the places it was used with get_flag. 
--- clippy_dev/src/main.rs | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/clippy_dev/src/main.rs b/clippy_dev/src/main.rs index b2d67a72fd2b8..e2457e5a8a5e9 100644 --- a/clippy_dev/src/main.rs +++ b/clippy_dev/src/main.rs @@ -11,22 +11,22 @@ fn main() { match matches.subcommand() { Some(("bless", matches)) => { - bless::bless(matches.contains_id("ignore-timestamp")); + bless::bless(matches.get_flag("ignore-timestamp")); }, Some(("dogfood", matches)) => { dogfood::dogfood( - matches.contains_id("fix"), - matches.contains_id("allow-dirty"), - matches.contains_id("allow-staged"), + matches.get_flag("fix"), + matches.get_flag("allow-dirty"), + matches.get_flag("allow-staged"), ); }, Some(("fmt", matches)) => { - fmt::run(matches.contains_id("check"), matches.contains_id("verbose")); + fmt::run(matches.get_flag("check"), matches.get_flag("verbose")); }, Some(("update_lints", matches)) => { - if matches.contains_id("print-only") { + if matches.get_flag("print-only") { update_lints::print_lints(); - } else if matches.contains_id("check") { + } else if matches.get_flag("check") { update_lints::update(update_lints::UpdateMode::Check); } else { update_lints::update(update_lints::UpdateMode::Change); @@ -38,7 +38,7 @@ fn main() { matches.get_one::("name"), matches.get_one::("category").map(String::as_str), matches.get_one::("type").map(String::as_str), - matches.contains_id("msrv"), + matches.get_flag("msrv"), ) { Ok(_) => update_lints::update(update_lints::UpdateMode::Change), Err(e) => eprintln!("Unable to create lint: {e}"), @@ -46,7 +46,7 @@ fn main() { }, Some(("setup", sub_command)) => match sub_command.subcommand() { Some(("intellij", matches)) => { - if matches.contains_id("remove") { + if matches.get_flag("remove") { setup::intellij::remove_rustc_src(); } else { setup::intellij::setup_rustc_src( @@ -57,17 +57,17 @@ fn main() { } }, Some(("git-hook", matches)) => { - if matches.contains_id("remove") { + if matches.get_flag("remove") { setup::git_hook::remove_hook(); } else { - setup::git_hook::install_hook(matches.contains_id("force-override")); + setup::git_hook::install_hook(matches.get_flag("force-override")); } }, Some(("vscode-tasks", matches)) => { - if matches.contains_id("remove") { + if matches.get_flag("remove") { setup::vscode::remove_tasks(); } else { - setup::vscode::install_tasks(matches.contains_id("force-override")); + setup::vscode::install_tasks(matches.get_flag("force-override")); } }, _ => {}, @@ -91,7 +91,7 @@ fn main() { Some(("rename_lint", matches)) => { let old_name = matches.get_one::("old_name").unwrap(); let new_name = matches.get_one::("new_name").unwrap_or(old_name); - let uplift = matches.contains_id("uplift"); + let uplift = matches.get_flag("uplift"); update_lints::rename(old_name, new_name, uplift); }, Some(("deprecate", matches)) => { From 5566eb4da2bac6e8cec937dc26a86ef019b505df Mon Sep 17 00:00:00 2001 From: Philipp Krones Date: Fri, 10 Feb 2023 14:01:31 +0100 Subject: [PATCH 383/501] Update Cargo.lock --- Cargo.lock | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1ddf8cadd7280..ff7abca476241 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -449,7 +449,7 @@ dependencies = [ name = "cargo-miri" version = "0.1.0" dependencies = [ - "cargo_metadata 0.15.0", + "cargo_metadata 0.15.3", "directories", "rustc-build-sysroot", "rustc-workspace-hack", @@ -540,15 +540,16 @@ dependencies = [ [[package]] name = "cargo_metadata" -version = "0.15.0" +version = 
"0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3abb7553d5b9b8421c6de7cb02606ff15e0c6eea7d8eadd75ef013fd636bec36" +checksum = "08a1ec454bc3eead8719cb56e15dbbfecdbc14e4b3a3ae4936cc6e31f5fc0d07" dependencies = [ "camino", "cargo-platform 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "semver", "serde", "serde_json", + "thiserror", ] [[package]] @@ -732,6 +733,7 @@ dependencies = [ name = "clippy" version = "0.1.69" dependencies = [ + "clap 4.1.4", "clippy_lints", "clippy_utils", "compiletest_rs", @@ -762,7 +764,7 @@ name = "clippy_dev" version = "0.0.1" dependencies = [ "aho-corasick", - "clap 3.2.20", + "clap 4.1.4", "indoc", "itertools", "opener", @@ -774,7 +776,7 @@ dependencies = [ name = "clippy_lints" version = "0.1.69" dependencies = [ - "cargo_metadata 0.14.0", + "cargo_metadata 0.15.3", "clippy_utils", "declare_clippy_lint", "if_chain", @@ -5865,7 +5867,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54ddb6f31025943e2f9d59237f433711c461a43d9415974c3eb3a4902edc1c1f" dependencies = [ "bstr 1.0.1", - "cargo_metadata 0.15.0", + "cargo_metadata 0.15.3", "color-eyre", "colored", "crossbeam-channel", From 23ab2464befc41d74e04cc2259366a439b346cb9 Mon Sep 17 00:00:00 2001 From: Boxy Date: Fri, 10 Feb 2023 13:43:29 +0000 Subject: [PATCH 384/501] add `AliasEq` to `PredicateKind` --- compiler/rustc_hir_analysis/src/astconv/mod.rs | 1 + .../src/impl_wf_check/min_specialization.rs | 1 + .../rustc_hir_analysis/src/outlives/explicit.rs | 1 + compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs | 1 + compiler/rustc_hir_typeck/src/method/probe.rs | 1 + compiler/rustc_infer/src/infer/outlives/mod.rs | 1 + compiler/rustc_infer/src/traits/util.rs | 3 +++ compiler/rustc_lint/src/builtin.rs | 2 ++ compiler/rustc_middle/src/ty/flags.rs | 16 ++++++++++++---- compiler/rustc_middle/src/ty/mod.rs | 10 ++++++++++ compiler/rustc_middle/src/ty/print/pretty.rs | 1 + compiler/rustc_middle/src/ty/structural_impls.rs | 1 + .../rustc_trait_selection/src/solve/fulfill.rs | 5 +++++ compiler/rustc_trait_selection/src/solve/mod.rs | 4 ++++ .../src/traits/auto_trait.rs | 3 +++ .../src/traits/error_reporting/mod.rs | 5 +++++ .../rustc_trait_selection/src/traits/fulfill.rs | 6 ++++++ .../src/traits/object_safety.rs | 4 ++++ .../src/traits/select/mod.rs | 3 +++ compiler/rustc_trait_selection/src/traits/wf.rs | 4 ++++ compiler/rustc_traits/src/chalk/lowering.rs | 4 ++++ .../rustc_traits/src/implied_outlives_bounds.rs | 4 +++- .../src/normalize_erasing_regions.rs | 1 + src/librustdoc/clean/mod.rs | 2 ++ .../clippy_utils/src/qualify_min_const_fn.rs | 1 + 25 files changed, 80 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_hir_analysis/src/astconv/mod.rs b/compiler/rustc_hir_analysis/src/astconv/mod.rs index 3d5f189e233bb..de50b1ab2135d 100644 --- a/compiler/rustc_hir_analysis/src/astconv/mod.rs +++ b/compiler/rustc_hir_analysis/src/astconv/mod.rs @@ -1328,6 +1328,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { ty::Clause::RegionOutlives(_) => bug!(), }, ty::PredicateKind::WellFormed(_) + | ty::PredicateKind::AliasEq(..) 
| ty::PredicateKind::ObjectSafe(_) | ty::PredicateKind::ClosureKind(_, _, _) | ty::PredicateKind::Subtype(_) diff --git a/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs b/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs index a5dcfab9be8e8..02f77f9d6afba 100644 --- a/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs +++ b/compiler/rustc_hir_analysis/src/impl_wf_check/min_specialization.rs @@ -517,6 +517,7 @@ fn trait_predicate_kind<'tcx>( ty::PredicateKind::Clause(ty::Clause::RegionOutlives(_)) | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(_)) | ty::PredicateKind::Clause(ty::Clause::Projection(_)) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::WellFormed(_) | ty::PredicateKind::Subtype(_) | ty::PredicateKind::Coerce(_) diff --git a/compiler/rustc_hir_analysis/src/outlives/explicit.rs b/compiler/rustc_hir_analysis/src/outlives/explicit.rs index 663f1c49db7db..ecd6849426dbf 100644 --- a/compiler/rustc_hir_analysis/src/outlives/explicit.rs +++ b/compiler/rustc_hir_analysis/src/outlives/explicit.rs @@ -55,6 +55,7 @@ impl<'tcx> ExplicitPredicatesMap<'tcx> { ty::PredicateKind::Clause(ty::Clause::Trait(..)) | ty::PredicateKind::Clause(ty::Clause::Projection(..)) | ty::PredicateKind::WellFormed(..) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::ObjectSafe(..) | ty::PredicateKind::ClosureKind(..) | ty::PredicateKind::Subtype(..) diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs index e84b3de124c58..52c2dabee293e 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs @@ -669,6 +669,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..)) | ty::PredicateKind::WellFormed(..) | ty::PredicateKind::ObjectSafe(..) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::ConstEvaluatable(..) | ty::PredicateKind::ConstEquate(..) // N.B., this predicate is created by breaking down a diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs index 0cf58179ec272..16b0d48002efc 100644 --- a/compiler/rustc_hir_typeck/src/method/probe.rs +++ b/compiler/rustc_hir_typeck/src/method/probe.rs @@ -837,6 +837,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { | ty::PredicateKind::ConstEvaluatable(..) | ty::PredicateKind::ConstEquate(..) | ty::PredicateKind::Ambiguous + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::TypeWellFormedFromEnv(..) => None, } }); diff --git a/compiler/rustc_infer/src/infer/outlives/mod.rs b/compiler/rustc_infer/src/infer/outlives/mod.rs index 4daa257672cfc..a8e668d81eae3 100644 --- a/compiler/rustc_infer/src/infer/outlives/mod.rs +++ b/compiler/rustc_infer/src/infer/outlives/mod.rs @@ -21,6 +21,7 @@ pub fn explicit_outlives_bounds<'tcx>( .filter_map(move |kind| match kind { ty::PredicateKind::Clause(ty::Clause::Projection(..)) | ty::PredicateKind::Clause(ty::Clause::Trait(..)) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::Coerce(..) | ty::PredicateKind::Subtype(..) | ty::PredicateKind::WellFormed(..) diff --git a/compiler/rustc_infer/src/traits/util.rs b/compiler/rustc_infer/src/traits/util.rs index 18a966449aa72..e617eb68d4775 100644 --- a/compiler/rustc_infer/src/traits/util.rs +++ b/compiler/rustc_infer/src/traits/util.rs @@ -294,6 +294,9 @@ impl<'tcx> Elaborator<'tcx> { // Nothing to elaborate } ty::PredicateKind::Ambiguous => {} + ty::PredicateKind::AliasEq(..) 
=> { + // No + } } } } diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index 5d85cfe330acd..7a50b6aec87a6 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -1594,12 +1594,14 @@ impl<'tcx> LateLintPass<'tcx> for TrivialConstraints { // Ignore projections, as they can only be global // if the trait bound is global Clause(Clause::Projection(..)) | + AliasEq(..) | // Ignore bounds that a user can't type WellFormed(..) | ObjectSafe(..) | ClosureKind(..) | Subtype(..) | Coerce(..) | + // FIXME(generic_const_exprs): `ConstEvaluatable` can be written ConstEvaluatable(..) | ConstEquate(..) | Ambiguous | diff --git a/compiler/rustc_middle/src/ty/flags.rs b/compiler/rustc_middle/src/ty/flags.rs index dc6f5851b7d88..258bc9c3e4188 100644 --- a/compiler/rustc_middle/src/ty/flags.rs +++ b/compiler/rustc_middle/src/ty/flags.rs @@ -264,10 +264,7 @@ impl FlagComputation { term, })) => { self.add_projection_ty(projection_ty); - match term.unpack() { - ty::TermKind::Ty(ty) => self.add_ty(ty), - ty::TermKind::Const(c) => self.add_const(c), - } + self.add_term(term); } ty::PredicateKind::WellFormed(arg) => { self.add_substs(slice::from_ref(&arg)); @@ -287,6 +284,10 @@ impl FlagComputation { self.add_ty(ty); } ty::PredicateKind::Ambiguous => {} + ty::PredicateKind::AliasEq(t1, t2) => { + self.add_term(t1); + self.add_term(t2); + } } } @@ -380,4 +381,11 @@ impl FlagComputation { } } } + + fn add_term(&mut self, term: ty::Term<'_>) { + match term.unpack() { + ty::TermKind::Ty(ty) => self.add_ty(ty), + ty::TermKind::Const(ct) => self.add_const(ct), + } + } } diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 09c3d5b736cf1..22ccbfd0e3a01 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -545,6 +545,7 @@ impl<'tcx> Predicate<'tcx> { | PredicateKind::Clause(Clause::RegionOutlives(_)) | PredicateKind::Clause(Clause::TypeOutlives(_)) | PredicateKind::Clause(Clause::Projection(_)) + | PredicateKind::AliasEq(..) | PredicateKind::ObjectSafe(_) | PredicateKind::ClosureKind(_, _, _) | PredicateKind::Subtype(_) @@ -632,6 +633,12 @@ pub enum PredicateKind<'tcx> { /// A marker predicate that is always ambiguous. /// Used for coherence to mark opaque types as possibly equal to each other but ambiguous. Ambiguous, + + /// Separate from `Clause::Projection` which is used for normalization in new solver. + /// This predicate requires two terms to be equal to eachother. + /// + /// Only used for new solver + AliasEq(Term<'tcx>, Term<'tcx>), } /// The crate outlives map is computed during typeck and contains the @@ -1152,6 +1159,7 @@ impl<'tcx> Predicate<'tcx> { match predicate.skip_binder() { PredicateKind::Clause(Clause::Trait(t)) => Some(predicate.rebind(t)), PredicateKind::Clause(Clause::Projection(..)) + | PredicateKind::AliasEq(..) | PredicateKind::Subtype(..) | PredicateKind::Coerce(..) | PredicateKind::Clause(Clause::RegionOutlives(..)) @@ -1171,6 +1179,7 @@ impl<'tcx> Predicate<'tcx> { match predicate.skip_binder() { PredicateKind::Clause(Clause::Projection(t)) => Some(predicate.rebind(t)), PredicateKind::Clause(Clause::Trait(..)) + | PredicateKind::AliasEq(..) | PredicateKind::Subtype(..) | PredicateKind::Coerce(..) 
| PredicateKind::Clause(Clause::RegionOutlives(..)) @@ -1191,6 +1200,7 @@ impl<'tcx> Predicate<'tcx> { PredicateKind::Clause(Clause::TypeOutlives(data)) => Some(predicate.rebind(data)), PredicateKind::Clause(Clause::Trait(..)) | PredicateKind::Clause(Clause::Projection(..)) + | PredicateKind::AliasEq(..) | PredicateKind::Subtype(..) | PredicateKind::Coerce(..) | PredicateKind::Clause(Clause::RegionOutlives(..)) diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index bbb4fd999bc76..3850ac2a6bbf3 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -2841,6 +2841,7 @@ define_print_and_forward_display! { p!("the type `", print(ty), "` is found in the environment") } ty::PredicateKind::Ambiguous => p!("ambiguous"), + ty::PredicateKind::AliasEq(t1, t2) => p!(print(t1), " == ", print(t2)), } } diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index 8df639750c701..1ef66b01ea0c6 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -177,6 +177,7 @@ impl<'tcx> fmt::Debug for ty::PredicateKind<'tcx> { write!(f, "TypeWellFormedFromEnv({:?})", ty) } ty::PredicateKind::Ambiguous => write!(f, "Ambiguous"), + ty::PredicateKind::AliasEq(t1, t2) => write!(f, "AliasEq({t1:?}, {t2:?})"), } } } diff --git a/compiler/rustc_trait_selection/src/solve/fulfill.rs b/compiler/rustc_trait_selection/src/solve/fulfill.rs index c1936b7dbe41e..a55b984fd630d 100644 --- a/compiler/rustc_trait_selection/src/solve/fulfill.rs +++ b/compiler/rustc_trait_selection/src/solve/fulfill.rs @@ -73,6 +73,11 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentCtxt<'tcx> { MismatchedProjectionTypes { err: TypeError::Mismatch }, ) } + ty::PredicateKind::AliasEq(_, _) => { + FulfillmentErrorCode::CodeProjectionError( + MismatchedProjectionTypes { err: TypeError::Mismatch }, + ) + } ty::PredicateKind::Subtype(pred) => { let (a, b) = infcx.instantiate_binder_with_placeholders( goal.predicate.kind().rebind((pred.a, pred.b)), diff --git a/compiler/rustc_trait_selection/src/solve/mod.rs b/compiler/rustc_trait_selection/src/solve/mod.rs index 9f092b6018f48..edfe95b30592a 100644 --- a/compiler/rustc_trait_selection/src/solve/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/mod.rs @@ -302,6 +302,10 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { ty::PredicateKind::TypeWellFormedFromEnv(..) => { bug!("TypeWellFormedFromEnv is only used for Chalk") } + ty::PredicateKind::AliasEq(..) => { + // FIXME(deferred_projection_equality) + todo!() + } } } else { let kind = self.infcx.instantiate_binder_with_placeholders(kind); diff --git a/compiler/rustc_trait_selection/src/traits/auto_trait.rs b/compiler/rustc_trait_selection/src/traits/auto_trait.rs index 948632ccc6c40..6a840704e8637 100644 --- a/compiler/rustc_trait_selection/src/traits/auto_trait.rs +++ b/compiler/rustc_trait_selection/src/traits/auto_trait.rs @@ -823,14 +823,17 @@ impl<'tcx> AutoTraitFinder<'tcx> { _ => return false, } } + // There's not really much we can do with these predicates - // we start out with a `ParamEnv` with no inference variables, // and these don't correspond to adding any new bounds to // the `ParamEnv`. ty::PredicateKind::WellFormed(..) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::ObjectSafe(..) | ty::PredicateKind::ClosureKind(..) | ty::PredicateKind::Subtype(..) 
+ // FIXME(generic_const_exprs): you can absolutely add this as a where clauses | ty::PredicateKind::ConstEvaluatable(..) | ty::PredicateKind::Coerce(..) | ty::PredicateKind::TypeWellFormedFromEnv(..) => {} diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs index cf1e05ada4713..4867855c2ae95 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs @@ -1278,6 +1278,11 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> { span, "TypeWellFormedFromEnv predicate should only exist in the environment" ), + + ty::PredicateKind::AliasEq(..) => span_bug!( + span, + "AliasEq predicate should never be the predicate cause of a SelectionError" + ), } } diff --git a/compiler/rustc_trait_selection/src/traits/fulfill.rs b/compiler/rustc_trait_selection/src/traits/fulfill.rs index 3adc1e62e0d48..19d47d33f671f 100644 --- a/compiler/rustc_trait_selection/src/traits/fulfill.rs +++ b/compiler/rustc_trait_selection/src/traits/fulfill.rs @@ -328,6 +328,9 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { ty::PredicateKind::TypeWellFormedFromEnv(..) => { bug!("TypeWellFormedFromEnv is only used for Chalk") } + ty::PredicateKind::AliasEq(..) => { + bug!("AliasEq is only used for new solver") + } }, Some(pred) => match pred { ty::PredicateKind::Clause(ty::Clause::Trait(data)) => { @@ -594,6 +597,9 @@ impl<'a, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'tcx> { ty::PredicateKind::TypeWellFormedFromEnv(..) => { bug!("TypeWellFormedFromEnv is only used for Chalk") } + ty::PredicateKind::AliasEq(..) => { + bug!("AliasEq is only used for new solver") + } }, } } diff --git a/compiler/rustc_trait_selection/src/traits/object_safety.rs b/compiler/rustc_trait_selection/src/traits/object_safety.rs index bafa2981a8739..977446894e770 100644 --- a/compiler/rustc_trait_selection/src/traits/object_safety.rs +++ b/compiler/rustc_trait_selection/src/traits/object_safety.rs @@ -327,6 +327,8 @@ fn predicate_references_self<'tcx>( // possible alternatives. if data.projection_ty.substs[1..].iter().any(has_self_ty) { Some(sp) } else { None } } + ty::PredicateKind::AliasEq(..) => bug!("`AliasEq` not allowed as assumption"), + ty::PredicateKind::WellFormed(..) | ty::PredicateKind::ObjectSafe(..) | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..)) @@ -334,6 +336,7 @@ fn predicate_references_self<'tcx>( | ty::PredicateKind::ClosureKind(..) | ty::PredicateKind::Subtype(..) | ty::PredicateKind::Coerce(..) + // FIXME(generic_const_exprs): this can mention `Self` | ty::PredicateKind::ConstEvaluatable(..) | ty::PredicateKind::ConstEquate(..) | ty::PredicateKind::Ambiguous @@ -368,6 +371,7 @@ fn generics_require_sized_self(tcx: TyCtxt<'_>, def_id: DefId) -> bool { | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..)) | ty::PredicateKind::ConstEvaluatable(..) | ty::PredicateKind::ConstEquate(..) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::Ambiguous | ty::PredicateKind::TypeWellFormedFromEnv(..) => false, } diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 984d6fde2686c..45c4811321a01 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -991,6 +991,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ty::PredicateKind::TypeWellFormedFromEnv(..) 
=> { bug!("TypeWellFormedFromEnv is only used for chalk") } + ty::PredicateKind::AliasEq(..) => { + bug!("AliasEq is only used for new solver") + } ty::PredicateKind::Ambiguous => Ok(EvaluatedToAmbig), } }) diff --git a/compiler/rustc_trait_selection/src/traits/wf.rs b/compiler/rustc_trait_selection/src/traits/wf.rs index 7c5e147a950f1..1136b70a0b91e 100644 --- a/compiler/rustc_trait_selection/src/traits/wf.rs +++ b/compiler/rustc_trait_selection/src/traits/wf.rs @@ -187,6 +187,9 @@ pub fn predicate_obligations<'tcx>( ty::PredicateKind::TypeWellFormedFromEnv(..) => { bug!("TypeWellFormedFromEnv is only used for Chalk") } + ty::PredicateKind::AliasEq(..) => { + bug!("We should only wf check where clauses and `AliasEq` is not a `Clause`") + } } wf.normalize(infcx) @@ -928,6 +931,7 @@ pub(crate) fn required_region_bounds<'tcx>( | ty::PredicateKind::ConstEvaluatable(..) | ty::PredicateKind::ConstEquate(..) | ty::PredicateKind::Ambiguous + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::TypeWellFormedFromEnv(..) => None, ty::PredicateKind::Clause(ty::Clause::TypeOutlives(ty::OutlivesPredicate( ref t, diff --git a/compiler/rustc_traits/src/chalk/lowering.rs b/compiler/rustc_traits/src/chalk/lowering.rs index 9c5db3314c5cd..3c031b1b5f6a8 100644 --- a/compiler/rustc_traits/src/chalk/lowering.rs +++ b/compiler/rustc_traits/src/chalk/lowering.rs @@ -116,6 +116,7 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::InEnvironment LowerInto<'tcx, chalk_ir::GoalData>> for ty::Predi // We can defer this, but ultimately we'll want to express // some of these in terms of chalk operations. ty::PredicateKind::ClosureKind(..) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::Coerce(..) | ty::PredicateKind::ConstEvaluatable(..) | ty::PredicateKind::Ambiguous @@ -642,6 +644,7 @@ impl<'tcx> LowerInto<'tcx, Option None, ty::PredicateKind::ObjectSafe(..) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::ClosureKind(..) | ty::PredicateKind::Subtype(..) | ty::PredicateKind::Coerce(..) @@ -775,6 +778,7 @@ impl<'tcx> LowerInto<'tcx, Option None, ty::PredicateKind::Clause(ty::Clause::RegionOutlives(..)) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::ObjectSafe(..) | ty::PredicateKind::ClosureKind(..) | ty::PredicateKind::Subtype(..) diff --git a/compiler/rustc_traits/src/implied_outlives_bounds.rs b/compiler/rustc_traits/src/implied_outlives_bounds.rs index 2c6c77072e60e..93f9b66e0f855 100644 --- a/compiler/rustc_traits/src/implied_outlives_bounds.rs +++ b/compiler/rustc_traits/src/implied_outlives_bounds.rs @@ -85,7 +85,8 @@ fn compute_implied_outlives_bounds<'tcx>( // learn anything new from those. if obligation.predicate.has_non_region_infer() { match obligation.predicate.kind().skip_binder() { - ty::PredicateKind::Clause(ty::Clause::Projection(..)) => { + ty::PredicateKind::Clause(ty::Clause::Projection(..)) + | ty::PredicateKind::AliasEq(..) => { ocx.register_obligation(obligation.clone()); } _ => {} @@ -106,6 +107,7 @@ fn compute_implied_outlives_bounds<'tcx>( | ty::PredicateKind::ConstEvaluatable(..) | ty::PredicateKind::ConstEquate(..) | ty::PredicateKind::Ambiguous + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::TypeWellFormedFromEnv(..) 
=> {} // We need to search through *all* WellFormed predicates diff --git a/compiler/rustc_traits/src/normalize_erasing_regions.rs b/compiler/rustc_traits/src/normalize_erasing_regions.rs index 5cad2c2ccb0f7..07e716cda42cc 100644 --- a/compiler/rustc_traits/src/normalize_erasing_regions.rs +++ b/compiler/rustc_traits/src/normalize_erasing_regions.rs @@ -60,6 +60,7 @@ fn not_outlives_predicate(p: ty::Predicate<'_>) -> bool { | ty::PredicateKind::Clause(ty::Clause::TypeOutlives(..)) => false, ty::PredicateKind::Clause(ty::Clause::Trait(..)) | ty::PredicateKind::Clause(ty::Clause::Projection(..)) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::WellFormed(..) | ty::PredicateKind::ObjectSafe(..) | ty::PredicateKind::ClosureKind(..) diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 80493b100bb45..4acc9fb3d6257 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -310,10 +310,12 @@ pub(crate) fn clean_predicate<'tcx>( ty::PredicateKind::Clause(ty::Clause::Projection(pred)) => { Some(clean_projection_predicate(bound_predicate.rebind(pred), cx)) } + // FIXME(generic_const_exprs): should this do something? ty::PredicateKind::ConstEvaluatable(..) => None, ty::PredicateKind::WellFormed(..) => None, ty::PredicateKind::Subtype(..) + | ty::PredicateKind::AliasEq(..) | ty::PredicateKind::Coerce(..) | ty::PredicateKind::ObjectSafe(..) | ty::PredicateKind::ClosureKind(..) diff --git a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs index 727058780752e..26b1d01974990 100644 --- a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs +++ b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs @@ -36,6 +36,7 @@ pub fn is_min_const_fn<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, msrv: &Msrv) | ty::PredicateKind::ConstEvaluatable(..) | ty::PredicateKind::ConstEquate(..) | ty::PredicateKind::TypeWellFormedFromEnv(..) => continue, + ty::PredicateKind::AliasEq(..) => panic!("alias eq predicate on function: {predicate:#?}"), ty::PredicateKind::ObjectSafe(_) => panic!("object safe predicate on function: {predicate:#?}"), ty::PredicateKind::ClosureKind(..) => panic!("closure kind predicate on function: {predicate:#?}"), ty::PredicateKind::Subtype(_) => panic!("subtype predicate on function: {predicate:#?}"), From a03da2bdeb7c7b466b7b4a9b361d1d19c171a589 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Thu, 9 Feb 2023 20:45:01 +0100 Subject: [PATCH 385/501] Add test for implementation on projection --- .../ui/const-generics/wrong-normalization.rs | 19 +++++++++++++++++++ .../const-generics/wrong-normalization.stderr | 11 +++++++++++ 2 files changed, 30 insertions(+) create mode 100644 tests/ui/const-generics/wrong-normalization.rs create mode 100644 tests/ui/const-generics/wrong-normalization.stderr diff --git a/tests/ui/const-generics/wrong-normalization.rs b/tests/ui/const-generics/wrong-normalization.rs new file mode 100644 index 0000000000000..f1ce317b3f78b --- /dev/null +++ b/tests/ui/const-generics/wrong-normalization.rs @@ -0,0 +1,19 @@ +// This test ensures that if implementation on projections is supported, +// it doesn't end in very weird cycle error. 
+ +#![crate_type = "lib"] + +pub trait Identity { + type Identity: ?Sized; +} + +impl Identity for T { + type Identity = Self; +} + +pub struct I8; + +impl as Identity>::Identity { +//~^ ERROR no nominal type found for inherent implementation + pub fn foo(&self) {} +} diff --git a/tests/ui/const-generics/wrong-normalization.stderr b/tests/ui/const-generics/wrong-normalization.stderr new file mode 100644 index 0000000000000..fb806bdb1e747 --- /dev/null +++ b/tests/ui/const-generics/wrong-normalization.stderr @@ -0,0 +1,11 @@ +error[E0118]: no nominal type found for inherent implementation + --> $DIR/wrong-normalization.rs:16:6 + | +LL | impl as Identity>::Identity { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ impl requires a nominal type + | + = note: either implement a trait on it or create a newtype to wrap it instead + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0118`. From 8f5deb4ff2e38896599866b1a33e1fffb0b7304a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 10 Feb 2023 14:34:58 +0100 Subject: [PATCH 386/501] Remove a few allocations in hir-ty::utils --- crates/hir-ty/src/display.rs | 2 +- crates/hir-ty/src/lower.rs | 6 +- crates/hir-ty/src/utils.rs | 181 ++++++++++++++++++----------------- 3 files changed, 96 insertions(+), 93 deletions(-) diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 462c9b4575950..5fcbdf34f3cbb 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -927,7 +927,7 @@ impl HirDisplay for CallableSig { } } -fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator { +fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator + '_ { let krate = trait_.lookup(db).container.krate(); utils::fn_traits(db, krate) } diff --git a/crates/hir-ty/src/lower.rs b/crates/hir-ty/src/lower.rs index 86abe1af68a60..7cce13a793e02 100644 --- a/crates/hir-ty/src/lower.rs +++ b/crates/hir-ty/src/lower.rs @@ -1235,7 +1235,7 @@ fn named_associated_type_shorthand_candidates( mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option, ) -> Option { let mut search = |t| { - for t in all_super_trait_refs(db, t) { + all_super_trait_refs(db, t, |t| { let data = db.trait_data(t.hir_trait_id()); for (name, assoc_id) in &data.items { @@ -1245,8 +1245,8 @@ fn named_associated_type_shorthand_candidates( } } } - } - None + None + }) }; match res { diff --git a/crates/hir-ty/src/utils.rs b/crates/hir-ty/src/utils.rs index 396cba89b67d7..70d2d5efa6cde 100644 --- a/crates/hir-ty/src/utils.rs +++ b/crates/hir-ty/src/utils.rs @@ -27,23 +27,84 @@ use crate::{ db::HirDatabase, ChalkTraitId, Interner, Substitution, TraitRef, TraitRefExt, WhereClause, }; -pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: CrateId) -> impl Iterator { - [ - db.lang_item(krate, LangItem::Fn), - db.lang_item(krate, LangItem::FnMut), - db.lang_item(krate, LangItem::FnOnce), - ] - .into_iter() - .flatten() - .flat_map(|it| it.as_trait()) +pub(crate) fn fn_traits( + db: &dyn DefDatabase, + krate: CrateId, +) -> impl Iterator + '_ { + [LangItem::Fn, LangItem::FnMut, LangItem::FnOnce] + .into_iter() + .filter_map(move |lang| db.lang_item(krate, lang)) + .flat_map(|it| it.as_trait()) } -fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> { +/// Returns an iterator over the whole super trait hierarchy (including the +/// trait itself). 
+pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> { + // we need to take care a bit here to avoid infinite loops in case of cycles + // (i.e. if we have `trait A: B; trait B: A;`) + + let mut result = smallvec![trait_]; + let mut i = 0; + while let Some(&t) = result.get(i) { + // yeah this is quadratic, but trait hierarchies should be flat + // enough that this doesn't matter + direct_super_traits(db, t, |tt| { + if !result.contains(&tt) { + result.push(tt); + } + }); + i += 1; + } + result +} + +/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for +/// super traits. The original trait ref will be included. So the difference to +/// `all_super_traits` is that we keep track of type parameters; for example if +/// we have `Self: Trait` and `Trait: OtherTrait` we'll get +/// `Self: OtherTrait`. +pub(super) fn all_super_trait_refs( + db: &dyn HirDatabase, + trait_ref: TraitRef, + cb: impl FnMut(TraitRef) -> Option, +) -> Option { + let seen = iter::once(trait_ref.trait_id).collect(); + let mut stack = Vec::new(); + stack.push(trait_ref); + SuperTraits { db, seen, stack }.find_map(cb) +} + +struct SuperTraits<'a> { + db: &'a dyn HirDatabase, + stack: Vec, + seen: FxHashSet, +} + +impl<'a> SuperTraits<'a> { + fn elaborate(&mut self, trait_ref: &TraitRef) { + direct_super_trait_refs(self.db, trait_ref, |trait_ref| { + if !self.seen.contains(&trait_ref.trait_id) { + self.stack.push(trait_ref); + } + }); + } +} + +impl<'a> Iterator for SuperTraits<'a> { + type Item = TraitRef; + + fn next(&mut self) -> Option { + if let Some(next) = self.stack.pop() { + self.elaborate(&next); + Some(next) + } else { + None + } + } +} + +fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) { let resolver = trait_.resolver(db); - // returning the iterator directly doesn't easily work because of - // lifetime problems, but since there usually shouldn't be more than a - // few direct traits this should be fine (we could even use some kind of - // SmallVec if performance is a concern) let generic_params = db.generic_params(trait_.into()); let trait_self = generic_params.find_trait_self_param(); generic_params @@ -73,18 +134,14 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[Trait Some(TypeNs::TraitId(t)) => Some(t), _ => None, }) - .collect() + .for_each(cb); } -fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec { - // returning the iterator directly doesn't easily work because of - // lifetime problems, but since there usually shouldn't be more than a - // few direct traits this should be fine (we could even use some kind of - // SmallVec if performance is a concern) +fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef, cb: impl FnMut(TraitRef)) { let generic_params = db.generic_params(trait_ref.hir_trait_id().into()); let trait_self = match generic_params.find_trait_self_param() { Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p }, - None => return Vec::new(), + None => return, }; db.generic_predicates_for_param(trait_self.parent, trait_self, None) .iter() @@ -100,64 +157,7 @@ fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec SmallVec<[TraitId; 4]> { - // we need to take care a bit here to avoid infinite loops in case of cycles - // (i.e. 
if we have `trait A: B; trait B: A;`) - - let mut result = smallvec![trait_]; - let mut i = 0; - while let Some(&t) = result.get(i) { - // yeah this is quadratic, but trait hierarchies should be flat - // enough that this doesn't matter - for tt in direct_super_traits(db, t) { - if !result.contains(&tt) { - result.push(tt); - } - } - i += 1; - } - result -} - -/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for -/// super traits. The original trait ref will be included. So the difference to -/// `all_super_traits` is that we keep track of type parameters; for example if -/// we have `Self: Trait` and `Trait: OtherTrait` we'll get -/// `Self: OtherTrait`. -pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> SuperTraits<'_> { - SuperTraits { db, seen: iter::once(trait_ref.trait_id).collect(), stack: vec![trait_ref] } -} - -pub(super) struct SuperTraits<'a> { - db: &'a dyn HirDatabase, - stack: Vec, - seen: FxHashSet, -} - -impl<'a> SuperTraits<'a> { - fn elaborate(&mut self, trait_ref: &TraitRef) { - let mut trait_refs = direct_super_trait_refs(self.db, trait_ref); - trait_refs.retain(|tr| !self.seen.contains(&tr.trait_id)); - self.stack.extend(trait_refs); - } -} - -impl<'a> Iterator for SuperTraits<'a> { - type Item = TraitRef; - - fn next(&mut self) -> Option { - if let Some(next) = self.stack.pop() { - self.elaborate(&next); - Some(next) - } else { - None - } - } + .for_each(cb); } pub(super) fn associated_type_by_name_including_super_traits( @@ -165,7 +165,7 @@ pub(super) fn associated_type_by_name_including_super_traits( trait_ref: TraitRef, name: &Name, ) -> Option<(TraitRef, TypeAliasId)> { - all_super_trait_refs(db, trait_ref).find_map(|t| { + all_super_trait_refs(db, trait_ref, |t| { let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?; Some((t, assoc_type)) }) @@ -238,15 +238,18 @@ impl Generics { /// (parent total, self param, type param list, const param list, impl trait) pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize) { - let ty_iter = || self.params.iter().filter_map(|x| x.1.type_param()); - - let self_params = - ty_iter().filter(|p| p.provenance == TypeParamProvenance::TraitSelf).count(); - let type_params = - ty_iter().filter(|p| p.provenance == TypeParamProvenance::TypeParamList).count(); - let impl_trait_params = - ty_iter().filter(|p| p.provenance == TypeParamProvenance::ArgumentImplTrait).count(); - let const_params = self.params.iter().filter_map(|x| x.1.const_param()).count(); + let mut self_params = 0; + let mut type_params = 0; + let mut impl_trait_params = 0; + let mut const_params = 0; + self.params.iter().for_each(|(_, data)| match data { + TypeOrConstParamData::TypeParamData(p) => match p.provenance { + TypeParamProvenance::TypeParamList => type_params += 1, + TypeParamProvenance::TraitSelf => self_params += 1, + TypeParamProvenance::ArgumentImplTrait => impl_trait_params += 1, + }, + TypeOrConstParamData::ConstParamData(_) => const_params += 1, + }); let parent_len = self.parent_generics().map_or(0, Generics::len); (parent_len, self_params, type_params, const_params, impl_trait_params) From 623ed8e481204b2ed3876084f765ac4fb59798b5 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 10 Feb 2023 15:07:55 +0100 Subject: [PATCH 387/501] fix rustc_log doctest --- compiler/rustc_log/src/lib.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/rustc_log/src/lib.rs b/compiler/rustc_log/src/lib.rs index e3d850e6a3b02..de26fd61e4d60 
100644 --- a/compiler/rustc_log/src/lib.rs +++ b/compiler/rustc_log/src/lib.rs @@ -14,7 +14,7 @@ //! //! ``` //! fn main() { -//! rustc_log::init_rustc_env_logger().unwrap(); +//! rustc_log::init_env_logger("LOG").unwrap(); //! //! let edition = rustc_span::edition::Edition::Edition2021; //! rustc_span::create_session_globals_then(edition, || { @@ -23,9 +23,9 @@ //! } //! ``` //! -//! Now `RUSTC_LOG=debug cargo run` will run your minimal main.rs and show +//! Now `LOG=debug cargo run` will run your minimal main.rs and show //! rustc's debug logging. In a workflow like this, one might also add -//! `std::env::set_var("RUSTC_LOG", "debug")` to the top of main so that `cargo +//! `std::env::set_var("LOG", "debug")` to the top of main so that `cargo //! run` by itself is sufficient to get logs. //! //! The reason rustc_log is a tiny separate crate, as opposed to exposing the From 3a72238aa6c78d071f4b10d60c76550ccea6493f Mon Sep 17 00:00:00 2001 From: lcnr Date: Fri, 10 Feb 2023 14:58:49 +0100 Subject: [PATCH 388/501] revert #107074 --- compiler/rustc_const_eval/src/transform/validate.rs | 11 ++++++++++- tests/ui/impl-trait/nested-return-type2.rs | 3 +++ tests/ui/impl-trait/nested-return-type2.stderr | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_const_eval/src/transform/validate.rs b/compiler/rustc_const_eval/src/transform/validate.rs index 76b316cdf0c3f..56c60d59d2858 100644 --- a/compiler/rustc_const_eval/src/transform/validate.rs +++ b/compiler/rustc_const_eval/src/transform/validate.rs @@ -13,7 +13,7 @@ use rustc_middle::mir::{ RetagKind, RuntimePhase, Rvalue, SourceScope, Statement, StatementKind, Terminator, TerminatorKind, UnOp, START_BLOCK, }; -use rustc_middle::ty::{self, InstanceDef, ParamEnv, Ty, TyCtxt}; +use rustc_middle::ty::{self, InstanceDef, ParamEnv, Ty, TyCtxt, TypeVisitable}; use rustc_mir_dataflow::impls::MaybeStorageLive; use rustc_mir_dataflow::storage::always_storage_live_locals; use rustc_mir_dataflow::{Analysis, ResultsCursor}; @@ -231,6 +231,15 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { return true; } + // We sometimes have to use `defining_opaque_types` for subtyping + // to succeed here and figuring out how exactly that should work + // is annoying. It is harmless enough to just not validate anything + // in that case. We still check this after analysis as all opque + // types have been revealed at this point. + if (src, dest).has_opaque_types() { + return true; + } + crate::util::is_subtype(self.tcx, self.param_env, src, dest) } } diff --git a/tests/ui/impl-trait/nested-return-type2.rs b/tests/ui/impl-trait/nested-return-type2.rs index cc1f1f4ec44c8..fe883ce6fc8ed 100644 --- a/tests/ui/impl-trait/nested-return-type2.rs +++ b/tests/ui/impl-trait/nested-return-type2.rs @@ -1,4 +1,7 @@ // check-pass +// compile-flags: -Zvalidate-mir + +// Using -Zvalidate-mir as a regression test for #107346. 
trait Duh {} diff --git a/tests/ui/impl-trait/nested-return-type2.stderr b/tests/ui/impl-trait/nested-return-type2.stderr index 3aed05ca13298..09ad3bd05c1b3 100644 --- a/tests/ui/impl-trait/nested-return-type2.stderr +++ b/tests/ui/impl-trait/nested-return-type2.stderr @@ -1,5 +1,5 @@ warning: opaque type `impl Trait` does not satisfy its associated type bounds - --> $DIR/nested-return-type2.rs:25:24 + --> $DIR/nested-return-type2.rs:28:24 | LL | type Assoc: Duh; | --- this associated type bound is unsatisfied for `impl Send` From 1f89e2aef272e1222adc4bf95de2baf802c92a2d Mon Sep 17 00:00:00 2001 From: Boxy Date: Fri, 10 Feb 2023 14:29:28 +0000 Subject: [PATCH 389/501] emit `AliasEq` when relating type and const aliases --- .../src/type_check/relate_tys.rs | 6 +- .../src/infer/canonical/query_response.rs | 6 +- compiler/rustc_infer/src/infer/combine.rs | 87 ++++++++++++------ compiler/rustc_infer/src/infer/equate.rs | 17 +++- compiler/rustc_infer/src/infer/glb.rs | 22 ++--- compiler/rustc_infer/src/infer/lattice.rs | 11 ++- compiler/rustc_infer/src/infer/lub.rs | 28 +++--- compiler/rustc_infer/src/infer/mod.rs | 1 + .../rustc_infer/src/infer/nll_relate/mod.rs | 89 +++++-------------- compiler/rustc_infer/src/infer/sub.rs | 18 ++-- compiler/rustc_middle/src/ty/context.rs | 4 + .../traits/error_reporting/method_chain.rs | 15 ++++ 12 files changed, 157 insertions(+), 147 deletions(-) diff --git a/compiler/rustc_borrowck/src/type_check/relate_tys.rs b/compiler/rustc_borrowck/src/type_check/relate_tys.rs index b2702eafd33bd..8dd06187877c8 100644 --- a/compiler/rustc_borrowck/src/type_check/relate_tys.rs +++ b/compiler/rustc_borrowck/src/type_check/relate_tys.rs @@ -1,4 +1,4 @@ -use rustc_infer::infer::nll_relate::{NormalizationStrategy, TypeRelating, TypeRelatingDelegate}; +use rustc_infer::infer::nll_relate::{TypeRelating, TypeRelatingDelegate}; use rustc_infer::infer::NllRegionVariableOrigin; use rustc_infer::traits::PredicateObligations; use rustc_middle::mir::ConstraintCategory; @@ -140,10 +140,6 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> ); } - fn normalization() -> NormalizationStrategy { - NormalizationStrategy::Eager - } - fn forbid_inference_vars() -> bool { true } diff --git a/compiler/rustc_infer/src/infer/canonical/query_response.rs b/compiler/rustc_infer/src/infer/canonical/query_response.rs index 7cc9e49b1b62a..0c97217bd6a5d 100644 --- a/compiler/rustc_infer/src/infer/canonical/query_response.rs +++ b/compiler/rustc_infer/src/infer/canonical/query_response.rs @@ -12,7 +12,7 @@ use crate::infer::canonical::{ Canonical, CanonicalQueryResponse, CanonicalVarValues, Certainty, OriginalQueryValues, QueryOutlivesConstraint, QueryRegionConstraints, QueryResponse, }; -use crate::infer::nll_relate::{NormalizationStrategy, TypeRelating, TypeRelatingDelegate}; +use crate::infer::nll_relate::{TypeRelating, TypeRelatingDelegate}; use crate::infer::region_constraints::{Constraint, RegionConstraintData}; use crate::infer::{InferCtxt, InferOk, InferResult, NllRegionVariableOrigin}; use crate::traits::query::{Fallible, NoSolution}; @@ -717,10 +717,6 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for QueryTypeRelatingDelegate<'_, 'tcx> { }); } - fn normalization() -> NormalizationStrategy { - NormalizationStrategy::Eager - } - fn forbid_inference_vars() -> bool { true } diff --git a/compiler/rustc_infer/src/infer/combine.rs b/compiler/rustc_infer/src/infer/combine.rs index a567b6acdbeeb..cb1e14aaeaf9f 100644 --- a/compiler/rustc_infer/src/infer/combine.rs +++ 
b/compiler/rustc_infer/src/infer/combine.rs @@ -38,8 +38,8 @@ use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::relate::{self, Relate, RelateResult, TypeRelation}; use rustc_middle::ty::subst::SubstsRef; use rustc_middle::ty::{ - self, FallibleTypeFolder, InferConst, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable, - TypeVisitable, + self, AliasKind, FallibleTypeFolder, InferConst, ToPredicate, Ty, TyCtxt, TypeFoldable, + TypeSuperFoldable, TypeVisitable, }; use rustc_middle::ty::{IntType, UintType}; use rustc_span::{Span, DUMMY_SP}; @@ -74,7 +74,7 @@ impl<'tcx> InferCtxt<'tcx> { b: Ty<'tcx>, ) -> RelateResult<'tcx, Ty<'tcx>> where - R: TypeRelation<'tcx>, + R: ObligationEmittingRelation<'tcx>, { let a_is_expected = relation.a_is_expected(); @@ -122,6 +122,15 @@ impl<'tcx> InferCtxt<'tcx> { Err(TypeError::Sorts(ty::relate::expected_found(relation, a, b))) } + (ty::Alias(AliasKind::Projection, _), _) if self.tcx.trait_solver_next() => { + relation.register_type_equate_obligation(a.into(), b.into()); + Ok(b) + } + (_, ty::Alias(AliasKind::Projection, _)) if self.tcx.trait_solver_next() => { + relation.register_type_equate_obligation(b.into(), a.into()); + Ok(a) + } + _ => ty::relate::super_relate_tys(relation, a, b), } } @@ -133,7 +142,7 @@ impl<'tcx> InferCtxt<'tcx> { b: ty::Const<'tcx>, ) -> RelateResult<'tcx, ty::Const<'tcx>> where - R: ConstEquateRelation<'tcx>, + R: ObligationEmittingRelation<'tcx>, { debug!("{}.consts({:?}, {:?})", relation.tag(), a, b); if a == b { @@ -169,7 +178,7 @@ impl<'tcx> InferCtxt<'tcx> { // FIXME(#59490): Need to remove the leak check to accommodate // escaping bound variables here. if !a.has_escaping_bound_vars() && !b.has_escaping_bound_vars() { - relation.const_equate_obligation(a, b); + relation.register_const_equate_obligation(a, b); } return Ok(b); } @@ -177,7 +186,7 @@ impl<'tcx> InferCtxt<'tcx> { // FIXME(#59490): Need to remove the leak check to accommodate // escaping bound variables here. 
if !a.has_escaping_bound_vars() && !b.has_escaping_bound_vars() { - relation.const_equate_obligation(a, b); + relation.register_const_equate_obligation(a, b); } return Ok(a); } @@ -435,32 +444,21 @@ impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> { Ok(Generalization { ty, needs_wf }) } - pub fn add_const_equate_obligation( + pub fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>) { + self.obligations.extend(obligations.into_iter()); + } + + pub fn register_predicates( &mut self, - a_is_expected: bool, - a: ty::Const<'tcx>, - b: ty::Const<'tcx>, + obligations: impl IntoIterator>, ) { - let predicate = if a_is_expected { - ty::PredicateKind::ConstEquate(a, b) - } else { - ty::PredicateKind::ConstEquate(b, a) - }; - self.obligations.push(Obligation::new( - self.tcx(), - self.trace.cause.clone(), - self.param_env, - ty::Binder::dummy(predicate), - )); + self.obligations.extend(obligations.into_iter().map(|to_pred| { + Obligation::new(self.infcx.tcx, self.trace.cause.clone(), self.param_env, to_pred) + })) } pub fn mark_ambiguous(&mut self) { - self.obligations.push(Obligation::new( - self.tcx(), - self.trace.cause.clone(), - self.param_env, - ty::Binder::dummy(ty::PredicateKind::Ambiguous), - )); + self.register_predicates([ty::Binder::dummy(ty::PredicateKind::Ambiguous)]); } } @@ -775,11 +773,42 @@ impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> { } } -pub trait ConstEquateRelation<'tcx>: TypeRelation<'tcx> { +pub trait ObligationEmittingRelation<'tcx>: TypeRelation<'tcx> { + /// Register obligations that must hold in order for this relation to hold + fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>); + + /// Register predicates that must hold in order for this relation to hold. Uses + /// a default obligation cause, [`ObligationEmittingRelation::register_obligations`] should + /// be used if control over the obligaton causes is required. + fn register_predicates( + &mut self, + obligations: impl IntoIterator>, + ); + /// Register an obligation that both constants must be equal to each other. /// /// If they aren't equal then the relation doesn't hold. - fn const_equate_obligation(&mut self, a: ty::Const<'tcx>, b: ty::Const<'tcx>); + fn register_const_equate_obligation(&mut self, a: ty::Const<'tcx>, b: ty::Const<'tcx>) { + let (a, b) = if self.a_is_expected() { (a, b) } else { (b, a) }; + + self.register_predicates([ty::Binder::dummy(if self.tcx().trait_solver_next() { + ty::PredicateKind::AliasEq(a.into(), b.into()) + } else { + ty::PredicateKind::ConstEquate(a, b) + })]); + } + + /// Register an obligation that both types must be equal to each other. + /// + /// If they aren't equal then the relation doesn't hold. 
+ fn register_type_equate_obligation(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) { + let (a, b) = if self.a_is_expected() { (a, b) } else { (b, a) }; + + self.register_predicates([ty::Binder::dummy(ty::PredicateKind::AliasEq( + a.into(), + b.into(), + ))]); + } } fn int_unification_error<'tcx>( diff --git a/compiler/rustc_infer/src/infer/equate.rs b/compiler/rustc_infer/src/infer/equate.rs index 7db4d92a177a1..742c01efff603 100644 --- a/compiler/rustc_infer/src/infer/equate.rs +++ b/compiler/rustc_infer/src/infer/equate.rs @@ -1,4 +1,6 @@ -use super::combine::{CombineFields, ConstEquateRelation, RelationDir}; +use crate::traits::PredicateObligations; + +use super::combine::{CombineFields, ObligationEmittingRelation, RelationDir}; use super::Subtype; use rustc_middle::ty::relate::{self, Relate, RelateResult, TypeRelation}; @@ -198,8 +200,15 @@ impl<'tcx> TypeRelation<'tcx> for Equate<'_, '_, 'tcx> { } } -impl<'tcx> ConstEquateRelation<'tcx> for Equate<'_, '_, 'tcx> { - fn const_equate_obligation(&mut self, a: ty::Const<'tcx>, b: ty::Const<'tcx>) { - self.fields.add_const_equate_obligation(self.a_is_expected, a, b); +impl<'tcx> ObligationEmittingRelation<'tcx> for Equate<'_, '_, 'tcx> { + fn register_predicates( + &mut self, + obligations: impl IntoIterator>, + ) { + self.fields.register_predicates(obligations); + } + + fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>) { + self.fields.register_obligations(obligations); } } diff --git a/compiler/rustc_infer/src/infer/glb.rs b/compiler/rustc_infer/src/infer/glb.rs index b92b162a9786a..74abca7bbea36 100644 --- a/compiler/rustc_infer/src/infer/glb.rs +++ b/compiler/rustc_infer/src/infer/glb.rs @@ -1,12 +1,11 @@ //! Greatest lower bound. See [`lattice`]. -use super::combine::CombineFields; +use super::combine::{CombineFields, ObligationEmittingRelation}; use super::lattice::{self, LatticeDir}; use super::InferCtxt; use super::Subtype; -use crate::infer::combine::ConstEquateRelation; -use crate::traits::{ObligationCause, PredicateObligation}; +use crate::traits::{ObligationCause, PredicateObligations}; use rustc_middle::ty::relate::{Relate, RelateResult, TypeRelation}; use rustc_middle::ty::{self, Ty, TyCtxt}; @@ -136,10 +135,6 @@ impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Glb<'combine, 'infcx, &self.fields.trace.cause } - fn add_obligations(&mut self, obligations: Vec>) { - self.fields.obligations.extend(obligations) - } - fn relate_bound(&mut self, v: Ty<'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, ()> { let mut sub = self.fields.sub(self.a_is_expected); sub.relate(v, a)?; @@ -152,8 +147,15 @@ impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Glb<'combine, 'infcx, } } -impl<'tcx> ConstEquateRelation<'tcx> for Glb<'_, '_, 'tcx> { - fn const_equate_obligation(&mut self, a: ty::Const<'tcx>, b: ty::Const<'tcx>) { - self.fields.add_const_equate_obligation(self.a_is_expected, a, b); +impl<'tcx> ObligationEmittingRelation<'tcx> for Glb<'_, '_, 'tcx> { + fn register_predicates( + &mut self, + obligations: impl IntoIterator>, + ) { + self.fields.register_predicates(obligations); + } + + fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>) { + self.fields.register_obligations(obligations); } } diff --git a/compiler/rustc_infer/src/infer/lattice.rs b/compiler/rustc_infer/src/infer/lattice.rs index 4dbb4b4d7b4da..f377ac1d19e9c 100644 --- a/compiler/rustc_infer/src/infer/lattice.rs +++ b/compiler/rustc_infer/src/infer/lattice.rs @@ -17,11 +17,12 @@ //! //! 
[lattices]: https://en.wikipedia.org/wiki/Lattice_(order) +use super::combine::ObligationEmittingRelation; use super::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use super::InferCtxt; -use crate::traits::{ObligationCause, PredicateObligation}; -use rustc_middle::ty::relate::{RelateResult, TypeRelation}; +use crate::traits::ObligationCause; +use rustc_middle::ty::relate::RelateResult; use rustc_middle::ty::TyVar; use rustc_middle::ty::{self, Ty}; @@ -30,13 +31,11 @@ use rustc_middle::ty::{self, Ty}; /// /// GLB moves "down" the lattice (to smaller values); LUB moves /// "up" the lattice (to bigger values). -pub trait LatticeDir<'f, 'tcx>: TypeRelation<'tcx> { +pub trait LatticeDir<'f, 'tcx>: ObligationEmittingRelation<'tcx> { fn infcx(&self) -> &'f InferCtxt<'tcx>; fn cause(&self) -> &ObligationCause<'tcx>; - fn add_obligations(&mut self, obligations: Vec>); - fn define_opaque_types(&self) -> bool; // Relates the type `v` to `a` and `b` such that `v` represents @@ -113,7 +112,7 @@ where | (_, &ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. })) if this.define_opaque_types() && def_id.is_local() => { - this.add_obligations( + this.register_obligations( infcx .handle_opaque_type(a, b, this.a_is_expected(), this.cause(), this.param_env())? .obligations, diff --git a/compiler/rustc_infer/src/infer/lub.rs b/compiler/rustc_infer/src/infer/lub.rs index f6e0554fd1f95..f997171b97f27 100644 --- a/compiler/rustc_infer/src/infer/lub.rs +++ b/compiler/rustc_infer/src/infer/lub.rs @@ -1,12 +1,11 @@ //! Least upper bound. See [`lattice`]. -use super::combine::CombineFields; +use super::combine::{CombineFields, ObligationEmittingRelation}; use super::lattice::{self, LatticeDir}; use super::InferCtxt; use super::Subtype; -use crate::infer::combine::ConstEquateRelation; -use crate::traits::{ObligationCause, PredicateObligation}; +use crate::traits::{ObligationCause, PredicateObligations}; use rustc_middle::ty::relate::{Relate, RelateResult, TypeRelation}; use rustc_middle::ty::{self, Ty, TyCtxt}; @@ -127,12 +126,6 @@ impl<'tcx> TypeRelation<'tcx> for Lub<'_, '_, 'tcx> { } } -impl<'tcx> ConstEquateRelation<'tcx> for Lub<'_, '_, 'tcx> { - fn const_equate_obligation(&mut self, a: ty::Const<'tcx>, b: ty::Const<'tcx>) { - self.fields.add_const_equate_obligation(self.a_is_expected, a, b); - } -} - impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Lub<'combine, 'infcx, 'tcx> { fn infcx(&self) -> &'infcx InferCtxt<'tcx> { self.fields.infcx @@ -142,10 +135,6 @@ impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Lub<'combine, 'infcx, &self.fields.trace.cause } - fn add_obligations(&mut self, obligations: Vec>) { - self.fields.obligations.extend(obligations) - } - fn relate_bound(&mut self, v: Ty<'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, ()> { let mut sub = self.fields.sub(self.a_is_expected); sub.relate(a, v)?; @@ -157,3 +146,16 @@ impl<'combine, 'infcx, 'tcx> LatticeDir<'infcx, 'tcx> for Lub<'combine, 'infcx, self.fields.define_opaque_types } } + +impl<'tcx> ObligationEmittingRelation<'tcx> for Lub<'_, '_, 'tcx> { + fn register_predicates( + &mut self, + obligations: impl IntoIterator>, + ) { + self.fields.register_predicates(obligations); + } + + fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>) { + self.fields.register_obligations(obligations) + } +} diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs index 35918b8bae1c2..e77924900a071 100644 --- a/compiler/rustc_infer/src/infer/mod.rs +++ 
b/compiler/rustc_infer/src/infer/mod.rs @@ -4,6 +4,7 @@ pub use self::LateBoundRegionConversionTime::*; pub use self::RegionVariableOrigin::*; pub use self::SubregionOrigin::*; pub use self::ValuePairs::*; +pub use combine::ObligationEmittingRelation; use self::opaque_types::OpaqueTypeStorage; pub(crate) use self::undo_log::{InferCtxtUndoLogs, Snapshot, UndoLog}; diff --git a/compiler/rustc_infer/src/infer/nll_relate/mod.rs b/compiler/rustc_infer/src/infer/nll_relate/mod.rs index a2cfe8d88816c..1dd5062acaf05 100644 --- a/compiler/rustc_infer/src/infer/nll_relate/mod.rs +++ b/compiler/rustc_infer/src/infer/nll_relate/mod.rs @@ -21,11 +21,10 @@ //! thing we relate in chalk are basically domain goals and their //! constituents) -use crate::infer::combine::ConstEquateRelation; use crate::infer::InferCtxt; use crate::infer::{ConstVarValue, ConstVariableValue}; use crate::infer::{TypeVariableOrigin, TypeVariableOriginKind}; -use crate::traits::{Obligation, PredicateObligation}; +use crate::traits::{Obligation, PredicateObligations}; use rustc_data_structures::fx::FxHashMap; use rustc_middle::traits::ObligationCause; use rustc_middle::ty::error::TypeError; @@ -36,11 +35,7 @@ use rustc_span::Span; use std::fmt::Debug; use std::ops::ControlFlow; -#[derive(PartialEq)] -pub enum NormalizationStrategy { - Lazy, - Eager, -} +use super::combine::ObligationEmittingRelation; pub struct TypeRelating<'me, 'tcx, D> where @@ -92,7 +87,7 @@ pub trait TypeRelatingDelegate<'tcx> { info: ty::VarianceDiagInfo<'tcx>, ); - fn register_obligations(&mut self, obligations: Vec>); + fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>); /// Creates a new universe index. Used when instantiating placeholders. fn create_next_universe(&mut self) -> ty::UniverseIndex; @@ -125,9 +120,6 @@ pub trait TypeRelatingDelegate<'tcx> { /// relation stating that `'?0: 'a`). fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx>; - /// Define the normalization strategy to use, eager or lazy. - fn normalization() -> NormalizationStrategy; - /// Enables some optimizations if we do not expect inference variables /// in the RHS of the relation. fn forbid_inference_vars() -> bool; @@ -265,38 +257,6 @@ where self.delegate.push_outlives(sup, sub, info); } - /// Relate a projection type and some value type lazily. This will always - /// succeed, but we push an additional `ProjectionEq` goal depending - /// on the value type: - /// - if the value type is any type `T` which is not a projection, we push - /// `ProjectionEq(projection = T)`. - /// - if the value type is another projection `other_projection`, we create - /// a new inference variable `?U` and push the two goals - /// `ProjectionEq(projection = ?U)`, `ProjectionEq(other_projection = ?U)`. - fn relate_projection_ty( - &mut self, - projection_ty: ty::AliasTy<'tcx>, - value_ty: Ty<'tcx>, - ) -> Ty<'tcx> { - use rustc_span::DUMMY_SP; - - match *value_ty.kind() { - ty::Alias(ty::Projection, other_projection_ty) => { - let var = self.infcx.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::MiscVariable, - span: DUMMY_SP, - }); - // FIXME(lazy-normalization): This will always ICE, because the recursive - // call will end up in the _ arm below. - self.relate_projection_ty(projection_ty, var); - self.relate_projection_ty(other_projection_ty, var); - var - } - - _ => bug!("should never be invoked with eager normalization"), - } - } - /// Relate a type inference variable with a value type. 
This works /// by creating a "generalization" G of the value where all the /// lifetimes are replaced with fresh inference values. This @@ -335,12 +295,6 @@ where return Ok(value_ty); } - ty::Alias(ty::Projection, projection_ty) - if D::normalization() == NormalizationStrategy::Lazy => - { - return Ok(self.relate_projection_ty(projection_ty, self.infcx.tcx.mk_ty_var(vid))); - } - _ => (), } @@ -627,18 +581,6 @@ where self.relate_opaques(a, b) } - (&ty::Alias(ty::Projection, projection_ty), _) - if D::normalization() == NormalizationStrategy::Lazy => - { - Ok(self.relate_projection_ty(projection_ty, b)) - } - - (_, &ty::Alias(ty::Projection, projection_ty)) - if D::normalization() == NormalizationStrategy::Lazy => - { - Ok(self.relate_projection_ty(projection_ty, a)) - } - _ => { debug!(?a, ?b, ?self.ambient_variance); @@ -813,17 +755,26 @@ where } } -impl<'tcx, D> ConstEquateRelation<'tcx> for TypeRelating<'_, 'tcx, D> +impl<'tcx, D> ObligationEmittingRelation<'tcx> for TypeRelating<'_, 'tcx, D> where D: TypeRelatingDelegate<'tcx>, { - fn const_equate_obligation(&mut self, a: ty::Const<'tcx>, b: ty::Const<'tcx>) { - self.delegate.register_obligations(vec![Obligation::new( - self.tcx(), - ObligationCause::dummy(), - self.param_env(), - ty::Binder::dummy(ty::PredicateKind::ConstEquate(a, b)), - )]); + fn register_predicates( + &mut self, + obligations: impl IntoIterator>, + ) { + self.delegate.register_obligations( + obligations + .into_iter() + .map(|to_pred| { + Obligation::new(self.tcx(), ObligationCause::dummy(), self.param_env(), to_pred) + }) + .collect(), + ); + } + + fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>) { + self.delegate.register_obligations(obligations); } } diff --git a/compiler/rustc_infer/src/infer/sub.rs b/compiler/rustc_infer/src/infer/sub.rs index 532fbd0ffe4c4..bf1b34415470c 100644 --- a/compiler/rustc_infer/src/infer/sub.rs +++ b/compiler/rustc_infer/src/infer/sub.rs @@ -1,8 +1,7 @@ use super::combine::{CombineFields, RelationDir}; -use super::SubregionOrigin; +use super::{ObligationEmittingRelation, SubregionOrigin}; -use crate::infer::combine::ConstEquateRelation; -use crate::traits::Obligation; +use crate::traits::{Obligation, PredicateObligations}; use rustc_middle::ty::relate::{Cause, Relate, RelateResult, TypeRelation}; use rustc_middle::ty::visit::TypeVisitable; use rustc_middle::ty::TyVar; @@ -228,8 +227,15 @@ impl<'tcx> TypeRelation<'tcx> for Sub<'_, '_, 'tcx> { } } -impl<'tcx> ConstEquateRelation<'tcx> for Sub<'_, '_, 'tcx> { - fn const_equate_obligation(&mut self, a: ty::Const<'tcx>, b: ty::Const<'tcx>) { - self.fields.add_const_equate_obligation(self.a_is_expected, a, b); +impl<'tcx> ObligationEmittingRelation<'tcx> for Sub<'_, '_, 'tcx> { + fn register_predicates( + &mut self, + obligations: impl IntoIterator>, + ) { + self.fields.register_predicates(obligations); + } + + fn register_obligations(&mut self, obligations: PredicateObligations<'tcx>) { + self.fields.register_obligations(obligations); } } diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index d07d9190e011e..bf36b3e5ada29 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -2230,6 +2230,10 @@ impl<'tcx> TyCtxt<'tcx> { }) ) } + + pub fn trait_solver_next(self) -> bool { + self.sess.opts.unstable_opts.trait_solver == rustc_session::config::TraitSolver::Next + } } impl<'tcx> TyCtxtAt<'tcx> { diff --git 
a/compiler/rustc_trait_selection/src/traits/error_reporting/method_chain.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/method_chain.rs index ba9ee57d4099c..9474c70cb535e 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/method_chain.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/method_chain.rs @@ -1,5 +1,7 @@ use crate::infer::InferCtxt; +use rustc_infer::infer::ObligationEmittingRelation; +use rustc_infer::traits::PredicateObligations; use rustc_middle::ty::error::TypeError; use rustc_middle::ty::relate::{self, Relate, RelateResult, TypeRelation}; use rustc_middle::ty::{self, Ty, TyCtxt}; @@ -88,3 +90,16 @@ impl<'a, 'tcx> TypeRelation<'tcx> for CollectAllMismatches<'a, 'tcx> { Ok(a.rebind(self.relate(a.skip_binder(), b.skip_binder())?)) } } + +impl<'tcx> ObligationEmittingRelation<'tcx> for CollectAllMismatches<'_, 'tcx> { + fn register_obligations(&mut self, _obligations: PredicateObligations<'tcx>) { + // FIXME(deferred_projection_equality) + } + + fn register_predicates( + &mut self, + _obligations: impl IntoIterator>, + ) { + // FIXME(deferred_projection_equality) + } +} From fa83c10e96bc7eecf2c01a87fa802ee70b42344c Mon Sep 17 00:00:00 2001 From: Boxy Date: Fri, 10 Feb 2023 14:54:50 +0000 Subject: [PATCH 390/501] implement `compute_alias_eq_goal` --- compiler/rustc_infer/src/infer/projection.rs | 34 +++-- compiler/rustc_middle/src/ty/mod.rs | 27 ++++ .../rustc_trait_selection/src/solve/mod.rs | 116 +++++++++++++++++- 3 files changed, 163 insertions(+), 14 deletions(-) diff --git a/compiler/rustc_infer/src/infer/projection.rs b/compiler/rustc_infer/src/infer/projection.rs index 4667d99ff0008..f795047709e40 100644 --- a/compiler/rustc_infer/src/infer/projection.rs +++ b/compiler/rustc_infer/src/infer/projection.rs @@ -21,16 +21,28 @@ impl<'tcx> InferCtxt<'tcx> { recursion_depth: usize, obligations: &mut Vec>, ) -> Ty<'tcx> { - let def_id = projection_ty.def_id; - let ty_var = self.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::NormalizeProjectionType, - span: self.tcx.def_span(def_id), - }); - let projection = - ty::Binder::dummy(ty::ProjectionPredicate { projection_ty, term: ty_var.into() }); - let obligation = - Obligation::with_depth(self.tcx, cause, recursion_depth, param_env, projection); - obligations.push(obligation); - ty_var + if self.tcx.trait_solver_next() { + // FIXME(-Ztrait-solver=next): Instead of branching here, + // completely change the normalization routine with the new solver. + // + // The new solver correctly handles projection equality so this hack + // is not necessary. if re-enabled it should emit `PredicateKind::AliasEq` + // not `PredicateKind::Clause(Clause::Projection(..))` as in the new solver + // `Projection` is used as `normalizes-to` which will fail for `::Assoc eq ?0`. 
+ return projection_ty.to_ty(self.tcx); + } else { + let def_id = projection_ty.def_id; + let ty_var = self.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::NormalizeProjectionType, + span: self.tcx.def_span(def_id), + }); + let projection = ty::Binder::dummy(ty::PredicateKind::Clause(ty::Clause::Projection( + ty::ProjectionPredicate { projection_ty, term: ty_var.into() }, + ))); + let obligation = + Obligation::with_depth(self.tcx, cause, recursion_depth, param_env, projection); + obligations.push(obligation); + ty_var + } } } diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 22ccbfd0e3a01..0e86b2666b2d6 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -970,6 +970,33 @@ impl<'tcx> Term<'tcx> { TermKind::Const(c) => c.into(), } } + + /// This function returns `None` for `AliasKind::Opaque`. + /// + /// FIXME: rename `AliasTy` to `AliasTerm` and make sure we correctly + /// deal with constants. + pub fn to_alias_term_no_opaque(&self, tcx: TyCtxt<'tcx>) -> Option> { + match self.unpack() { + TermKind::Ty(ty) => match ty.kind() { + ty::Alias(kind, alias_ty) => match kind { + AliasKind::Projection => Some(*alias_ty), + AliasKind::Opaque => None, + }, + _ => None, + }, + TermKind::Const(ct) => match ct.kind() { + ConstKind::Unevaluated(uv) => Some(tcx.mk_alias_ty(uv.def.did, uv.substs)), + _ => None, + }, + } + } + + pub fn is_infer(&self) -> bool { + match self.unpack() { + TermKind::Ty(ty) => ty.is_ty_or_numeric_infer(), + TermKind::Const(ct) => ct.is_ct_infer(), + } + } } const TAG_MASK: usize = 0b11; diff --git a/compiler/rustc_trait_selection/src/solve/mod.rs b/compiler/rustc_trait_selection/src/solve/mod.rs index edfe95b30592a..e56588c58bd05 100644 --- a/compiler/rustc_trait_selection/src/solve/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/mod.rs @@ -42,6 +42,8 @@ mod trait_goals; pub use fulfill::FulfillmentCtxt; +use self::infcx_ext::InferCtxtExt; + /// A goal is a statement, i.e. `predicate`, we want to prove /// given some assumptions, i.e. `param_env`. /// @@ -81,6 +83,21 @@ pub struct Response<'tcx> { pub certainty: Certainty, } +trait CanonicalResponseExt { + fn has_no_inference_or_external_constraints(&self) -> bool; +} + +impl<'tcx> CanonicalResponseExt for Canonical<'tcx, Response<'tcx>> { + fn has_no_inference_or_external_constraints(&self) -> bool { + // so that we get a compile error when regions are supported + // so this code can be checked for being correct + let _: () = self.value.external_constraints.regions; + + self.value.var_values.is_identity() + && self.value.external_constraints.opaque_types.is_empty() + } +} + #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, TypeFoldable, TypeVisitable)] pub enum Certainty { Yes, @@ -302,9 +319,8 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { ty::PredicateKind::TypeWellFormedFromEnv(..) => { bug!("TypeWellFormedFromEnv is only used for Chalk") } - ty::PredicateKind::AliasEq(..) 
=> { - // FIXME(deferred_projection_equality) - todo!() + ty::PredicateKind::AliasEq(lhs, rhs) => { + self.compute_alias_eq_goal(Goal { param_env, predicate: (lhs, rhs) }) } } } else { @@ -402,6 +418,63 @@ impl<'a, 'tcx> EvalCtxt<'a, 'tcx> { None => self.make_canonical_response(Certainty::AMBIGUOUS), } } + + #[instrument(level = "debug", skip(self), ret)] + fn compute_alias_eq_goal( + &mut self, + goal: Goal<'tcx, (ty::Term<'tcx>, ty::Term<'tcx>)>, + ) -> QueryResult<'tcx> { + let tcx = self.tcx(); + + let evaluate_normalizes_to = |ecx: &mut EvalCtxt<'_, 'tcx>, alias, other| { + debug!("evaluate_normalizes_to(alias={:?}, other={:?})", alias, other); + let r = ecx.infcx.probe(|_| { + let (_, certainty) = ecx.evaluate_goal(goal.with( + tcx, + ty::Binder::dummy(ty::ProjectionPredicate { + projection_ty: alias, + term: other, + }), + ))?; + ecx.make_canonical_response(certainty) + }); + debug!("evaluate_normalizes_to(..) -> {:?}", r); + r + }; + + if goal.predicate.0.is_infer() || goal.predicate.1.is_infer() { + bug!( + "`AliasEq` goal with an infer var on lhs or rhs which should have been instantiated" + ); + } + + match ( + goal.predicate.0.to_alias_term_no_opaque(tcx), + goal.predicate.1.to_alias_term_no_opaque(tcx), + ) { + (None, None) => bug!("`AliasEq` goal without an alias on either lhs or rhs"), + (Some(alias), None) => evaluate_normalizes_to(self, alias, goal.predicate.1), + (None, Some(alias)) => evaluate_normalizes_to(self, alias, goal.predicate.0), + (Some(alias_lhs), Some(alias_rhs)) => { + debug!("compute_alias_eq_goal: both sides are aliases"); + + let mut candidates = Vec::with_capacity(3); + + // Evaluate all 3 potential candidates for the alias' being equal + candidates.push(evaluate_normalizes_to(self, alias_lhs, goal.predicate.1)); + candidates.push(evaluate_normalizes_to(self, alias_rhs, goal.predicate.0)); + candidates.push(self.infcx.probe(|_| { + debug!("compute_alias_eq_goal: alias defids are equal, equating substs"); + let nested_goals = self.infcx.eq(goal.param_env, alias_lhs, alias_rhs)?; + self.evaluate_all_and_make_canonical_response(nested_goals) + })); + + debug!(?candidates); + + self.try_merge_responses(candidates.into_iter()) + } + } + } } impl<'tcx> EvalCtxt<'_, 'tcx> { @@ -453,6 +526,43 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { ) -> QueryResult<'tcx> { self.evaluate_all(goals).and_then(|certainty| self.make_canonical_response(certainty)) } + + fn try_merge_responses( + &mut self, + responses: impl Iterator>, + ) -> QueryResult<'tcx> { + let candidates = responses.into_iter().flatten().collect::>(); + + if candidates.is_empty() { + return Err(NoSolution); + } + + // FIXME(-Ztreat-solver=next): We should instead try to find a `Certainty::Yes` response with + // a subset of the constraints that all the other responses have. + let one = candidates[0]; + if candidates[1..].iter().all(|resp| resp == &one) { + return Ok(one); + } + + if let Some(response) = candidates.iter().find(|response| { + response.value.certainty == Certainty::Yes + && response.has_no_inference_or_external_constraints() + }) { + return Ok(response.clone()); + } + + let certainty = candidates.iter().fold(Certainty::AMBIGUOUS, |certainty, response| { + certainty.unify_and(response.value.certainty) + }); + // FIXME(-Ztrait-solver=next): We should take the intersection of the constraints on all the + // responses and use that for the constraints of this ambiguous response. 
+ let response = self.make_canonical_response(certainty); + if let Ok(response) = &response { + assert!(response.has_no_inference_or_external_constraints()); + } + + response + } } #[instrument(level = "debug", skip(infcx), ret)] From 4c98429d8c7b05276fa94eac2e78b24c947509c3 Mon Sep 17 00:00:00 2001 From: Boxy Date: Fri, 10 Feb 2023 14:46:08 +0000 Subject: [PATCH 391/501] Add tests --- .../alias_eq_cant_be_furthur_normalized.rs | 29 ++++++++++++ ..._eq_dont_use_normalizes_to_if_substs_eq.rs | 45 +++++++++++++++++++ ...dont_use_normalizes_to_if_substs_eq.stderr | 9 ++++ tests/ui/traits/new-solver/alias_eq_simple.rs | 22 +++++++++ .../alias_eq_substs_eq_not_intercrate.rs | 20 +++++++++ .../alias_eq_substs_eq_not_intercrate.stderr | 9 ++++ ...zes_to_ignores_unnormalizable_candidate.rs | 40 +++++++++++++++++ ...unnormalizable_candidate.self_infer.stderr | 14 ++++++ 8 files changed, 188 insertions(+) create mode 100644 tests/ui/traits/new-solver/alias_eq_cant_be_furthur_normalized.rs create mode 100644 tests/ui/traits/new-solver/alias_eq_dont_use_normalizes_to_if_substs_eq.rs create mode 100644 tests/ui/traits/new-solver/alias_eq_dont_use_normalizes_to_if_substs_eq.stderr create mode 100644 tests/ui/traits/new-solver/alias_eq_simple.rs create mode 100644 tests/ui/traits/new-solver/alias_eq_substs_eq_not_intercrate.rs create mode 100644 tests/ui/traits/new-solver/alias_eq_substs_eq_not_intercrate.stderr create mode 100644 tests/ui/traits/new-solver/normalizes_to_ignores_unnormalizable_candidate.rs create mode 100644 tests/ui/traits/new-solver/normalizes_to_ignores_unnormalizable_candidate.self_infer.stderr diff --git a/tests/ui/traits/new-solver/alias_eq_cant_be_furthur_normalized.rs b/tests/ui/traits/new-solver/alias_eq_cant_be_furthur_normalized.rs new file mode 100644 index 0000000000000..dc726ba51f94f --- /dev/null +++ b/tests/ui/traits/new-solver/alias_eq_cant_be_furthur_normalized.rs @@ -0,0 +1,29 @@ +// check-pass +// compile-flags: -Ztrait-solver=next + +// check that a goal such as `alias-eq(::Assoc, ::Assoc)` +// succeeds with a constraint that `?0 = bool` + +// FIXME(deferred_projection_equality): add a test that this is true during coherence + +trait TraitA {} + +trait TraitB { + type Assoc; +} + +impl TraitA for (T, T::Assoc) {} + +impl TraitB for i32 { + type Assoc = u32; +} + +fn needs_a() {} + +fn bar() { + needs_a::<(T, ::Assoc<_>)>(); +} + +fn main() { + bar::(); +} diff --git a/tests/ui/traits/new-solver/alias_eq_dont_use_normalizes_to_if_substs_eq.rs b/tests/ui/traits/new-solver/alias_eq_dont_use_normalizes_to_if_substs_eq.rs new file mode 100644 index 0000000000000..fd5d0e3b1946e --- /dev/null +++ b/tests/ui/traits/new-solver/alias_eq_dont_use_normalizes_to_if_substs_eq.rs @@ -0,0 +1,45 @@ +// compile-flags: -Ztrait-solver=next + +// check that when computing `alias-eq(<() as Foo>::Assoc, <() as Foo>::Assoc)` +// we do not infer `?0 = u8` via the `for (): Foo` impl or `?0 = u16` by +// relating substs as either could be a valid solution. 
+ +trait Foo { + type Assoc; +} + +impl Foo for () +where + (): Foo, +{ + type Assoc = <() as Foo>::Assoc; +} + +impl Foo for () { + type Assoc = u8; +} + +impl Foo for () { + type Assoc = u16; +} + +fn output() -> <() as Foo>::Assoc +where + (): Foo, +{ + todo!() +} + +fn incomplete() +where + (): Foo, +{ + // `<() as Foo>::Assoc == <() as Foo<_, STOP>>::Assoc` + let _: <() as Foo>::Assoc = output::<_, T>(); + //~^ error: type annotations needed + + // let _: <() as Foo>::Assoc = output::(); // OK + // let _: <() as Foo>::Assoc = output::(); // OK +} + +fn main() {} diff --git a/tests/ui/traits/new-solver/alias_eq_dont_use_normalizes_to_if_substs_eq.stderr b/tests/ui/traits/new-solver/alias_eq_dont_use_normalizes_to_if_substs_eq.stderr new file mode 100644 index 0000000000000..a6712332c37c5 --- /dev/null +++ b/tests/ui/traits/new-solver/alias_eq_dont_use_normalizes_to_if_substs_eq.stderr @@ -0,0 +1,9 @@ +error[E0282]: type annotations needed + --> $DIR/alias_eq_dont_use_normalizes_to_if_substs_eq.rs:38:41 + | +LL | let _: <() as Foo>::Assoc = output::<_, T>(); + | ^^^^^^^^^^^^^^ cannot infer type of the type parameter `T` declared on the function `output` + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0282`. diff --git a/tests/ui/traits/new-solver/alias_eq_simple.rs b/tests/ui/traits/new-solver/alias_eq_simple.rs new file mode 100644 index 0000000000000..6792cf3ce35ab --- /dev/null +++ b/tests/ui/traits/new-solver/alias_eq_simple.rs @@ -0,0 +1,22 @@ +// check-pass +// compile-flags: -Ztrait-solver=next + +// test that the new solver can handle `alias-eq(::Assoc, u32)` + +trait TraitA {} + +trait TraitB { + type Assoc; +} + +impl TraitA for (T, T::Assoc) {} + +impl TraitB for i32 { + type Assoc = u32; +} + +fn needs_a() {} + +fn main() { + needs_a::<(i32, u32)>(); +} diff --git a/tests/ui/traits/new-solver/alias_eq_substs_eq_not_intercrate.rs b/tests/ui/traits/new-solver/alias_eq_substs_eq_not_intercrate.rs new file mode 100644 index 0000000000000..d4cc380fa211b --- /dev/null +++ b/tests/ui/traits/new-solver/alias_eq_substs_eq_not_intercrate.rs @@ -0,0 +1,20 @@ +// compile-flags: -Ztrait-solver=next + +// check that a `alias-eq(::Assoc, ::Assoc)` goal fails. + +// FIXME(deferred_projection_equality): add a test that this is true during coherence + +trait TraitB { + type Assoc; +} + +fn needs_a() -> T::Assoc { + unimplemented!() +} + +fn bar() { + let _: <_ as TraitB>::Assoc = needs_a::(); + //~^ error: type annotations needed +} + +fn main() {} diff --git a/tests/ui/traits/new-solver/alias_eq_substs_eq_not_intercrate.stderr b/tests/ui/traits/new-solver/alias_eq_substs_eq_not_intercrate.stderr new file mode 100644 index 0000000000000..d063d8fce111c --- /dev/null +++ b/tests/ui/traits/new-solver/alias_eq_substs_eq_not_intercrate.stderr @@ -0,0 +1,9 @@ +error[E0282]: type annotations needed + --> $DIR/alias_eq_substs_eq_not_intercrate.rs:16:12 + | +LL | let _: <_ as TraitB>::Assoc = needs_a::(); + | ^^^^^^^^^^^^^^^^^^^^ cannot infer type for associated type `<_ as TraitB>::Assoc` + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0282`. 
diff --git a/tests/ui/traits/new-solver/normalizes_to_ignores_unnormalizable_candidate.rs b/tests/ui/traits/new-solver/normalizes_to_ignores_unnormalizable_candidate.rs new file mode 100644 index 0000000000000..46343241b4528 --- /dev/null +++ b/tests/ui/traits/new-solver/normalizes_to_ignores_unnormalizable_candidate.rs @@ -0,0 +1,40 @@ +// [no_self_infer] check-pass +// compile-flags: -Ztrait-solver=next +// revisions: self_infer no_self_infer + +// checks that the new solver is smart enough to infer `?0 = U` when solving: +// `normalizes-to( as Trait>::Assoc, u8)` +// with `normalizes-to( as Trait>::Assoc, u8)` in the paramenv even when +// there is a separate `Vec: Trait` bound in the paramenv. +// +// FIXME(-Ztrait-solver=next) +// This could also compile for `normalizes-to(::Assoc, u8)` but +// we currently immediately consider a goal ambiguous if the self type is an +// inference variable. + +trait Trait { + type Assoc; +} + +fn foo>(x: T) {} + +#[cfg(self_infer)] +fn unconstrained() -> T { + todo!() +} + +#[cfg(no_self_infer)] +fn unconstrained() -> Vec { + todo!() +} + +fn bar() +where + Vec: Trait, + Vec: Trait, +{ + foo(unconstrained()) + //[self_infer]~^ ERROR type annotations needed +} + +fn main() {} diff --git a/tests/ui/traits/new-solver/normalizes_to_ignores_unnormalizable_candidate.self_infer.stderr b/tests/ui/traits/new-solver/normalizes_to_ignores_unnormalizable_candidate.self_infer.stderr new file mode 100644 index 0000000000000..0628320126104 --- /dev/null +++ b/tests/ui/traits/new-solver/normalizes_to_ignores_unnormalizable_candidate.self_infer.stderr @@ -0,0 +1,14 @@ +error[E0282]: type annotations needed + --> $DIR/normalizes_to_ignores_unnormalizable_candidate.rs:36:5 + | +LL | foo(unconstrained()) + | ^^^ cannot infer type of the type parameter `T` declared on the function `foo` + | +help: consider specifying the generic argument + | +LL | foo::(unconstrained()) + | +++++ + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0282`. 
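As a reading aid for the candidate merging added in `try_merge_responses` above, the following is a minimal, self-contained sketch of that strategy. It is a simplified model under stated assumptions: the `Certainty`, `Response`, and `merge_responses` names and shapes are illustrative only and do not correspond to rustc-internal types or signatures, and constraint tracking is reduced to a single flag.

// Minimal model: `None` marks a candidate that did not apply at all
// (`NoSolution` in the real solver), `has_constraints` stands in for
// inference and external constraints on a canonical response.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Certainty {
    Yes,
    Ambiguous,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Response {
    certainty: Certainty,
    has_constraints: bool,
}

fn merge_responses(candidates: &[Option<Response>]) -> Option<Response> {
    // Discard candidates that failed outright.
    let ok: Vec<Response> = candidates.iter().copied().flatten().collect();
    if ok.is_empty() {
        return None;
    }
    // If every applicable candidate agrees, return that shared response.
    if ok.windows(2).all(|w| w[0] == w[1]) {
        return Some(ok[0]);
    }
    // Otherwise prefer a `Yes` response that imposes no constraints on the caller.
    if let Some(r) = ok.iter().find(|r| r.certainty == Certainty::Yes && !r.has_constraints) {
        return Some(*r);
    }
    // Fall back to an ambiguous response that leaks no constraints.
    Some(Response { certainty: Certainty::Ambiguous, has_constraints: false })
}

fn main() {
    // Two `Yes` candidates that disagree only on constraints: the unconstrained one wins.
    let merged = merge_responses(&[
        Some(Response { certainty: Certainty::Yes, has_constraints: true }),
        Some(Response { certainty: Certainty::Yes, has_constraints: false }),
    ]);
    assert_eq!(merged, Some(Response { certainty: Certainty::Yes, has_constraints: false }));
}
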
From 79492cb8ae0057389940507f4a8f2a342614a1f7 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 10 Feb 2023 16:08:47 +0100 Subject: [PATCH 392/501] internal: Revert castable expectation and simplify --- crates/hir-ty/src/infer.rs | 7 ++++ crates/hir-ty/src/infer/closure.rs | 2 +- crates/hir-ty/src/infer/expr.rs | 50 +++++++++++++------------- crates/hir-ty/src/infer/path.rs | 2 +- crates/hir-ty/src/tests/simple.rs | 58 ++++++++++++++++++++++++++++-- 5 files changed, 88 insertions(+), 31 deletions(-) diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 43a24c7136dac..767afdf9eb4e8 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -219,6 +219,7 @@ struct InternedStandardTypes { unknown: Ty, bool_: Ty, unit: Ty, + never: Ty, } impl Default for InternedStandardTypes { @@ -227,6 +228,7 @@ impl Default for InternedStandardTypes { unknown: TyKind::Error.intern(Interner), bool_: TyKind::Scalar(Scalar::Bool).intern(Interner), unit: TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner), + never: TyKind::Never.intern(Interner), } } } @@ -1024,6 +1026,7 @@ impl<'a> InferenceContext<'a> { pub(crate) enum Expectation { None, HasType(Ty), + #[allow(dead_code)] Castable(Ty), RValueLikeUnsized(Ty), } @@ -1102,6 +1105,10 @@ impl Expectation { } } + fn coercion_target_type(&self, table: &mut unify::InferenceTable<'_>) -> Ty { + self.only_has_type(table).unwrap_or_else(|| table.new_type_var()) + } + /// Comment copied from rustc: /// Disregard "castable to" expectations because they /// can lead us astray. Consider for example `if cond diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index 094e460dbf79b..7aa2176d67421 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -51,7 +51,7 @@ impl InferenceContext<'_> { .map(to_chalk_trait_id) .collect(); - let self_ty = TyKind::Error.intern(Interner); + let self_ty = self.result.standard_types.unknown.clone(); let bounds = dyn_ty.bounds.clone().substitute(Interner, &[self_ty.cast(Interner)]); for bound in bounds.iter(Interner) { // NOTE(skip_binders): the extracted types are rebound by the returned `FnPointer` diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index f0655291b8bba..9f97261486be1 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -87,16 +87,15 @@ impl<'a> InferenceContext<'a> { let expected = &expected.adjust_for_branches(&mut self.table); self.infer_expr( condition, - &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)), + &Expectation::HasType(self.result.standard_types.bool_.clone()), ); let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe); let mut both_arms_diverge = Diverges::Always; - let result_ty = self.table.new_type_var(); let then_ty = self.infer_expr_inner(then_branch, expected); both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe); - let mut coerce = CoerceMany::new(result_ty); + let mut coerce = CoerceMany::new(expected.coercion_target_type(&mut self.table)); coerce.coerce(self, Some(then_branch), &then_ty); let else_ty = match else_branch { Some(else_branch) => self.infer_expr_inner(else_branch, expected), @@ -113,7 +112,7 @@ impl<'a> InferenceContext<'a> { &Expr::Let { pat, expr } => { let input_ty = self.infer_expr(expr, &Expectation::none()); self.infer_pat(pat, &input_ty, BindingMode::default()); - TyKind::Scalar(Scalar::Bool).intern(Interner) + 
self.result.standard_types.bool_.clone() } Expr::Block { statements, tail, label, id: _ } => { let old_resolver = mem::replace( @@ -188,10 +187,12 @@ impl<'a> InferenceContext<'a> { .intern(Interner) } &Expr::Loop { body, label } => { + // FIXME: should be: + // let ty = expected.coercion_target_type(&mut self.table); let ty = self.table.new_type_var(); let (breaks, ()) = self.with_breakable_ctx(BreakableKind::Loop, ty, label, |this| { - this.infer_expr(body, &Expectation::has_type(TyBuilder::unit())); + this.infer_expr(body, &Expectation::HasType(TyBuilder::unit())); }); match breaks { @@ -199,16 +200,16 @@ impl<'a> InferenceContext<'a> { self.diverges = Diverges::Maybe; breaks } - None => TyKind::Never.intern(Interner), + None => self.result.standard_types.never.clone(), } } &Expr::While { condition, body, label } => { self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| { this.infer_expr( condition, - &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)), + &Expectation::HasType(this.result.standard_types.bool_.clone()), ); - this.infer_expr(body, &Expectation::has_type(TyBuilder::unit())); + this.infer_expr(body, &Expectation::HasType(TyBuilder::unit())); }); // the body may not run, so it diverging doesn't mean we diverge @@ -224,7 +225,7 @@ impl<'a> InferenceContext<'a> { self.infer_pat(pat, &pat_ty, BindingMode::default()); self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| { - this.infer_expr(body, &Expectation::has_type(TyBuilder::unit())); + this.infer_expr(body, &Expectation::HasType(TyBuilder::unit())); }); // the body may not run, so it diverging doesn't mean we diverge @@ -382,12 +383,9 @@ impl<'a> InferenceContext<'a> { let expected = expected.adjust_for_branches(&mut self.table); let result_ty = if arms.is_empty() { - TyKind::Never.intern(Interner) + self.result.standard_types.never.clone() } else { - match &expected { - Expectation::HasType(ty) => ty.clone(), - _ => self.table.new_type_var(), - } + expected.coercion_target_type(&mut self.table) }; let mut coerce = CoerceMany::new(result_ty); @@ -400,7 +398,7 @@ impl<'a> InferenceContext<'a> { if let Some(guard_expr) = arm.guard { self.infer_expr( guard_expr, - &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)), + &Expectation::HasType(self.result.standard_types.bool_.clone()), ); } @@ -425,7 +423,7 @@ impl<'a> InferenceContext<'a> { is_break: false, }); }; - TyKind::Never.intern(Interner) + self.result.standard_types.never.clone() } Expr::Break { expr, label } => { let val_ty = if let Some(expr) = *expr { @@ -439,7 +437,7 @@ impl<'a> InferenceContext<'a> { // avoiding the borrowck let mut coerce = mem::replace( &mut ctxt.coerce, - CoerceMany::new(self.result.standard_types.unknown.clone()), + CoerceMany::new(expected.coercion_target_type(&mut self.table)), ); // FIXME: create a synthetic `()` during lowering so we have something to refer to here? 
@@ -457,7 +455,7 @@ impl<'a> InferenceContext<'a> { }); } } - TyKind::Never.intern(Interner) + self.result.standard_types.never.clone() } Expr::Return { expr } => { if let Some(expr) = expr { @@ -466,7 +464,7 @@ impl<'a> InferenceContext<'a> { let unit = TyBuilder::unit(); let _ = self.coerce(Some(tgt_expr), &unit, &self.return_ty.clone()); } - TyKind::Never.intern(Interner) + self.result.standard_types.never.clone() } Expr::Yield { expr } => { if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() { @@ -479,14 +477,14 @@ impl<'a> InferenceContext<'a> { resume_ty } else { // FIXME: report error (yield expr in non-generator) - TyKind::Error.intern(Interner) + self.result.standard_types.unknown.clone() } } Expr::Yeet { expr } => { if let &Some(expr) = expr { self.infer_expr_inner(expr, &Expectation::None); } - TyKind::Never.intern(Interner) + self.result.standard_types.never.clone() } Expr::RecordLit { path, fields, spread, .. } => { let (ty, def_id) = self.resolve_variant(path.as_deref(), false); @@ -611,8 +609,8 @@ impl<'a> InferenceContext<'a> { } Expr::Cast { expr, type_ref } => { let cast_ty = self.make_ty(type_ref); - let _inner_ty = - self.infer_expr_inner(*expr, &Expectation::Castable(cast_ty.clone())); + // FIXME: propagate the "castable to" expectation + let _inner_ty = self.infer_expr_inner(*expr, &Expectation::None); // FIXME check the cast... cast_ty } @@ -829,7 +827,7 @@ impl<'a> InferenceContext<'a> { self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty)); self.infer_expr( repeat, - &Expectation::has_type( + &Expectation::HasType( TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner), ), ); @@ -852,7 +850,7 @@ impl<'a> InferenceContext<'a> { TyKind::Array(coerce.complete(), len).intern(Interner) } Expr::Literal(lit) => match lit { - Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(Interner), + Literal::Bool(..) => self.result.standard_types.bool_.clone(), Literal::String(..) 
=> { TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(Interner)) .intern(Interner) @@ -1148,7 +1146,7 @@ impl<'a> InferenceContext<'a> { if let Some(expr) = else_branch { self.infer_expr_coerce( *expr, - &Expectation::has_type(Ty::new(Interner, TyKind::Never)), + &Expectation::HasType(self.result.standard_types.never.clone()), ); } diff --git a/crates/hir-ty/src/infer/path.rs b/crates/hir-ty/src/infer/path.rs index 8bd17c0f39f4d..0a8527afbd043 100644 --- a/crates/hir-ty/src/infer/path.rs +++ b/crates/hir-ty/src/infer/path.rs @@ -112,7 +112,7 @@ impl<'a> InferenceContext<'a> { let ty = TyBuilder::value_ty(self.db, typable, parent_substs) .fill(|x| { it.next().unwrap_or_else(|| match x { - ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner), + ParamKind::Type => self.result.standard_types.unknown.clone().cast(Interner), ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()), }) }) diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 6f6b334c9476a..2e5787b701cae 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -3200,6 +3200,8 @@ fn func() { "#, ); } + +// FIXME #[test] fn castable_to() { check_infer( @@ -3224,10 +3226,60 @@ fn func() { 120..122 '{}': () 138..184 '{ ...0]>; }': () 148..149 'x': Box<[i32; 0]> - 152..160 'Box::new': fn new<[i32; 0]>([i32; 0]) -> Box<[i32; 0]> - 152..164 'Box::new([])': Box<[i32; 0]> + 152..160 'Box::new': fn new<[{unknown}; 0]>([{unknown}; 0]) -> Box<[{unknown}; 0]> + 152..164 'Box::new([])': Box<[{unknown}; 0]> 152..181 'Box::n...2; 0]>': Box<[i32; 0]> - 161..163 '[]': [i32; 0] + 161..163 '[]': [{unknown}; 0] + "#]], + ); +} + +#[test] +fn castable_to1() { + check_infer( + r#" +struct Ark(T); +impl Ark { + fn foo(&self) -> *const T { + &self.0 + } +} +fn f(t: Ark) { + Ark::foo(&t) as *const (); +} +"#, + expect![[r#" + 47..51 'self': &Ark + 65..88 '{ ... }': *const T + 75..82 '&self.0': &T + 76..80 'self': &Ark + 76..82 'self.0': T + 99..100 't': Ark + 110..144 '{ ... 
(); }': () + 116..124 'Ark::foo': fn foo(&Ark) -> *const T + 116..128 'Ark::foo(&t)': *const T + 116..141 'Ark::f...nst ()': *const () + 125..127 '&t': &Ark + 126..127 't': Ark + "#]], + ); +} + +// FIXME +#[test] +fn castable_to2() { + check_infer( + r#" +fn func() { + let x = &0u32 as *const _; +} +"#, + expect![[r#" + 10..44 '{ ...t _; }': () + 20..21 'x': *const {unknown} + 24..29 '&0u32': &u32 + 24..41 '&0u32 ...onst _': *const {unknown} + 25..29 '0u32': u32 "#]], ); } From 7677f41f4128e4941fe48052364834f63ada4c02 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 10 Feb 2023 16:42:09 +0100 Subject: [PATCH 393/501] Preallocate some vecs --- crates/hir-ty/src/infer/closure.rs | 2 +- crates/hir-ty/src/infer/expr.rs | 23 ++++++++++++----------- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index 7aa2176d67421..a6449d019ff6a 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -67,7 +67,7 @@ impl InferenceContext<'_> { let arg = projection.substitution.as_slice(Interner).get(1)?; if let Some(subst) = arg.ty(Interner)?.as_tuple() { let generic_args = subst.as_slice(Interner); - let mut sig_tys = Vec::new(); + let mut sig_tys = Vec::with_capacity(generic_args.len() + 1); for arg in generic_args { sig_tys.push(arg.ty(Interner)?.clone()); } diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 9f97261486be1..175fded8ccae1 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -235,7 +235,7 @@ impl<'a> InferenceContext<'a> { Expr::Closure { body, args, ret_type, arg_types, closure_kind } => { assert_eq!(args.len(), arg_types.len()); - let mut sig_tys = Vec::new(); + let mut sig_tys = Vec::with_capacity(arg_types.len() + 1); // collect explicitly written argument types for arg_type in arg_types.iter() { @@ -256,7 +256,8 @@ impl<'a> InferenceContext<'a> { num_binders: 0, sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false }, substitution: FnSubst( - Substitution::from_iter(Interner, sig_tys.clone()).shifted_in(Interner), + Substitution::from_iter(Interner, sig_tys.iter().cloned()) + .shifted_in(Interner), ), }) .intern(Interner); @@ -318,16 +319,16 @@ impl<'a> InferenceContext<'a> { Expr::Call { callee, args, .. 
} => { let callee_ty = self.infer_expr(*callee, &Expectation::none()); let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone()); - let mut res = None; - let mut derefed_callee = callee_ty.clone(); - // manual loop to be able to access `derefs.table` - while let Some((callee_deref_ty, _)) = derefs.next() { - res = derefs.table.callable_sig(&callee_deref_ty, args.len()); - if res.is_some() { - derefed_callee = callee_deref_ty; - break; + let (res, derefed_callee) = 'b: { + // manual loop to be able to access `derefs.table` + while let Some((callee_deref_ty, _)) = derefs.next() { + let res = derefs.table.callable_sig(&callee_deref_ty, args.len()); + if res.is_some() { + break 'b (res, callee_deref_ty); + } } - } + (None, callee_ty.clone()) + }; // if the function is unresolved, we use is_varargs=true to // suppress the arg count diagnostic here let is_varargs = From 414eb48b66ff694126bb12cf4ab8aed06ca4965e Mon Sep 17 00:00:00 2001 From: yukang Date: Sat, 4 Feb 2023 00:23:11 +0800 Subject: [PATCH 394/501] add only modified for compiletest --- Cargo.lock | 1 + src/bootstrap/builder/tests.rs | 2 + src/bootstrap/flags.rs | 10 ++++ src/bootstrap/format.rs | 20 ++------ src/bootstrap/test.rs | 4 ++ src/tools/build_helper/src/git.rs | 72 ++++++++++++++++++++++++++--- src/tools/compiletest/Cargo.toml | 1 + src/tools/compiletest/src/common.rs | 3 ++ src/tools/compiletest/src/main.rs | 53 ++++++++++++++++++--- 9 files changed, 137 insertions(+), 29 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 14758d0f07ef3..613813e6d91ea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -887,6 +887,7 @@ dependencies = [ name = "compiletest" version = "0.0.0" dependencies = [ + "build_helper", "colored", "diff", "getopts", diff --git a/src/bootstrap/builder/tests.rs b/src/bootstrap/builder/tests.rs index d5fcd107502e3..3574f11189ee9 100644 --- a/src/bootstrap/builder/tests.rs +++ b/src/bootstrap/builder/tests.rs @@ -557,6 +557,7 @@ mod dist { rustfix_coverage: false, pass: None, run: None, + only_modified: false, }; let build = Build::new(config); @@ -627,6 +628,7 @@ mod dist { rustfix_coverage: false, pass: None, run: None, + only_modified: false, }; // Make sure rustfmt binary not being found isn't an error. config.channel = "beta".to_string(); diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index 52c3dc0bf7591..ff927ed561b2f 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -124,6 +124,7 @@ pub enum Subcommand { fail_fast: bool, doc_tests: DocTests, rustfix_coverage: bool, + only_modified: bool, }, Bench { paths: Vec, @@ -301,6 +302,7 @@ To learn more about a subcommand, run `./x.py -h`", opts.optflag("", "doc", "only run doc tests"); opts.optflag("", "bless", "update all stderr/stdout files of failing ui tests"); opts.optflag("", "force-rerun", "rerun tests even if the inputs are unchanged"); + opts.optflag("", "only-modified", "only run tests that result has been changed"); opts.optopt( "", "compare-mode", @@ -598,6 +600,7 @@ Arguments: rustc_args: matches.opt_strs("rustc-args"), fail_fast: !matches.opt_present("no-fail-fast"), rustfix_coverage: matches.opt_present("rustfix-coverage"), + only_modified: matches.opt_present("only-modified"), doc_tests: if matches.opt_present("doc") { DocTests::Only } else if matches.opt_present("no-doc") { @@ -777,6 +780,13 @@ impl Subcommand { } } + pub fn only_modified(&self) -> bool { + match *self { + Subcommand::Test { only_modified, .. 
} => only_modified, + _ => false, + } + } + pub fn force_rerun(&self) -> bool { match *self { Subcommand::Test { force_rerun, .. } => force_rerun, diff --git a/src/bootstrap/format.rs b/src/bootstrap/format.rs index bfc57a85cdb42..3166cabd13190 100644 --- a/src/bootstrap/format.rs +++ b/src/bootstrap/format.rs @@ -1,8 +1,8 @@ //! Runs rustfmt on the repository. use crate::builder::Builder; -use crate::util::{output, output_result, program_out_of_date, t}; -use build_helper::git::updated_master_branch; +use crate::util::{output, program_out_of_date, t}; +use build_helper::git::get_git_modified_files; use ignore::WalkBuilder; use std::collections::VecDeque; use std::path::{Path, PathBuf}; @@ -80,23 +80,11 @@ fn update_rustfmt_version(build: &Builder<'_>) { /// /// Returns `None` if all files should be formatted. fn get_modified_rs_files(build: &Builder<'_>) -> Result>, String> { - let Ok(updated_master) = updated_master_branch(Some(&build.config.src)) else { return Ok(None); }; - if !verify_rustfmt_version(build) { return Ok(None); } - let merge_base = - output_result(build.config.git().arg("merge-base").arg(&updated_master).arg("HEAD"))?; - Ok(Some( - output_result( - build.config.git().arg("diff-index").arg("--name-only").arg(merge_base.trim()), - )? - .lines() - .map(|s| s.trim().to_owned()) - .filter(|f| Path::new(f).extension().map_or(false, |ext| ext == "rs")) - .collect(), - )) + get_git_modified_files(Some(&build.config.src), &vec!["rs"]) } #[derive(serde::Deserialize)] @@ -169,7 +157,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) { ignore_fmt.add(&format!("!/{}", untracked_path)).expect(&untracked_path); } if !check && paths.is_empty() { - match get_modified_rs_files(build) { + match get_modified_rs_files(&build) { Ok(Some(files)) => { for file in files { println!("formatting modified file {file}"); diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index 6078e39ac9d3b..30380da7ba236 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -1508,6 +1508,10 @@ note: if you're sure you want to do this, please open an issue as to why. In the if builder.config.rust_optimize_tests { cmd.arg("--optimize-tests"); } + if builder.config.cmd.only_modified() { + cmd.arg("--only-modified"); + } + let mut flags = if is_rustdoc { Vec::new() } else { vec!["-Crpath".to_string()] }; flags.push(format!("-Cdebuginfo={}", builder.config.rust_debuginfo_level_tests)); flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string())); diff --git a/src/tools/build_helper/src/git.rs b/src/tools/build_helper/src/git.rs index dc62051cb85da..168633c8f6334 100644 --- a/src/tools/build_helper/src/git.rs +++ b/src/tools/build_helper/src/git.rs @@ -1,5 +1,24 @@ +use std::process::Stdio; use std::{path::Path, process::Command}; +/// Runs a command and returns the output +fn output_result(cmd: &mut Command) -> Result { + let output = match cmd.stderr(Stdio::inherit()).output() { + Ok(status) => status, + Err(e) => return Err(format!("failed to run command: {:?}: {}", cmd, e)), + }; + if !output.status.success() { + return Err(format!( + "command did not execute successfully: {:?}\n\ + expected success, got: {}\n{}", + cmd, + output.status, + String::from_utf8(output.stderr).map_err(|err| format!("{err:?}"))? + )); + } + Ok(String::from_utf8(output.stdout).map_err(|err| format!("{err:?}"))?) +} + /// Finds the remote for rust-lang/rust. /// For example for these remotes it will return `upstream`. 
/// ```text @@ -14,13 +33,7 @@ pub fn get_rust_lang_rust_remote(git_dir: Option<&Path>) -> Result) -> Result { // We could implement smarter logic here in the future. Ok("origin/master".into()) } + +/// Returns the files that have been modified in the current branch compared to the master branch. +/// The `extensions` parameter can be used to filter the files by their extension. +/// If `extensions` is empty, all files will be returned. +pub fn get_git_modified_files( + git_dir: Option<&Path>, + extensions: &Vec<&str>, +) -> Result>, String> { + let Ok(updated_master) = updated_master_branch(git_dir) else { return Ok(None); }; + + let git = || { + let mut git = Command::new("git"); + if let Some(git_dir) = git_dir { + git.current_dir(git_dir); + } + git + }; + + let merge_base = output_result(git().arg("merge-base").arg(&updated_master).arg("HEAD"))?; + let files = output_result(git().arg("diff-index").arg("--name-only").arg(merge_base.trim()))? + .lines() + .map(|s| s.trim().to_owned()) + .filter(|f| { + Path::new(f).extension().map_or(false, |ext| { + extensions.is_empty() || extensions.contains(&ext.to_str().unwrap()) + }) + }) + .collect(); + Ok(Some(files)) +} + +/// Returns the files that haven't been added to git yet. +pub fn get_git_untracked_files(git_dir: Option<&Path>) -> Result>, String> { + let Ok(_updated_master) = updated_master_branch(git_dir) else { return Ok(None); }; + let mut git = Command::new("git"); + if let Some(git_dir) = git_dir { + git.current_dir(git_dir); + } + + let files = output_result(git.arg("ls-files").arg("--others").arg("--exclude-standard"))? + .lines() + .map(|s| s.trim().to_owned()) + .collect(); + Ok(Some(files)) +} diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml index 1911f0f9c941c..deed6fbd4391f 100644 --- a/src/tools/compiletest/Cargo.toml +++ b/src/tools/compiletest/Cargo.toml @@ -9,6 +9,7 @@ diff = "0.1.10" unified-diff = "0.2.1" getopts = "0.2" miropt-test-tools = { path = "../miropt-test-tools" } +build_helper = { path = "../build_helper" } tracing = "0.1" tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] } regex = "1.0" diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs index 3676f69b100db..7fe2e6257d9e7 100644 --- a/src/tools/compiletest/src/common.rs +++ b/src/tools/compiletest/src/common.rs @@ -380,6 +380,9 @@ pub struct Config { /// Whether to rerun tests even if the inputs are unchanged. 
pub force_rerun: bool, + /// Only rerun the tests that result has been modified accoring to Git status + pub only_modified: bool, + pub target_cfg: LazyCell, } diff --git a/src/tools/compiletest/src/main.rs b/src/tools/compiletest/src/main.rs index 3092c656cd729..47640f4a4170b 100644 --- a/src/tools/compiletest/src/main.rs +++ b/src/tools/compiletest/src/main.rs @@ -8,15 +8,17 @@ extern crate test; use crate::common::{expected_output_path, output_base_dir, output_relative_path, UI_EXTENSIONS}; use crate::common::{CompareMode, Config, Debugger, Mode, PassMode, TestPaths}; use crate::util::logv; +use build_helper::git::{get_git_modified_files, get_git_untracked_files}; +use core::panic; use getopts::Options; use lazycell::LazyCell; -use std::env; use std::ffi::OsString; use std::fs; use std::io::{self, ErrorKind}; use std::path::{Path, PathBuf}; use std::process::{Command, Stdio}; use std::time::SystemTime; +use std::{env, vec}; use test::ColorConfig; use tracing::*; use walkdir::WalkDir; @@ -145,9 +147,10 @@ pub fn parse_config(args: Vec) -> Config { "", "rustfix-coverage", "enable this to generate a Rustfix coverage file, which is saved in \ - `.//rustfix_missing_coverage.txt`", + `.//rustfix_missing_coverage.txt`", ) .optflag("", "force-rerun", "rerun tests even if the inputs are unchanged") + .optflag("", "only-modified", "only run tests that result been modified") .optflag("h", "help", "show this message") .reqopt("", "channel", "current Rust channel", "CHANNEL") .optopt("", "edition", "default Rust edition", "EDITION"); @@ -279,6 +282,7 @@ pub fn parse_config(args: Vec) -> Config { lldb_python_dir: matches.opt_str("lldb-python-dir"), verbose: matches.opt_present("verbose"), quiet: matches.opt_present("quiet"), + only_modified: matches.opt_present("only-modified"), color, remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from), compare_mode: matches.opt_str("compare-mode").map(CompareMode::parse), @@ -521,8 +525,16 @@ pub fn test_opts(config: &Config) -> test::TestOpts { pub fn make_tests(config: &Config, tests: &mut Vec) { debug!("making tests from {:?}", config.src_base.display()); let inputs = common_inputs_stamp(config); - collect_tests_from_dir(config, &config.src_base, &PathBuf::new(), &inputs, tests) - .unwrap_or_else(|_| panic!("Could not read tests from {}", config.src_base.display())); + let modified_tests = modified_tests(config, &config.src_base); + collect_tests_from_dir( + config, + &config.src_base, + &PathBuf::new(), + &inputs, + tests, + &modified_tests, + ) + .unwrap_or_else(|_| panic!("Could not read tests from {}", config.src_base.display())); } /// Returns a stamp constructed from input files common to all test cases. @@ -561,12 +573,34 @@ fn common_inputs_stamp(config: &Config) -> Stamp { stamp } +fn modified_tests(config: &Config, dir: &Path) -> Vec { + if !config.only_modified { + return vec![]; + } + let Ok(Some(files)) = get_git_modified_files(Some(dir), &vec!["rs", "stderr", "fixed"]) else { return vec![]; }; + // Add new test cases to the list, it will be convenient in daily development. 
+ let Ok(Some(untracked_files)) = get_git_untracked_files(None) else { return vec![]; }; + + let all_paths = [&files[..], &untracked_files[..]].concat(); + let full_paths = { + let mut full_paths: Vec = all_paths + .into_iter() + .map(|f| fs::canonicalize(&f).unwrap().with_extension("").with_extension("rs")) + .collect(); + full_paths.dedup(); + full_paths.sort_unstable(); + full_paths + }; + full_paths +} + fn collect_tests_from_dir( config: &Config, dir: &Path, relative_dir_path: &Path, inputs: &Stamp, tests: &mut Vec, + only_modified: &Vec, ) -> io::Result<()> { // Ignore directories that contain a file named `compiletest-ignore-dir`. if dir.join("compiletest-ignore-dir").exists() { @@ -597,7 +631,7 @@ fn collect_tests_from_dir( let file = file?; let file_path = file.path(); let file_name = file.file_name(); - if is_test(&file_name) { + if is_test(&file_name) && (!config.only_modified || only_modified.contains(&file_path)) { debug!("found test file: {:?}", file_path.display()); let paths = TestPaths { file: file_path, relative_dir: relative_dir_path.to_path_buf() }; @@ -607,7 +641,14 @@ fn collect_tests_from_dir( let relative_file_path = relative_dir_path.join(file.file_name()); if &file_name != "auxiliary" { debug!("found directory: {:?}", file_path.display()); - collect_tests_from_dir(config, &file_path, &relative_file_path, inputs, tests)?; + collect_tests_from_dir( + config, + &file_path, + &relative_file_path, + inputs, + tests, + only_modified, + )?; } } else { debug!("found other file/directory: {:?}", file_path.display()); From 7e072199a6e650698c2f5f1e1053b20d48be43d3 Mon Sep 17 00:00:00 2001 From: Lukas Bergdoll Date: Fri, 10 Feb 2023 18:00:31 +0100 Subject: [PATCH 395/501] Speedup heapsort by 1.5x by making it branchless `slice::sort_unstable` will fall back to heapsort if it repeatedly fails to find a good pivot. By making the core child update code branchless it is much faster. On Zen3 sorting 10k `u64` and forcing the sort to pick heapsort, results in: 455us -> 278us --- library/core/src/slice/sort.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/library/core/src/slice/sort.rs b/library/core/src/slice/sort.rs index 2181f9a811855..990540f55f53d 100644 --- a/library/core/src/slice/sort.rs +++ b/library/core/src/slice/sort.rs @@ -198,9 +198,7 @@ where } // Choose the greater child. - if child + 1 < v.len() && is_less(&v[child], &v[child + 1]) { - child += 1; - } + child += (child + 1 < v.len() && is_less(&v[child], &v[child + 1])) as usize; // Stop if the invariant holds at `node`. 
if !is_less(&v[node], &v[child]) { From 5201bb53bbdc24a35cb9aac5797187f677decd36 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20Kr=C3=BCger?= Date: Fri, 10 Feb 2023 18:08:25 +0100 Subject: [PATCH 396/501] remove redundant clones --- compiler/rustc_codegen_ssa/src/back/archive.rs | 2 +- compiler/rustc_log/src/lib.rs | 2 +- compiler/rustc_parse_format/src/lib.rs | 4 +--- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_codegen_ssa/src/back/archive.rs b/compiler/rustc_codegen_ssa/src/back/archive.rs index d3cd085cfb668..66ec8f5f57d21 100644 --- a/compiler/rustc_codegen_ssa/src/back/archive.rs +++ b/compiler/rustc_codegen_ssa/src/back/archive.rs @@ -203,7 +203,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> { } } - self.src_archives.push((archive_path.to_owned(), archive_map)); + self.src_archives.push((archive_path, archive_map)); Ok(()) } diff --git a/compiler/rustc_log/src/lib.rs b/compiler/rustc_log/src/lib.rs index 019fdc30dcec5..12d6281daad15 100644 --- a/compiler/rustc_log/src/lib.rs +++ b/compiler/rustc_log/src/lib.rs @@ -98,7 +98,7 @@ pub fn init_env_logger(env: &str) -> Result<(), Error> { let fmt_layer = tracing_subscriber::fmt::layer() .with_writer(io::stderr) .without_time() - .event_format(BacktraceFormatter { backtrace_target: str.to_string() }); + .event_format(BacktraceFormatter { backtrace_target: str }); let subscriber = subscriber.with(fmt_layer); tracing::subscriber::set_global_default(subscriber).unwrap(); } diff --git a/compiler/rustc_parse_format/src/lib.rs b/compiler/rustc_parse_format/src/lib.rs index 088a87ca57104..34a4fd02ea691 100644 --- a/compiler/rustc_parse_format/src/lib.rs +++ b/compiler/rustc_parse_format/src/lib.rs @@ -847,9 +847,7 @@ impl<'a> Parser<'a> { 0, ParseError { description: "expected format parameter to occur after `:`".to_owned(), - note: Some( - format!("`?` comes after `:`, try `{}:{}` instead", word, "?").to_owned(), - ), + note: Some(format!("`?` comes after `:`, try `{}:{}` instead", word, "?")), label: "expected `?` to occur after `:`".to_owned(), span: pos.to(pos), secondary_label: None, From c52435a338a256a7a09d48c96aeb429530b2fe6d Mon Sep 17 00:00:00 2001 From: yukang Date: Fri, 10 Feb 2023 16:49:28 +0000 Subject: [PATCH 397/501] cleanup and fix naming --- src/bootstrap/format.rs | 2 +- src/tools/compiletest/src/main.rs | 21 ++++++++++++--------- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/bootstrap/format.rs b/src/bootstrap/format.rs index 3166cabd13190..6c9c26faef6aa 100644 --- a/src/bootstrap/format.rs +++ b/src/bootstrap/format.rs @@ -157,7 +157,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) { ignore_fmt.add(&format!("!/{}", untracked_path)).expect(&untracked_path); } if !check && paths.is_empty() { - match get_modified_rs_files(&build) { + match get_modified_rs_files(build) { Ok(Some(files)) => { for file in files { println!("formatting modified file {file}"); diff --git a/src/tools/compiletest/src/main.rs b/src/tools/compiletest/src/main.rs index 47640f4a4170b..c648b2f12f101 100644 --- a/src/tools/compiletest/src/main.rs +++ b/src/tools/compiletest/src/main.rs @@ -525,7 +525,9 @@ pub fn test_opts(config: &Config) -> test::TestOpts { pub fn make_tests(config: &Config, tests: &mut Vec) { debug!("making tests from {:?}", config.src_base.display()); let inputs = common_inputs_stamp(config); - let modified_tests = modified_tests(config, &config.src_base); + let modified_tests = modified_tests(config, &config.src_base).unwrap_or_else(|err| { + 
panic!("modified_tests got error from dir: {}, error: {}", config.src_base.display(), err) + }); collect_tests_from_dir( config, &config.src_base, @@ -573,13 +575,14 @@ fn common_inputs_stamp(config: &Config) -> Stamp { stamp } -fn modified_tests(config: &Config, dir: &Path) -> Vec { +fn modified_tests(config: &Config, dir: &Path) -> Result, String> { if !config.only_modified { - return vec![]; + return Ok(vec![]); } - let Ok(Some(files)) = get_git_modified_files(Some(dir), &vec!["rs", "stderr", "fixed"]) else { return vec![]; }; + let files = + get_git_modified_files(Some(dir), &vec!["rs", "stderr", "fixed"])?.unwrap_or(vec![]); // Add new test cases to the list, it will be convenient in daily development. - let Ok(Some(untracked_files)) = get_git_untracked_files(None) else { return vec![]; }; + let untracked_files = get_git_untracked_files(None)?.unwrap_or(vec![]); let all_paths = [&files[..], &untracked_files[..]].concat(); let full_paths = { @@ -591,7 +594,7 @@ fn modified_tests(config: &Config, dir: &Path) -> Vec { full_paths.sort_unstable(); full_paths }; - full_paths + Ok(full_paths) } fn collect_tests_from_dir( @@ -600,7 +603,7 @@ fn collect_tests_from_dir( relative_dir_path: &Path, inputs: &Stamp, tests: &mut Vec, - only_modified: &Vec, + modified_tests: &Vec, ) -> io::Result<()> { // Ignore directories that contain a file named `compiletest-ignore-dir`. if dir.join("compiletest-ignore-dir").exists() { @@ -631,7 +634,7 @@ fn collect_tests_from_dir( let file = file?; let file_path = file.path(); let file_name = file.file_name(); - if is_test(&file_name) && (!config.only_modified || only_modified.contains(&file_path)) { + if is_test(&file_name) && (!config.only_modified || modified_tests.contains(&file_path)) { debug!("found test file: {:?}", file_path.display()); let paths = TestPaths { file: file_path, relative_dir: relative_dir_path.to_path_buf() }; @@ -647,7 +650,7 @@ fn collect_tests_from_dir( &relative_file_path, inputs, tests, - only_modified, + modified_tests, )?; } } else { From c003c01a03e972d43444862c275836ac12f20a98 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 10 Feb 2023 18:32:18 +0100 Subject: [PATCH 398/501] Correctly handle reexports for macros --- src/librustdoc/visit_ast.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 088cb3f339492..9c1e5f4a3cddb 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -378,7 +378,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { let nonexported = !tcx.has_attr(def_id, sym::macro_export); if is_macro_2_0 || nonexported || self.inlining { - self.add_to_current_mod(item, renamed, None); + self.add_to_current_mod(item, renamed, import_id); } } hir::ItemKind::Mod(ref m) => { From ddb31de281d2c1edf2d09b8eb0b6f8e53a13b846 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 10 Feb 2023 18:32:33 +0100 Subject: [PATCH 399/501] Also get current import attributes --- src/librustdoc/clean/mod.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 80493b100bb45..02e6b0edc720c 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -2206,10 +2206,12 @@ fn clean_maybe_renamed_item<'tcx>( }; let mut extra_attrs = Vec::new(); - if let Some(hir::Node::Item(use_node)) = - import_id.and_then(|def_id| cx.tcx.hir().find_by_def_id(def_id)) + if let Some(import_id) = import_id && + let Some(hir::Node::Item(use_node)) = 
cx.tcx.hir().find_by_def_id(import_id) { - // We get all the various imports' attributes. + // First, we add the attributes from the current import. + extra_attrs.extend_from_slice(inline::load_attrs(cx, import_id.to_def_id())); + // Then we get all the various imports' attributes. get_all_import_attributes(use_node, cx.tcx, item.owner_id.def_id, &mut extra_attrs); } From 295fd0d8352ca4cff048b29064cfafbf5f29592c Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 10 Feb 2023 18:37:32 +0100 Subject: [PATCH 400/501] Add regression test for reexported macros docs --- tests/rustdoc/reexport-macro.rs | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 tests/rustdoc/reexport-macro.rs diff --git a/tests/rustdoc/reexport-macro.rs b/tests/rustdoc/reexport-macro.rs new file mode 100644 index 0000000000000..c4dec703aed3b --- /dev/null +++ b/tests/rustdoc/reexport-macro.rs @@ -0,0 +1,23 @@ +// Ensure that macros are correctly reexported and that they get both the comment from the +// `pub use` and from the macro. + +#![crate_name = "foo"] + +// @has 'foo/macro.foo.html' +// @!has - '//*[@class="toggle top-doc"]/*[@class="docblock"]' 'x y' +// @has - '//*[@class="toggle top-doc"]/*[@class="docblock"]' 'y' +#[macro_use] +mod my_module { + /// y + #[macro_export] + macro_rules! foo { + () => (); + } +} + +// @has 'foo/another_mod/macro.bar.html' +// @has - '//*[@class="toggle top-doc"]/*[@class="docblock"]' 'x y' +pub mod another_mod { + /// x + pub use crate::foo as bar; +} From 9790d6fbdd07d1efbec8088c88dc2b0d1f9ba283 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Thu, 9 Feb 2023 18:57:02 +0000 Subject: [PATCH 401/501] Add a couple random projection tests --- .../param-candidate-doesnt-shadow-project.rs | 25 ++++++++++++++++ ...ojection-param-candidates-are-ambiguous.rs | 30 +++++++++++++++++++ ...tion-param-candidates-are-ambiguous.stderr | 16 ++++++++++ 3 files changed, 71 insertions(+) create mode 100644 tests/ui/traits/new-solver/param-candidate-doesnt-shadow-project.rs create mode 100644 tests/ui/traits/new-solver/two-projection-param-candidates-are-ambiguous.rs create mode 100644 tests/ui/traits/new-solver/two-projection-param-candidates-are-ambiguous.stderr diff --git a/tests/ui/traits/new-solver/param-candidate-doesnt-shadow-project.rs b/tests/ui/traits/new-solver/param-candidate-doesnt-shadow-project.rs new file mode 100644 index 0000000000000..bdf999ec5dd00 --- /dev/null +++ b/tests/ui/traits/new-solver/param-candidate-doesnt-shadow-project.rs @@ -0,0 +1,25 @@ +// compile-flags: -Ztrait-solver=next +// check-pass + +trait Foo { + type Assoc; +} + +trait Bar {} + +impl Foo for T { + type Assoc = i32; +} + +impl Bar for T where T: Foo {} + +fn require_bar() {} + +fn foo() { + // Unlike the classic solver, `::Assoc = _` will still project + // down to `i32` even though there's a param-env candidate here, since we + // don't assemble any param-env projection candidates for `T: Foo` alone. + require_bar::(); +} + +fn main() {} diff --git a/tests/ui/traits/new-solver/two-projection-param-candidates-are-ambiguous.rs b/tests/ui/traits/new-solver/two-projection-param-candidates-are-ambiguous.rs new file mode 100644 index 0000000000000..cde2059ca9b91 --- /dev/null +++ b/tests/ui/traits/new-solver/two-projection-param-candidates-are-ambiguous.rs @@ -0,0 +1,30 @@ +// compile-flags: -Ztrait-solver=next + +// When we're solving `::Assoc = i32`, we actually first solve +// `::Assoc = _#1t`, then unify `_#1t` with `i32`. 
That goal +// with the inference variable is ambiguous when there are >1 param-env +// candidates. + +// We don't unify the RHS of a projection goal eagerly when solving, both +// for caching reasons and partly to make sure that we don't make the new +// trait solver smarter than it should be. + +// This is (as far as I can tell) a forwards-compatible decision, but if you +// make this test go from fail to pass, be sure you understand the implications! + +trait Foo { + type Assoc; +} + +trait Bar {} + +impl Bar for T where T: Foo {} + +fn needs_bar() {} + +fn foo + Foo>() { + needs_bar::(); + //~^ ERROR type annotations needed: cannot satisfy `T: Bar` +} + +fn main() {} diff --git a/tests/ui/traits/new-solver/two-projection-param-candidates-are-ambiguous.stderr b/tests/ui/traits/new-solver/two-projection-param-candidates-are-ambiguous.stderr new file mode 100644 index 0000000000000..fa5e780ee5e8b --- /dev/null +++ b/tests/ui/traits/new-solver/two-projection-param-candidates-are-ambiguous.stderr @@ -0,0 +1,16 @@ +error[E0283]: type annotations needed: cannot satisfy `T: Bar` + --> $DIR/two-projection-param-candidates-are-ambiguous.rs:26:5 + | +LL | needs_bar::(); + | ^^^^^^^^^^^^^^ + | + = note: cannot satisfy `T: Bar` +note: required by a bound in `needs_bar` + --> $DIR/two-projection-param-candidates-are-ambiguous.rs:23:17 + | +LL | fn needs_bar() {} + | ^^^ required by this bound in `needs_bar` + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0283`. From 59b11e8fa39090f555b8d91eb0c359ad0adaabc4 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Fri, 10 Feb 2023 18:07:23 +0000 Subject: [PATCH 402/501] Zero the `REPARSE_MOUNTPOINT_DATA_BUFFER` header Makes sure the full header is correctly initialized, including reserve parameters. --- library/std/src/sys/windows/fs.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/library/std/src/sys/windows/fs.rs b/library/std/src/sys/windows/fs.rs index 3780980382879..f1a784b5fd2e7 100644 --- a/library/std/src/sys/windows/fs.rs +++ b/library/std/src/sys/windows/fs.rs @@ -1393,6 +1393,8 @@ fn symlink_junction_inner(original: &Path, junction: &Path) -> io::Result<()> { let mut data = Align8([MaybeUninit::::uninit(); c::MAXIMUM_REPARSE_DATA_BUFFER_SIZE]); let data_ptr = data.0.as_mut_ptr(); let db = data_ptr.cast::(); + // Zero the header to ensure it's fully initialized, including reserved parameters. 
+ *db = mem::zeroed(); let buf = ptr::addr_of_mut!((*db).ReparseTarget).cast::(); let mut i = 0; // FIXME: this conversion is very hacky From 0eba2f3c16b4e2792b75762c7643f3a09c9f1a3d Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Thu, 19 Jan 2023 21:13:01 +0000 Subject: [PATCH 403/501] Suggest fn call on pattern type mismatch --- compiler/rustc_hir_typeck/src/_match.rs | 2 +- compiler/rustc_hir_typeck/src/check.rs | 2 +- .../rustc_hir_typeck/src/fn_ctxt/checks.rs | 6 ++-- compiler/rustc_hir_typeck/src/pat.rs | 19 ++++++++--- .../suggest-call-on-pat-mismatch.rs | 16 +++++++++ .../suggest-call-on-pat-mismatch.stderr | 33 +++++++++++++++++++ 6 files changed, 68 insertions(+), 10 deletions(-) create mode 100644 tests/ui/suggestions/suggest-call-on-pat-mismatch.rs create mode 100644 tests/ui/suggestions/suggest-call-on-pat-mismatch.stderr diff --git a/compiler/rustc_hir_typeck/src/_match.rs b/compiler/rustc_hir_typeck/src/_match.rs index 88fb265358686..e19ef2ff3bf48 100644 --- a/compiler/rustc_hir_typeck/src/_match.rs +++ b/compiler/rustc_hir_typeck/src/_match.rs @@ -41,7 +41,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // #55810: Type check patterns first so we get types for all bindings. let scrut_span = scrut.span.find_ancestor_inside(expr.span).unwrap_or(scrut.span); for arm in arms { - self.check_pat_top(&arm.pat, scrutinee_ty, Some(scrut_span), true); + self.check_pat_top(&arm.pat, scrutinee_ty, Some(scrut_span), Some(scrut)); } // Now typecheck the blocks. diff --git a/compiler/rustc_hir_typeck/src/check.rs b/compiler/rustc_hir_typeck/src/check.rs index 1c70c1b71e763..cc515e6c85313 100644 --- a/compiler/rustc_hir_typeck/src/check.rs +++ b/compiler/rustc_hir_typeck/src/check.rs @@ -90,7 +90,7 @@ pub(super) fn check_fn<'a, 'tcx>( for (idx, (param_ty, param)) in inputs_fn.chain(maybe_va_list).zip(body.params).enumerate() { // Check the pattern. let ty_span = try { inputs_hir?.get(idx)?.span }; - fcx.check_pat_top(¶m.pat, param_ty, ty_span, false); + fcx.check_pat_top(¶m.pat, param_ty, ty_span, None); // Check that argument is Sized. // The check for a non-trivial pattern is a hack to avoid duplicate warnings diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs index 2a1265600de8b..9c7a84ce198e8 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs @@ -1330,11 +1330,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // Does the expected pattern type originate from an expression and what is the span? let (origin_expr, ty_span) = match (decl.ty, decl.init) { - (Some(ty), _) => (false, Some(ty.span)), // Bias towards the explicit user type. + (Some(ty), _) => (None, Some(ty.span)), // Bias towards the explicit user type. (_, Some(init)) => { - (true, Some(init.span.find_ancestor_inside(decl.span).unwrap_or(init.span))) + (Some(init), Some(init.span.find_ancestor_inside(decl.span).unwrap_or(init.span))) } // No explicit type; so use the scrutinee. - _ => (false, None), // We have `let $pat;`, so the expected type is unconstrained. + _ => (None, None), // We have `let $pat;`, so the expected type is unconstrained. }; // Type check the pattern. Override if necessary to avoid knock-on errors. 
diff --git a/compiler/rustc_hir_typeck/src/pat.rs b/compiler/rustc_hir_typeck/src/pat.rs index 46799245222dc..52236ae56eeaa 100644 --- a/compiler/rustc_hir_typeck/src/pat.rs +++ b/compiler/rustc_hir_typeck/src/pat.rs @@ -46,7 +46,7 @@ struct TopInfo<'tcx> { /// Was the origin of the `span` from a scrutinee expression? /// /// Otherwise there is no scrutinee and it could be e.g. from the type of a formal parameter. - origin_expr: bool, + origin_expr: Option<&'tcx hir::Expr<'tcx>>, /// The span giving rise to the `expected` type, if one could be provided. /// /// If `origin_expr` is `true`, then this is the span of the scrutinee as in: @@ -74,7 +74,8 @@ struct TopInfo<'tcx> { impl<'tcx> FnCtxt<'_, 'tcx> { fn pattern_cause(&self, ti: TopInfo<'tcx>, cause_span: Span) -> ObligationCause<'tcx> { - let code = Pattern { span: ti.span, root_ty: ti.expected, origin_expr: ti.origin_expr }; + let code = + Pattern { span: ti.span, root_ty: ti.expected, origin_expr: ti.origin_expr.is_some() }; self.cause(cause_span, code) } @@ -85,7 +86,14 @@ impl<'tcx> FnCtxt<'_, 'tcx> { actual: Ty<'tcx>, ti: TopInfo<'tcx>, ) -> Option> { - self.demand_eqtype_with_origin(&self.pattern_cause(ti, cause_span), expected, actual) + let mut diag = + self.demand_eqtype_with_origin(&self.pattern_cause(ti, cause_span), expected, actual)?; + if let Some(expr) = ti.origin_expr { + self.suggest_fn_call(&mut diag, expr, expected, |output| { + self.can_eq(self.param_env, output, actual).is_ok() + }); + } + Some(diag) } fn demand_eqtype_pat( @@ -127,7 +135,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { pat: &'tcx Pat<'tcx>, expected: Ty<'tcx>, span: Option, - origin_expr: bool, + origin_expr: Option<&'tcx hir::Expr<'tcx>>, ) { let info = TopInfo { expected, origin_expr, span }; self.check_pat(pat, expected, INITIAL_BM, info); @@ -2146,7 +2154,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err.help("the semantics of slice patterns changed recently; see issue #62254"); } else if self.autoderef(span, expected_ty) .any(|(ty, _)| matches!(ty.kind(), ty::Slice(..) 
| ty::Array(..))) - && let (Some(span), true) = (ti.span, ti.origin_expr) + && let Some(span) = ti.span + && let Some(_) = ti.origin_expr && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { let ty = self.resolve_vars_if_possible(ti.expected); diff --git a/tests/ui/suggestions/suggest-call-on-pat-mismatch.rs b/tests/ui/suggestions/suggest-call-on-pat-mismatch.rs new file mode 100644 index 0000000000000..657dd9c22c21b --- /dev/null +++ b/tests/ui/suggestions/suggest-call-on-pat-mismatch.rs @@ -0,0 +1,16 @@ +enum E { + One(i32, i32), +} + +fn main() { + let var = E::One; + if let E::One(var1, var2) = var { + //~^ ERROR mismatched types + //~| HELP use parentheses to construct this tuple variant + println!("{var1} {var2}"); + } + + let Some(x) = Some; + //~^ ERROR mismatched types + //~| HELP use parentheses to construct this tuple variant +} diff --git a/tests/ui/suggestions/suggest-call-on-pat-mismatch.stderr b/tests/ui/suggestions/suggest-call-on-pat-mismatch.stderr new file mode 100644 index 0000000000000..7338312bab651 --- /dev/null +++ b/tests/ui/suggestions/suggest-call-on-pat-mismatch.stderr @@ -0,0 +1,33 @@ +error[E0308]: mismatched types + --> $DIR/suggest-call-on-pat-mismatch.rs:7:12 + | +LL | if let E::One(var1, var2) = var { + | ^^^^^^^^^^^^^^^^^^ --- this expression has type `fn(i32, i32) -> E {E::One}` + | | + | expected enum constructor, found `E` + | + = note: expected enum constructor `fn(i32, i32) -> E {E::One}` + found enum `E` +help: use parentheses to construct this tuple variant + | +LL | if let E::One(var1, var2) = var(/* i32 */, /* i32 */) { + | ++++++++++++++++++++++ + +error[E0308]: mismatched types + --> $DIR/suggest-call-on-pat-mismatch.rs:13:9 + | +LL | let Some(x) = Some; + | ^^^^^^^ ---- this expression has type `fn(_) -> Option<_> {Option::<_>::Some}` + | | + | expected enum constructor, found `Option<_>` + | + = note: expected enum constructor `fn(_) -> Option<_> {Option::<_>::Some}` + found enum `Option<_>` +help: use parentheses to construct this tuple variant + | +LL | let Some(x) = Some(/* value */); + | +++++++++++++ + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0308`. 
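For context on the suggestion tested above, this is roughly what the first case of the new UI test looks like once the suggested parentheses are applied; the argument values are arbitrary and chosen only so the sketch compiles and runs.

enum E {
    One(i32, i32),
}

fn main() {
    // Call the tuple-variant constructor so the scrutinee is a value of `E`,
    // not the constructor function `fn(i32, i32) -> E` itself.
    let var = E::One(1, 2);
    if let E::One(var1, var2) = var {
        println!("{var1} {var2}");
    }
}
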
From c7bd3c682f9141f88b23d2e79c31a0be055c1c83 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Fri, 10 Feb 2023 14:55:17 -0500 Subject: [PATCH 404/501] Always reload onEnter configuration Configuration reload doesn't happen often anyway, and there will always be a set of onEnter rules to load --- editors/code/src/config.ts | 23 +++++++---------------- 1 file changed, 7 insertions(+), 16 deletions(-) diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index 30ff97848299f..561b69f0100b2 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -11,9 +11,7 @@ export type RunnableEnvCfg = export class Config { readonly extensionId = "rust-lang.rust-analyzer"; - configureLang: - | { handle: vscode.Disposable; typingContinueCommentsOnNewline: boolean } - | undefined; + configureLang: vscode.Disposable | undefined; readonly rootSection = "rust-analyzer"; private readonly requiresReloadOpts = [ @@ -45,7 +43,7 @@ export class Config { } dispose() { - this.configureLang?.handle.dispose(); + this.configureLang?.dispose(); } private refreshLogging() { @@ -89,12 +87,8 @@ export class Config { */ private configureLanguage() { // Only need to dispose of the config if there's a change - if ( - this.configureLang && - this.typingContinueCommentsOnNewline !== - this.configureLang.typingContinueCommentsOnNewline - ) { - this.configureLang.handle.dispose(); + if (this.configureLang) { + this.configureLang.dispose(); this.configureLang = undefined; } @@ -167,12 +161,9 @@ export class Config { ]; } - this.configureLang = { - handle: vscode.languages.setLanguageConfiguration("rust", { - onEnterRules, - }), - typingContinueCommentsOnNewline: this.typingContinueCommentsOnNewline, - }; + this.configureLang = vscode.languages.setLanguageConfiguration("rust", { + onEnterRules, + }); } // We don't do runtime config validation here for simplicity. More on stackoverflow: From 404e9c5e3ad75057b6bbb3bcd44fe60480e50294 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Fri, 10 Feb 2023 12:50:17 -0800 Subject: [PATCH 405/501] Have a function for the `log(0)` panic, rather than copy-pasting the string constant --- library/core/src/num/int_log10.rs | 8 ++++++++ library/core/src/num/int_macros.rs | 21 +++++++++++++++------ library/core/src/num/uint_macros.rs | 21 +++++++++++++++------ 3 files changed, 38 insertions(+), 12 deletions(-) diff --git a/library/core/src/num/int_log10.rs b/library/core/src/num/int_log10.rs index 80472528f6c3a..0ce31b40a3845 100644 --- a/library/core/src/num/int_log10.rs +++ b/library/core/src/num/int_log10.rs @@ -138,3 +138,11 @@ pub const fn i64(val: i64) -> u32 { pub const fn i128(val: i128) -> u32 { u128(val as u128) } + +/// Instantiate this panic logic once, rather than for all the ilog methods +/// on every single primitive type. +#[cold] +#[track_caller] +pub const fn panic_for_nonpositive_argument() -> ! { + panic!("argument of integer logarithm must be positive") +} diff --git a/library/core/src/num/int_macros.rs b/library/core/src/num/int_macros.rs index b59f28193e2bd..479f8ffb78d01 100644 --- a/library/core/src/num/int_macros.rs +++ b/library/core/src/num/int_macros.rs @@ -2331,14 +2331,17 @@ macro_rules! 
int_impl { /// ``` #[stable(feature = "int_log", since = "1.67.0")] #[rustc_const_stable(feature = "int_log", since = "1.67.0")] - #[rustc_allow_const_fn_unstable(const_option)] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] #[track_caller] pub const fn ilog(self, base: Self) -> u32 { assert!(base >= 2, "base of integer logarithm must be at least 2"); - self.checked_ilog(base).expect("argument of integer logarithm must be positive") + if let Some(log) = self.checked_ilog(base) { + log + } else { + int_log10::panic_for_nonpositive_argument() + } } /// Returns the base 2 logarithm of the number, rounded down. @@ -2354,13 +2357,16 @@ macro_rules! int_impl { /// ``` #[stable(feature = "int_log", since = "1.67.0")] #[rustc_const_stable(feature = "int_log", since = "1.67.0")] - #[rustc_allow_const_fn_unstable(const_option)] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] #[track_caller] pub const fn ilog2(self) -> u32 { - self.checked_ilog2().expect("argument of integer logarithm must be positive") + if let Some(log) = self.checked_ilog2() { + log + } else { + int_log10::panic_for_nonpositive_argument() + } } /// Returns the base 10 logarithm of the number, rounded down. @@ -2376,13 +2382,16 @@ macro_rules! int_impl { /// ``` #[stable(feature = "int_log", since = "1.67.0")] #[rustc_const_stable(feature = "int_log", since = "1.67.0")] - #[rustc_allow_const_fn_unstable(const_option)] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] #[track_caller] pub const fn ilog10(self) -> u32 { - self.checked_ilog10().expect("argument of integer logarithm must be positive") + if let Some(log) = self.checked_ilog10() { + log + } else { + int_log10::panic_for_nonpositive_argument() + } } /// Returns the logarithm of the number with respect to an arbitrary base, diff --git a/library/core/src/num/uint_macros.rs b/library/core/src/num/uint_macros.rs index 1c97c46862833..411ea68674db3 100644 --- a/library/core/src/num/uint_macros.rs +++ b/library/core/src/num/uint_macros.rs @@ -705,14 +705,17 @@ macro_rules! uint_impl { /// ``` #[stable(feature = "int_log", since = "1.67.0")] #[rustc_const_stable(feature = "int_log", since = "1.67.0")] - #[rustc_allow_const_fn_unstable(const_option)] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] #[track_caller] pub const fn ilog(self, base: Self) -> u32 { assert!(base >= 2, "base of integer logarithm must be at least 2"); - self.checked_ilog(base).expect("argument of integer logarithm must be positive") + if let Some(log) = self.checked_ilog(base) { + log + } else { + int_log10::panic_for_nonpositive_argument() + } } /// Returns the base 2 logarithm of the number, rounded down. @@ -728,13 +731,16 @@ macro_rules! uint_impl { /// ``` #[stable(feature = "int_log", since = "1.67.0")] #[rustc_const_stable(feature = "int_log", since = "1.67.0")] - #[rustc_allow_const_fn_unstable(const_option)] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] #[track_caller] pub const fn ilog2(self) -> u32 { - self.checked_ilog2().expect("argument of integer logarithm must be positive") + if let Some(log) = self.checked_ilog2() { + log + } else { + int_log10::panic_for_nonpositive_argument() + } } /// Returns the base 10 logarithm of the number, rounded down. @@ -750,13 +756,16 @@ macro_rules! 
uint_impl { /// ``` #[stable(feature = "int_log", since = "1.67.0")] #[rustc_const_stable(feature = "int_log", since = "1.67.0")] - #[rustc_allow_const_fn_unstable(const_option)] #[must_use = "this returns the result of the operation, \ without modifying the original"] #[inline] #[track_caller] pub const fn ilog10(self) -> u32 { - self.checked_ilog10().expect("argument of integer logarithm must be positive") + if let Some(log) = self.checked_ilog10() { + log + } else { + int_log10::panic_for_nonpositive_argument() + } } /// Returns the logarithm of the number with respect to an arbitrary base, From 897647b37897d0cecb6216e718640d6b06f983d5 Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Fri, 10 Feb 2023 16:00:03 -0700 Subject: [PATCH 406/501] rustdoc: remove redundant `if s.is_empty()` from `find_testable_code` --- src/librustdoc/html/markdown.rs | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index fb7c34118a491..d547730a7425c 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -787,11 +787,7 @@ pub(crate) fn find_testable_code( } Event::Text(ref s) if register_header.is_some() => { let level = register_header.unwrap(); - if s.is_empty() { - tests.register_header("", level); - } else { - tests.register_header(s, level); - } + tests.register_header(s, level); register_header = None; } _ => {} From 20446bd92bdd165ad23eef413b855a0be78285cc Mon Sep 17 00:00:00 2001 From: base0x10 Date: Fri, 10 Feb 2023 23:58:25 +0000 Subject: [PATCH 407/501] Update broken link in cargo style guide Toml now uses [toml.io](https://toml.io) for released specifications and the github repo for development. Also the old link was for the 0.4 specification, while cargo uses toml_edit, which uses toml 1.0 (reference: https://github.com/toml-rs/toml/blob/main/crates/toml_edit/CHANGELOG.md#030---2021-09-13). Finally the discussion of "Bare keys" vs "Quoted keys" has moved from the `#table` section to `#keys`. --- src/doc/style-guide/src/cargo.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/doc/style-guide/src/cargo.md b/src/doc/style-guide/src/cargo.md index f4993ba06a888..13b96ca8c5e9d 100644 --- a/src/doc/style-guide/src/cargo.md +++ b/src/doc/style-guide/src/cargo.md @@ -17,8 +17,7 @@ followed by the `description` at the end of that section. Don't use quotes around any standard key names; use bare keys. Only use quoted keys for non-standard keys whose names require them, and avoid introducing such key names when possible. See the [TOML -specification](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md#table) -for details. +specification](https://toml.io/en/v1.0.0#keys) for details. Put a single space both before and after the `=` between a key and value. Do not indent any key names; start all key names at the start of a line. 
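As a quick illustration of the key style described above (bare keys, no indentation, a single space on each side of `=`, and quoted keys only where the name itself requires them), here is a hypothetical `Cargo.toml` fragment; the crate name and version numbers are illustrative.

```toml
[package]
name = "example-crate"   # bare key, one space on each side of `=`
version = "0.1.0"
edition = "2021"

# A quoted key is only used when the key's name requires it,
# for example a `cfg` expression used as a table key:
[target.'cfg(windows)'.dependencies]
winapi = "0.3"
```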
From a06aaa4a9efe605b91b2f42718696906ce2ba629 Mon Sep 17 00:00:00 2001 From: Josh Stone Date: Wed, 1 Feb 2023 12:52:06 -0800 Subject: [PATCH 408/501] Update the minimum external LLVM to 14 --- .github/workflows/ci.yml | 8 +-- compiler/rustc_codegen_llvm/src/consts.rs | 9 +--- compiler/rustc_codegen_llvm/src/context.rs | 11 ----- compiler/rustc_codegen_llvm/src/llvm_util.rs | 15 +----- compiler/rustc_codegen_llvm/src/type_of.rs | 7 +-- .../rustc_llvm/llvm-wrapper/PassWrapper.cpp | 44 ++--------------- .../rustc_llvm/llvm-wrapper/RustWrapper.cpp | 14 +----- .../src/spec/aarch64_be_unknown_linux_gnu.rs | 2 +- .../aarch64_be_unknown_linux_gnu_ilp32.rs | 2 +- .../src/spec/aarch64_kmc_solid_asp3.rs | 2 +- .../src/spec/aarch64_linux_android.rs | 2 +- .../aarch64_nintendo_switch_freestanding.rs | 1 + .../src/spec/aarch64_pc_windows_gnullvm.rs | 2 +- .../src/spec/aarch64_pc_windows_msvc.rs | 2 +- .../src/spec/aarch64_unknown_freebsd.rs | 1 + .../src/spec/aarch64_unknown_fuchsia.rs | 1 + .../src/spec/aarch64_unknown_hermit.rs | 2 +- .../src/spec/aarch64_unknown_linux_gnu.rs | 2 +- .../spec/aarch64_unknown_linux_gnu_ilp32.rs | 2 +- .../src/spec/aarch64_unknown_linux_musl.rs | 1 + .../src/spec/aarch64_unknown_netbsd.rs | 1 + .../src/spec/aarch64_unknown_none.rs | 2 +- .../spec/aarch64_unknown_none_softfloat.rs | 2 +- .../src/spec/aarch64_unknown_nto_qnx_710.rs | 1 + .../src/spec/aarch64_unknown_openbsd.rs | 6 ++- .../src/spec/aarch64_unknown_redox.rs | 1 + .../src/spec/aarch64_unknown_uefi.rs | 1 + .../src/spec/aarch64_uwp_windows_msvc.rs | 1 + .../src/spec/aarch64_wrs_vxworks.rs | 6 ++- .../src/spec/arm64_32_apple_watchos.rs | 2 +- src/bootstrap/native.rs | 4 +- .../x86_64-gnu-llvm-13-stage1/Dockerfile | 49 ------------------- .../Dockerfile | 36 +++----------- src/ci/github-actions/ci.yml | 9 +--- tests/assembly/align_offset.rs | 1 - tests/codegen/consts.rs | 1 - tests/codegen/merge-functions.rs | 1 - tests/codegen/sse42-implies-crc32.rs | 1 - tests/codegen/uninit-consts.rs | 1 - tests/codegen/vec-in-place.rs | 1 - tests/ui/optimization-remark.rs | 1 - tests/ui/sanitize/memory-eager.rs | 1 - 42 files changed, 54 insertions(+), 205 deletions(-) delete mode 100644 src/ci/docker/host-x86_64/x86_64-gnu-llvm-13-stage1/Dockerfile rename src/ci/docker/host-x86_64/{x86_64-gnu-llvm-13 => x86_64-gnu-llvm-14-stage1}/Dockerfile (50%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b51105750c82c..7c17dfd8c8edc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -54,7 +54,7 @@ jobs: tidy: true os: ubuntu-20.04-xl env: {} - - name: x86_64-gnu-llvm-13 + - name: x86_64-gnu-llvm-14 tidy: false os: ubuntu-20.04-xl env: {} @@ -300,11 +300,7 @@ jobs: env: RUST_BACKTRACE: 1 os: ubuntu-20.04-xl - - name: x86_64-gnu-llvm-13 - env: - RUST_BACKTRACE: 1 - os: ubuntu-20.04-xl - - name: x86_64-gnu-llvm-13-stage1 + - name: x86_64-gnu-llvm-14-stage1 env: RUST_BACKTRACE: 1 os: ubuntu-20.04-xl diff --git a/compiler/rustc_codegen_llvm/src/consts.rs b/compiler/rustc_codegen_llvm/src/consts.rs index cad3c5d87b73c..92629aa18d458 100644 --- a/compiler/rustc_codegen_llvm/src/consts.rs +++ b/compiler/rustc_codegen_llvm/src/consts.rs @@ -3,7 +3,6 @@ use crate::common::{self, CodegenCx}; use crate::debuginfo; use crate::errors::{InvalidMinimumAlignment, SymbolAlreadyDefined}; use crate::llvm::{self, True}; -use crate::llvm_util; use crate::type_::Type; use crate::type_of::LayoutLlvmExt; use crate::value::Value; @@ -56,13 +55,7 @@ pub fn const_alloc_to_llvm<'ll>(cx: &CodegenCx<'ll, '_>, alloc: 
ConstAllocation< // to avoid the cost of generating large complex const expressions. // For example, `[(u32, u8); 1024 * 1024]` contains uninit padding in each element, // and would result in `{ [5 x i8] zeroinitializer, [3 x i8] undef, ...repeat 1M times... }`. - let max = if llvm_util::get_version() < (14, 0, 0) { - // Generating partially-uninit consts inhibits optimizations in LLVM < 14. - // See https://github.com/rust-lang/rust/issues/84565. - 1 - } else { - cx.sess().opts.unstable_opts.uninit_const_chunk_threshold - }; + let max = cx.sess().opts.unstable_opts.uninit_const_chunk_threshold; let allow_uninit_chunks = chunks.clone().take(max.saturating_add(1)).count() <= max; if allow_uninit_chunks { diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs index c0b23585d3a77..120dc59dfb3b6 100644 --- a/compiler/rustc_codegen_llvm/src/context.rs +++ b/compiler/rustc_codegen_llvm/src/context.rs @@ -143,17 +143,6 @@ pub unsafe fn create_module<'ll>( let mut target_data_layout = sess.target.data_layout.to_string(); let llvm_version = llvm_util::get_version(); - if llvm_version < (14, 0, 0) { - if sess.target.llvm_target == "i686-pc-windows-msvc" - || sess.target.llvm_target == "i586-pc-windows-msvc" - { - target_data_layout = - "e-m:x-p:32:32-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:32-n8:16:32-a:0:32-S32" - .to_string(); - } else if sess.target.arch == "wasm32" { - target_data_layout = target_data_layout.replace("-p10:8:8-p20:8:8", ""); - } - } if llvm_version < (16, 0, 0) { if sess.target.arch == "s390x" { target_data_layout = target_data_layout.replace("-v128:64", ""); diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs index 79b243f73d1a0..12e3581c6632e 100644 --- a/compiler/rustc_codegen_llvm/src/llvm_util.rs +++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs @@ -152,13 +152,7 @@ pub fn time_trace_profiler_finish(file_name: &Path) { pub fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]> { let arch = if sess.target.arch == "x86_64" { "x86" } else { &*sess.target.arch }; match (arch, s) { - ("x86", "sse4.2") => { - if get_version() >= (14, 0, 0) { - smallvec!["sse4.2", "crc32"] - } else { - smallvec!["sse4.2"] - } - } + ("x86", "sse4.2") => smallvec!["sse4.2", "crc32"], ("x86", "pclmulqdq") => smallvec!["pclmul"], ("x86", "rdrand") => smallvec!["rdrnd"], ("x86", "bmi1") => smallvec!["bmi"], @@ -243,7 +237,7 @@ pub fn target_features(sess: &Session, allow_unstable: bool) -> Vec { // (see https://reviews.llvm.org/D110413). This unstable target feature is intended for use // by compiler-builtins, to export the builtins with the expected, LLVM-version-dependent ABI. // The target feature can be dropped once we no longer support older LLVM versions. 
- if sess.is_nightly_build() && get_version() >= (14, 0, 0) { + if sess.is_nightly_build() { features.push(Symbol::intern("llvm14-builtins-abi")); } features @@ -494,11 +488,6 @@ pub(crate) fn global_llvm_features(sess: &Session, diagnostics: bool) -> Vec= (14, 0, 0) && sess.target.arch == "aarch64" { - features.push("+v8a".into()); - } - if diagnostics && let Some(f) = check_tied_features(sess, &featsmap) { sess.emit_err(TargetFeatureDisableOrEnable { features: f, diff --git a/compiler/rustc_codegen_llvm/src/type_of.rs b/compiler/rustc_codegen_llvm/src/type_of.rs index c73d233b767a4..0cb4bc806a137 100644 --- a/compiler/rustc_codegen_llvm/src/type_of.rs +++ b/compiler/rustc_codegen_llvm/src/type_of.rs @@ -1,6 +1,5 @@ use crate::common::*; use crate::context::TypeLowering; -use crate::llvm_util::get_version; use crate::type_::Type; use rustc_codegen_ssa::traits::*; use rustc_middle::bug; @@ -43,10 +42,8 @@ fn uncached_llvm_type<'a, 'tcx>( // in problematically distinct types due to HRTB and subtyping (see #47638). // ty::Dynamic(..) | ty::Adt(..) | ty::Closure(..) | ty::Foreign(..) | ty::Generator(..) | ty::Str - // For performance reasons we use names only when emitting LLVM IR. Unless we are on - // LLVM < 14, where the use of unnamed types resulted in various issues, e.g., #76213, - // #79564, and #79246. - if get_version() < (14, 0, 0) || !cx.sess().fewer_names() => + // For performance reasons we use names only when emitting LLVM IR. + if !cx.sess().fewer_names() => { let mut name = with_no_visible_paths!(with_no_trimmed_paths!(layout.ty.to_string())); if let (&ty::Adt(def, _), &Variants::Single { index }) = diff --git a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp index 15a4273fc5918..fbc1d8ef310c6 100644 --- a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp @@ -14,6 +14,7 @@ #include "llvm/IR/AssemblyAnnotationWriter.h" #include "llvm/IR/IntrinsicInst.h" #include "llvm/IR/Verifier.h" +#include "llvm/MC/TargetRegistry.h" #include "llvm/Object/ObjectFile.h" #include "llvm/Object/IRObjectFile.h" #include "llvm/Passes/PassBuilder.h" @@ -25,11 +26,6 @@ #include "llvm/Support/VirtualFileSystem.h" #endif #include "llvm/Support/Host.h" -#if LLVM_VERSION_LT(14, 0) -#include "llvm/Support/TargetRegistry.h" -#else -#include "llvm/MC/TargetRegistry.h" -#endif #include "llvm/Target/TargetMachine.h" #include "llvm/Transforms/IPO/PassManagerBuilder.h" #include "llvm/Transforms/IPO/AlwaysInliner.h" @@ -267,10 +263,6 @@ enum class LLVMRustPassBuilderOptLevel { Oz, }; -#if LLVM_VERSION_LT(14,0) -using OptimizationLevel = PassBuilder::OptimizationLevel; -#endif - static OptimizationLevel fromRust(LLVMRustPassBuilderOptLevel Level) { switch (Level) { case LLVMRustPassBuilderOptLevel::O0: @@ -747,27 +739,18 @@ LLVMRustOptimize( if (SanitizerOptions) { if (SanitizerOptions->SanitizeMemory) { -#if LLVM_VERSION_GE(14, 0) MemorySanitizerOptions Options( SanitizerOptions->SanitizeMemoryTrackOrigins, SanitizerOptions->SanitizeMemoryRecover, /*CompileKernel=*/false, /*EagerChecks=*/true); -#else - MemorySanitizerOptions Options( - SanitizerOptions->SanitizeMemoryTrackOrigins, - SanitizerOptions->SanitizeMemoryRecover, - /*CompileKernel=*/false); -#endif OptimizerLastEPCallbacks.push_back( [Options](ModulePassManager &MPM, OptimizationLevel Level) { -#if LLVM_VERSION_GE(14, 0) && LLVM_VERSION_LT(16, 0) +#if LLVM_VERSION_LT(16, 0) MPM.addPass(ModuleMemorySanitizerPass(Options)); + 
MPM.addPass(createModuleToFunctionPassAdaptor(MemorySanitizerPass(Options))); #else MPM.addPass(MemorySanitizerPass(Options)); -#endif -#if LLVM_VERSION_LT(16, 0) - MPM.addPass(createModuleToFunctionPassAdaptor(MemorySanitizerPass(Options))); #endif } ); @@ -776,11 +759,7 @@ LLVMRustOptimize( if (SanitizerOptions->SanitizeThread) { OptimizerLastEPCallbacks.push_back( [](ModulePassManager &MPM, OptimizationLevel Level) { -#if LLVM_VERSION_GE(14, 0) MPM.addPass(ModuleThreadSanitizerPass()); -#else - MPM.addPass(ThreadSanitizerPass()); -#endif MPM.addPass(createModuleToFunctionPassAdaptor(ThreadSanitizerPass())); } ); @@ -792,7 +771,6 @@ LLVMRustOptimize( #if LLVM_VERSION_LT(15, 0) MPM.addPass(RequireAnalysisPass()); #endif -#if LLVM_VERSION_GE(14, 0) AddressSanitizerOptions opts = AddressSanitizerOptions{ /*CompileKernel=*/false, SanitizerOptions->SanitizeAddressRecover, @@ -803,13 +781,6 @@ LLVMRustOptimize( MPM.addPass(ModuleAddressSanitizerPass(opts)); #else MPM.addPass(AddressSanitizerPass(opts)); -#endif -#else - MPM.addPass(ModuleAddressSanitizerPass( - /*CompileKernel=*/false, SanitizerOptions->SanitizeAddressRecover)); - MPM.addPass(createModuleToFunctionPassAdaptor(AddressSanitizerPass( - /*CompileKernel=*/false, SanitizerOptions->SanitizeAddressRecover, - /*UseAfterScope=*/true))); #endif } ); @@ -817,15 +788,10 @@ LLVMRustOptimize( if (SanitizerOptions->SanitizeHWAddress) { OptimizerLastEPCallbacks.push_back( [SanitizerOptions](ModulePassManager &MPM, OptimizationLevel Level) { -#if LLVM_VERSION_GE(14, 0) HWAddressSanitizerOptions opts( /*CompileKernel=*/false, SanitizerOptions->SanitizeHWAddressRecover, /*DisableOptimization=*/false); MPM.addPass(HWAddressSanitizerPass(opts)); -#else - MPM.addPass(HWAddressSanitizerPass( - /*CompileKernel=*/false, SanitizerOptions->SanitizeHWAddressRecover)); -#endif } ); } @@ -1328,11 +1294,7 @@ extern "C" bool LLVMRustPrepareThinLTOResolveWeak(const LLVMRustThinLTOData *Data, LLVMModuleRef M) { Module &Mod = *unwrap(M); const auto &DefinedGlobals = Data->ModuleToDefinedGVSummaries.lookup(Mod.getModuleIdentifier()); -#if LLVM_VERSION_GE(14, 0) thinLTOFinalizeInModule(Mod, DefinedGlobals, /*PropagateAttrs=*/true); -#else - thinLTOResolvePrevailingInModule(Mod, DefinedGlobals); -#endif return true; } diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp index 87b0e1273eb77..b1e6534944db3 100644 --- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp @@ -63,11 +63,7 @@ static LLVM_THREAD_LOCAL char *LastError; // // Notably it exits the process with code 101, unlike LLVM's default of 1. static void FatalErrorHandler(void *UserData, -#if LLVM_VERSION_LT(14, 0) - const std::string& Reason, -#else const char* Reason, -#endif bool GenCrashDiag) { // Do the same thing that the default error handler does. 
std::cerr << "LLVM ERROR: " << Reason << std::endl; @@ -249,18 +245,10 @@ static Attribute::AttrKind fromRust(LLVMRustAttribute Kind) { template static inline void AddAttributes(T *t, unsigned Index, LLVMAttributeRef *Attrs, size_t AttrsLen) { AttributeList PAL = t->getAttributes(); - AttributeList PALNew; -#if LLVM_VERSION_LT(14, 0) - AttrBuilder B; - for (LLVMAttributeRef Attr : makeArrayRef(Attrs, AttrsLen)) - B.addAttribute(unwrap(Attr)); - PALNew = PAL.addAttributes(t->getContext(), Index, B); -#else AttrBuilder B(t->getContext()); for (LLVMAttributeRef Attr : ArrayRef(Attrs, AttrsLen)) B.addAttribute(unwrap(Attr)); - PALNew = PAL.addAttributesAtIndex(t->getContext(), Index, B); -#endif + AttributeList PALNew = PAL.addAttributesAtIndex(t->getContext(), Index, B); t->setAttributes(PALNew); } diff --git a/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu.rs b/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu.rs index 9bce82a191e8a..b84783c0a407b 100644 --- a/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu.rs +++ b/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu.rs @@ -8,7 +8,7 @@ pub fn target() -> Target { data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), arch: "aarch64".into(), options: TargetOptions { - features: "+outline-atomics".into(), + features: "+v8a,+outline-atomics".into(), max_atomic_width: Some(128), mcount: "\u{1}_mcount".into(), endian: Endian::Big, diff --git a/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu_ilp32.rs b/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu_ilp32.rs index c9ceb55ddad59..a24e0119f25e9 100644 --- a/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu_ilp32.rs +++ b/compiler/rustc_target/src/spec/aarch64_be_unknown_linux_gnu_ilp32.rs @@ -12,7 +12,7 @@ pub fn target() -> Target { arch: "aarch64".into(), options: TargetOptions { abi: "ilp32".into(), - features: "+outline-atomics".into(), + features: "+v8a,+outline-atomics".into(), mcount: "\u{1}_mcount".into(), endian: Endian::Big, ..base diff --git a/compiler/rustc_target/src/spec/aarch64_kmc_solid_asp3.rs b/compiler/rustc_target/src/spec/aarch64_kmc_solid_asp3.rs index 6ea9ae2667efa..437fd60158030 100644 --- a/compiler/rustc_target/src/spec/aarch64_kmc_solid_asp3.rs +++ b/compiler/rustc_target/src/spec/aarch64_kmc_solid_asp3.rs @@ -9,7 +9,7 @@ pub fn target() -> Target { arch: "aarch64".into(), options: TargetOptions { linker: Some("aarch64-kmc-elf-gcc".into()), - features: "+neon,+fp-armv8".into(), + features: "+v8a,+neon,+fp-armv8".into(), relocation_model: RelocModel::Static, disable_redzone: true, max_atomic_width: Some(128), diff --git a/compiler/rustc_target/src/spec/aarch64_linux_android.rs b/compiler/rustc_target/src/spec/aarch64_linux_android.rs index daa946ccd519c..071b727b35c56 100644 --- a/compiler/rustc_target/src/spec/aarch64_linux_android.rs +++ b/compiler/rustc_target/src/spec/aarch64_linux_android.rs @@ -13,7 +13,7 @@ pub fn target() -> Target { max_atomic_width: Some(128), // As documented in https://developer.android.com/ndk/guides/cpu-features.html // the neon (ASIMD) and FP must exist on all android aarch64 targets. 
- features: "+neon,+fp-armv8".into(), + features: "+v8a,+neon,+fp-armv8".into(), supported_sanitizers: SanitizerSet::CFI | SanitizerSet::HWADDRESS | SanitizerSet::MEMTAG diff --git a/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding.rs b/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding.rs index 529e98d2cf31c..e271bdc8a015b 100644 --- a/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding.rs +++ b/compiler/rustc_target/src/spec/aarch64_nintendo_switch_freestanding.rs @@ -10,6 +10,7 @@ pub fn target() -> Target { data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), arch: "aarch64".into(), options: TargetOptions { + features: "+v8a".into(), linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), linker: Some("rust-lld".into()), link_script: Some(LINKER_SCRIPT.into()), diff --git a/compiler/rustc_target/src/spec/aarch64_pc_windows_gnullvm.rs b/compiler/rustc_target/src/spec/aarch64_pc_windows_gnullvm.rs index 98d3e79c8e97e..cf1d7ca1158d5 100644 --- a/compiler/rustc_target/src/spec/aarch64_pc_windows_gnullvm.rs +++ b/compiler/rustc_target/src/spec/aarch64_pc_windows_gnullvm.rs @@ -3,7 +3,7 @@ use crate::spec::Target; pub fn target() -> Target { let mut base = super::windows_gnullvm_base::opts(); base.max_atomic_width = Some(128); - base.features = "+neon,+fp-armv8".into(); + base.features = "+v8a,+neon,+fp-armv8".into(); base.linker = Some("aarch64-w64-mingw32-clang".into()); Target { diff --git a/compiler/rustc_target/src/spec/aarch64_pc_windows_msvc.rs b/compiler/rustc_target/src/spec/aarch64_pc_windows_msvc.rs index 7c4544b3f33c0..56b76bc7ada9a 100644 --- a/compiler/rustc_target/src/spec/aarch64_pc_windows_msvc.rs +++ b/compiler/rustc_target/src/spec/aarch64_pc_windows_msvc.rs @@ -3,7 +3,7 @@ use crate::spec::Target; pub fn target() -> Target { let mut base = super::windows_msvc_base::opts(); base.max_atomic_width = Some(128); - base.features = "+neon,+fp-armv8".into(); + base.features = "+v8a,+neon,+fp-armv8".into(); Target { llvm_target: "aarch64-pc-windows-msvc".into(), diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_freebsd.rs b/compiler/rustc_target/src/spec/aarch64_unknown_freebsd.rs index 2f39c4862cfac..84fa9814bbeab 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_freebsd.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_freebsd.rs @@ -7,6 +7,7 @@ pub fn target() -> Target { data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), arch: "aarch64".into(), options: TargetOptions { + features: "+v8a".into(), max_atomic_width: Some(128), supported_sanitizers: SanitizerSet::ADDRESS | SanitizerSet::CFI diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_fuchsia.rs b/compiler/rustc_target/src/spec/aarch64_unknown_fuchsia.rs index ef2ab304f9e04..a5683fa7348a7 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_fuchsia.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_fuchsia.rs @@ -7,6 +7,7 @@ pub fn target() -> Target { data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), arch: "aarch64".into(), options: TargetOptions { + features: "+v8a".into(), max_atomic_width: Some(128), supported_sanitizers: SanitizerSet::ADDRESS | SanitizerSet::CFI diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_hermit.rs b/compiler/rustc_target/src/spec/aarch64_unknown_hermit.rs index 1d7269c8d737c..87e8d62702691 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_hermit.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_hermit.rs @@ 
-3,7 +3,7 @@ use crate::spec::Target; pub fn target() -> Target { let mut base = super::hermit_base::opts(); base.max_atomic_width = Some(128); - base.features = "+strict-align,+neon,+fp-armv8".into(); + base.features = "+v8a,+strict-align,+neon,+fp-armv8".into(); Target { llvm_target: "aarch64-unknown-hermit".into(), diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs b/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs index 36d54f1d7cc5c..da246089440fc 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu.rs @@ -7,7 +7,7 @@ pub fn target() -> Target { data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), arch: "aarch64".into(), options: TargetOptions { - features: "+outline-atomics".into(), + features: "+v8a,+outline-atomics".into(), mcount: "\u{1}_mcount".into(), max_atomic_width: Some(128), supported_sanitizers: SanitizerSet::ADDRESS diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu_ilp32.rs b/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu_ilp32.rs index 63023df1d6c63..ad9df53c2b7fb 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu_ilp32.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_linux_gnu_ilp32.rs @@ -8,7 +8,7 @@ pub fn target() -> Target { arch: "aarch64".into(), options: TargetOptions { abi: "ilp32".into(), - features: "+outline-atomics".into(), + features: "+v8a,+outline-atomics".into(), max_atomic_width: Some(128), mcount: "\u{1}_mcount".into(), ..super::linux_gnu_base::opts() diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs b/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs index 9c299fed6be16..d0c950c2e32f6 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_linux_musl.rs @@ -4,6 +4,7 @@ pub fn target() -> Target { let mut base = super::linux_musl_base::opts(); base.max_atomic_width = Some(128); base.supports_xray = true; + base.features = "+v8a".into(); Target { llvm_target: "aarch64-unknown-linux-musl".into(), diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_netbsd.rs b/compiler/rustc_target/src/spec/aarch64_unknown_netbsd.rs index 703f7502295ee..a58b64d3d03a8 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_netbsd.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_netbsd.rs @@ -7,6 +7,7 @@ pub fn target() -> Target { data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), arch: "aarch64".into(), options: TargetOptions { + features: "+v8a".into(), mcount: "__mcount".into(), max_atomic_width: Some(128), ..super::netbsd_base::opts() diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_none.rs b/compiler/rustc_target/src/spec/aarch64_unknown_none.rs index aca52e1478eb8..30fbe6f3c153a 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_none.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_none.rs @@ -14,7 +14,7 @@ pub fn target() -> Target { let opts = TargetOptions { linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), linker: Some("rust-lld".into()), - features: "+strict-align,+neon,+fp-armv8".into(), + features: "+v8a,+strict-align,+neon,+fp-armv8".into(), supported_sanitizers: SanitizerSet::KCFI, relocation_model: RelocModel::Static, disable_redzone: true, diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_none_softfloat.rs b/compiler/rustc_target/src/spec/aarch64_unknown_none_softfloat.rs index 
2385cb69abbef..9dfa1f268ac51 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_none_softfloat.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_none_softfloat.rs @@ -13,7 +13,7 @@ pub fn target() -> Target { abi: "softfloat".into(), linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes), linker: Some("rust-lld".into()), - features: "+strict-align,-neon,-fp-armv8".into(), + features: "+v8a,+strict-align,-neon,-fp-armv8".into(), relocation_model: RelocModel::Static, disable_redzone: true, max_atomic_width: Some(128), diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs b/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs index 916b6137b650a..8c1126ae6d1cc 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_nto_qnx_710.rs @@ -17,6 +17,7 @@ pub fn target() -> Target { data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), arch: "aarch64".into(), options: TargetOptions { + features: "+v8a".into(), max_atomic_width: Some(128), pre_link_args: TargetOptions::link_args( LinkerFlavor::Gnu(Cc::Yes, Lld::No), diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_openbsd.rs b/compiler/rustc_target/src/spec/aarch64_unknown_openbsd.rs index 3d99040f0d326..224e31af24f7d 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_openbsd.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_openbsd.rs @@ -6,6 +6,10 @@ pub fn target() -> Target { pointer_width: 64, data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), arch: "aarch64".into(), - options: TargetOptions { max_atomic_width: Some(128), ..super::openbsd_base::opts() }, + options: TargetOptions { + features: "+v8a".into(), + max_atomic_width: Some(128), + ..super::openbsd_base::opts() + }, } } diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_redox.rs b/compiler/rustc_target/src/spec/aarch64_unknown_redox.rs index 6c9be4c8e9371..5650162cdbc99 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_redox.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_redox.rs @@ -3,6 +3,7 @@ use crate::spec::Target; pub fn target() -> Target { let mut base = super::redox_base::opts(); base.max_atomic_width = Some(128); + base.features = "+v8a".into(); Target { llvm_target: "aarch64-unknown-redox".into(), diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_uefi.rs b/compiler/rustc_target/src/spec/aarch64_unknown_uefi.rs index 817ff2422a203..82fb015569d63 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_uefi.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_uefi.rs @@ -9,6 +9,7 @@ pub fn target() -> Target { base.max_atomic_width = Some(128); base.add_pre_link_args(LinkerFlavor::Msvc(Lld::No), &["/machine:arm64"]); + base.features = "+v8a".into(); Target { llvm_target: "aarch64-unknown-windows".into(), diff --git a/compiler/rustc_target/src/spec/aarch64_uwp_windows_msvc.rs b/compiler/rustc_target/src/spec/aarch64_uwp_windows_msvc.rs index db4dbf817b88b..d39442d917760 100644 --- a/compiler/rustc_target/src/spec/aarch64_uwp_windows_msvc.rs +++ b/compiler/rustc_target/src/spec/aarch64_uwp_windows_msvc.rs @@ -3,6 +3,7 @@ use crate::spec::Target; pub fn target() -> Target { let mut base = super::windows_uwp_msvc_base::opts(); base.max_atomic_width = Some(128); + base.features = "+v8a".into(); Target { llvm_target: "aarch64-pc-windows-msvc".into(), diff --git a/compiler/rustc_target/src/spec/aarch64_wrs_vxworks.rs 
b/compiler/rustc_target/src/spec/aarch64_wrs_vxworks.rs index e118553dfd2bb..7e2af4c7a6a49 100644 --- a/compiler/rustc_target/src/spec/aarch64_wrs_vxworks.rs +++ b/compiler/rustc_target/src/spec/aarch64_wrs_vxworks.rs @@ -6,6 +6,10 @@ pub fn target() -> Target { pointer_width: 64, data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".into(), arch: "aarch64".into(), - options: TargetOptions { max_atomic_width: Some(128), ..super::vxworks_base::opts() }, + options: TargetOptions { + features: "+v8a".into(), + max_atomic_width: Some(128), + ..super::vxworks_base::opts() + }, } } diff --git a/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs b/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs index 52ee68e7560f6..c757ed45e4725 100644 --- a/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs +++ b/compiler/rustc_target/src/spec/arm64_32_apple_watchos.rs @@ -9,7 +9,7 @@ pub fn target() -> Target { data_layout: "e-m:o-p:32:32-i64:64-i128:128-n32:64-S128".into(), arch: "aarch64".into(), options: TargetOptions { - features: "+neon,+fp-armv8,+apple-a7".into(), + features: "+v8a,+neon,+fp-armv8,+apple-a7".into(), max_atomic_width: Some(128), forces_embed_bitcode: true, dynamic_linking: false, diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 07d339c067c86..9235de75ec670 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -561,11 +561,11 @@ fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) { let version = output(cmd.arg("--version")); let mut parts = version.split('.').take(2).filter_map(|s| s.parse::().ok()); if let (Some(major), Some(_minor)) = (parts.next(), parts.next()) { - if major >= 13 { + if major >= 14 { return; } } - panic!("\n\nbad LLVM version: {}, need >=13.0\n\n", version) + panic!("\n\nbad LLVM version: {}, need >=14.0\n\n", version) } fn configure_cmake( diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-13-stage1/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-13-stage1/Dockerfile deleted file mode 100644 index bcbf58253b190..0000000000000 --- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-13-stage1/Dockerfile +++ /dev/null @@ -1,49 +0,0 @@ -FROM ubuntu:22.04 - -ARG DEBIAN_FRONTEND=noninteractive -RUN apt-get update && apt-get install -y --no-install-recommends \ - g++ \ - gcc-multilib \ - make \ - ninja-build \ - file \ - curl \ - ca-certificates \ - python2.7 \ - git \ - cmake \ - sudo \ - gdb \ - llvm-13-tools \ - llvm-13-dev \ - libedit-dev \ - libssl-dev \ - pkg-config \ - zlib1g-dev \ - xz-utils \ - nodejs \ - && rm -rf /var/lib/apt/lists/* - -COPY scripts/sccache.sh /scripts/ -RUN sh /scripts/sccache.sh - -# We are disabling CI LLVM since this builder is intentionally using a host -# LLVM, rather than the typical src/llvm-project LLVM. -ENV NO_DOWNLOAD_CI_LLVM 1 - -# Using llvm-link-shared due to libffi issues -- see #34486 -ENV RUST_CONFIGURE_ARGS \ - --build=x86_64-unknown-linux-gnu \ - --llvm-root=/usr/lib/llvm-13 \ - --enable-llvm-link-shared \ - --set rust.thin-lto-import-instr-limit=10 - -ENV SCRIPT python2.7 ../x.py --stage 1 test --exclude src/tools/tidy && \ - # Run the `mir-opt` tests again but this time for a 32-bit target. - # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have - # both 32-bit and 64-bit outputs updated by the PR author, before - # the PR is approved and tested for merging. - # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`, - # despite having different output on 32-bit vs 64-bit targets. 
- python2.7 ../x.py --stage 1 test tests/mir-opt \ - --host='' --target=i686-unknown-linux-gnu diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-13/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile similarity index 50% rename from src/ci/docker/host-x86_64/x86_64-gnu-llvm-13/Dockerfile rename to src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile index 9fc9e9cbffbed..b99a0886b4d9b 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-13/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-14-stage1/Dockerfile @@ -1,8 +1,6 @@ FROM ubuntu:22.04 ARG DEBIAN_FRONTEND=noninteractive - -# NOTE: intentionally installs both python2 and python3 so we can test support for both. RUN apt-get update && apt-get install -y --no-install-recommends \ g++ \ gcc-multilib \ @@ -11,28 +9,20 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ file \ curl \ ca-certificates \ - python2.7 \ - python3.9 \ + python3 \ git \ cmake \ sudo \ gdb \ - llvm-13-tools \ - llvm-13-dev \ + llvm-14-tools \ + llvm-14-dev \ libedit-dev \ libssl-dev \ pkg-config \ zlib1g-dev \ xz-utils \ nodejs \ - \ -# Install powershell so we can test x.ps1 on Linux - apt-transport-https software-properties-common && \ - curl -s "https://packages.microsoft.com/config/ubuntu/$(lsb_release -rs)/packages-microsoft-prod.deb" > packages-microsoft-prod.deb && \ - dpkg -i packages-microsoft-prod.deb && \ - apt-get update && \ - apt-get install -y powershell \ - && rm -rf /var/lib/apt/lists/* + && rm -rf /var/lib/apt/lists/* COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh @@ -44,26 +34,16 @@ ENV NO_DOWNLOAD_CI_LLVM 1 # Using llvm-link-shared due to libffi issues -- see #34486 ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ - --llvm-root=/usr/lib/llvm-13 \ + --llvm-root=/usr/lib/llvm-14 \ --enable-llvm-link-shared \ --set rust.thin-lto-import-instr-limit=10 -# NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux. -ENV SCRIPT ../x.py --stage 2 test --exclude src/tools/tidy && \ +ENV SCRIPT ../x.py --stage 1 test --exclude src/tools/tidy && \ # Run the `mir-opt` tests again but this time for a 32-bit target. # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have # both 32-bit and 64-bit outputs updated by the PR author, before # the PR is approved and tested for merging. # It will also detect tests lacking `// EMIT_MIR_FOR_EACH_BIT_WIDTH`, # despite having different output on 32-bit vs 64-bit targets. - ../x --stage 2 test tests/mir-opt \ - --host='' --target=i686-unknown-linux-gnu && \ - # Run the UI test suite again, but in `--pass=check` mode - # - # This is intended to make sure that both `--pass=check` continues to - # work. - # - ../x.ps1 --stage 2 test tests/ui --pass=check \ - --host='' --target=i686-unknown-linux-gnu && \ - # Run tidy at the very end, after all the other tests. 
- python2.7 ../x.py --stage 2 test src/tools/tidy + ../x.py --stage 1 test tests/mir-opt \ + --host='' --target=i686-unknown-linux-gnu diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index 3c128c0ca251b..ad9c308ad852d 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -300,7 +300,7 @@ jobs: <<: *job-linux-xl tidy: true - - name: x86_64-gnu-llvm-13 + - name: x86_64-gnu-llvm-14 <<: *job-linux-xl tidy: false @@ -459,12 +459,7 @@ jobs: RUST_BACKTRACE: 1 <<: *job-linux-xl - - name: x86_64-gnu-llvm-13 - env: - RUST_BACKTRACE: 1 - <<: *job-linux-xl - - - name: x86_64-gnu-llvm-13-stage1 + - name: x86_64-gnu-llvm-14-stage1 env: RUST_BACKTRACE: 1 <<: *job-linux-xl diff --git a/tests/assembly/align_offset.rs b/tests/assembly/align_offset.rs index c5eefca3467bb..116edf62bbeda 100644 --- a/tests/assembly/align_offset.rs +++ b/tests/assembly/align_offset.rs @@ -1,7 +1,6 @@ // assembly-output: emit-asm // compile-flags: -Copt-level=1 // only-x86_64 -// min-llvm-version: 14.0 #![crate_type="rlib"] // CHECK-LABEL: align_offset_byte_ptr diff --git a/tests/codegen/consts.rs b/tests/codegen/consts.rs index 260d9de867087..d0418d1114289 100644 --- a/tests/codegen/consts.rs +++ b/tests/codegen/consts.rs @@ -1,5 +1,4 @@ // compile-flags: -C no-prepopulate-passes -// min-llvm-version: 14.0 #![crate_type = "lib"] diff --git a/tests/codegen/merge-functions.rs b/tests/codegen/merge-functions.rs index 8e8fe5c964d3c..d6caeeee89669 100644 --- a/tests/codegen/merge-functions.rs +++ b/tests/codegen/merge-functions.rs @@ -1,4 +1,3 @@ -// min-llvm-version: 14.0 // revisions: O Os //[Os] compile-flags: -Copt-level=s //[O] compile-flags: -O diff --git a/tests/codegen/sse42-implies-crc32.rs b/tests/codegen/sse42-implies-crc32.rs index 47b1a8993404a..56079d32a8d81 100644 --- a/tests/codegen/sse42-implies-crc32.rs +++ b/tests/codegen/sse42-implies-crc32.rs @@ -1,5 +1,4 @@ // only-x86_64 -// min-llvm-version: 14.0 // compile-flags: -Copt-level=3 #![crate_type = "lib"] diff --git a/tests/codegen/uninit-consts.rs b/tests/codegen/uninit-consts.rs index 98a6761f8abbb..54e9a9e9bb876 100644 --- a/tests/codegen/uninit-consts.rs +++ b/tests/codegen/uninit-consts.rs @@ -1,5 +1,4 @@ // compile-flags: -C no-prepopulate-passes -// min-llvm-version: 14.0 // Check that we use undef (and not zero) for uninitialized bytes in constants. 
diff --git a/tests/codegen/vec-in-place.rs b/tests/codegen/vec-in-place.rs index 5df3669056d0f..9992604221bc4 100644 --- a/tests/codegen/vec-in-place.rs +++ b/tests/codegen/vec-in-place.rs @@ -1,4 +1,3 @@ -// min-llvm-version: 14.0 // ignore-debug: the debug assertions get in the way // compile-flags: -O -Z merge-functions=disabled #![crate_type = "lib"] diff --git a/tests/ui/optimization-remark.rs b/tests/ui/optimization-remark.rs index d4b39c670162d..4f651b1dcbc29 100644 --- a/tests/ui/optimization-remark.rs +++ b/tests/ui/optimization-remark.rs @@ -1,6 +1,5 @@ // build-pass // ignore-pass -// min-llvm-version: 14.0.0 // revisions: all inline merge1 merge2 // compile-flags: --crate-type=lib -Cdebuginfo=1 -Copt-level=2 // diff --git a/tests/ui/sanitize/memory-eager.rs b/tests/ui/sanitize/memory-eager.rs index 0018c2f758182..0e992b4a5ebbb 100644 --- a/tests/ui/sanitize/memory-eager.rs +++ b/tests/ui/sanitize/memory-eager.rs @@ -1,6 +1,5 @@ // needs-sanitizer-support // needs-sanitizer-memory -// min-llvm-version: 14.0.0 // // revisions: unoptimized optimized // From ffdbd58d85ef1f172e4b78e00655b36131ed1d4f Mon Sep 17 00:00:00 2001 From: Josh Stone Date: Fri, 10 Feb 2023 16:13:31 -0800 Subject: [PATCH 409/501] Drop llvm14-builtins-abi with compiler_builtins 0.1.87 --- Cargo.lock | 4 ++-- compiler/rustc_codegen_llvm/src/llvm_util.rs | 13 ++----------- library/std/Cargo.toml | 2 +- 3 files changed, 5 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ff7abca476241..22dc4e74dd432 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -883,9 +883,9 @@ dependencies = [ [[package]] name = "compiler_builtins" -version = "0.1.85" +version = "0.1.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13e81c6cd7ab79f51a0c927d22858d61ad12bd0b3865f0b13ece02a4486aeabb" +checksum = "f867ce54c09855ccd135ad4a50c777182a0c7af5ff20a8f537617bd648b10d50" dependencies = [ "cc", "rustc-std-workspace-core", diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs index 12e3581c6632e..20b1dd9415386 100644 --- a/compiler/rustc_codegen_llvm/src/llvm_util.rs +++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs @@ -211,7 +211,7 @@ pub fn check_tied_features( /// Must express features in the way Rust understands them pub fn target_features(sess: &Session, allow_unstable: bool) -> Vec { let target_machine = create_informational_target_machine(sess); - let mut features: Vec = supported_target_features(sess) + supported_target_features(sess) .iter() .filter_map(|&(feature, gate)| { if sess.is_nightly_build() || allow_unstable || gate.is_none() { @@ -231,16 +231,7 @@ pub fn target_features(sess: &Session, allow_unstable: bool) -> Vec { true }) .map(|feature| Symbol::intern(feature)) - .collect(); - - // LLVM 14 changed the ABI for i128 arguments to __float/__fix builtins on Win64 - // (see https://reviews.llvm.org/D110413). This unstable target feature is intended for use - // by compiler-builtins, to export the builtins with the expected, LLVM-version-dependent ABI. - // The target feature can be dropped once we no longer support older LLVM versions. 
- if sess.is_nightly_build() { - features.push(Symbol::intern("llvm14-builtins-abi")); - } - features + .collect() } pub fn print_version() { diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml index adf521d9b94a1..349cd91c89e69 100644 --- a/library/std/Cargo.toml +++ b/library/std/Cargo.toml @@ -16,7 +16,7 @@ panic_unwind = { path = "../panic_unwind", optional = true } panic_abort = { path = "../panic_abort" } core = { path = "../core" } libc = { version = "0.2.138", default-features = false, features = ['rustc-dep-of-std'] } -compiler_builtins = { version = "0.1.85" } +compiler_builtins = { version = "0.1.87" } profiler_builtins = { path = "../profiler_builtins", optional = true } unwind = { path = "../unwind" } hashbrown = { version = "0.12", default-features = false, features = ['rustc-dep-of-std'] } From 1057e2132e32764676e89e88992dc7a4c3e955ea Mon Sep 17 00:00:00 2001 From: Zephaniah Ong Date: Sat, 11 Feb 2023 09:38:33 +0800 Subject: [PATCH 410/501] emit JSON output for building of bootstrap itself --- src/bootstrap/bootstrap.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 45f238ef4bf1f..013d1ab525b0c 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -784,6 +784,8 @@ def build_bootstrap(self, color, verbose_count): if self.get_toml("metrics", "build"): args.append("--features") args.append("build-metrics") + if self.json_output: + args.append("--message-format=json") if color == "always": args.append("--color=always") elif color == "never": @@ -841,6 +843,7 @@ def parse_args(): parser.add_argument('--build') parser.add_argument('--color', choices=['always', 'never', 'auto']) parser.add_argument('--clean', action='store_true') + parser.add_argument('--json-output', action='store_true') parser.add_argument('-v', '--verbose', action='count', default=0) return parser.parse_known_args(sys.argv)[0] @@ -852,6 +855,7 @@ def bootstrap(args): build.rust_root = os.path.abspath(os.path.join(__file__, '../../..')) build.verbose = args.verbose != 0 build.clean = args.clean + build.json_output = args.json_output # Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`, # then `config.toml` in the root directory. From cbd1b81bd2a308b0f8a1734daf3eb30b56c7537c Mon Sep 17 00:00:00 2001 From: kadmin Date: Sat, 11 Feb 2023 03:04:36 +0000 Subject: [PATCH 411/501] Add array::map benchmarks --- library/core/benches/array.rs | 19 +++++++++++++++++++ library/core/benches/lib.rs | 1 + 2 files changed, 20 insertions(+) create mode 100644 library/core/benches/array.rs diff --git a/library/core/benches/array.rs b/library/core/benches/array.rs new file mode 100644 index 0000000000000..845c60762949b --- /dev/null +++ b/library/core/benches/array.rs @@ -0,0 +1,19 @@ +use test::black_box; +use test::Bencher; + +macro_rules! 
map_array { + ($func_name:ident, $start_item: expr, $map_item: expr, $arr_size: expr) => { + #[bench] + fn $func_name(b: &mut Bencher) { + let arr = [$start_item; $arr_size]; + b.iter(|| black_box(arr).map(|_| black_box($map_item))); + } + }; +} + +map_array!(map_8byte_8byte_8, 0u64, 1u64, 800); +map_array!(map_8byte_8byte_64, 0u64, 1u64, 6400); +map_array!(map_8byte_8byte_256, 0u64, 1u64, 25600); + +map_array!(map_8byte_256byte_256, 0u64, [0u64; 4], 25600); +map_array!(map_256byte_8byte_256, [0u64; 4], 0u64, 25600); diff --git a/library/core/benches/lib.rs b/library/core/benches/lib.rs index f1244d93285e3..e4100120d8252 100644 --- a/library/core/benches/lib.rs +++ b/library/core/benches/lib.rs @@ -9,6 +9,7 @@ extern crate test; mod any; +mod array; mod ascii; mod char; mod fmt; From ee0376c368c50c7dadc84801e88cfdbf250b92a4 Mon Sep 17 00:00:00 2001 From: Lukas Bergdoll Date: Sat, 11 Feb 2023 09:32:52 +0100 Subject: [PATCH 412/501] Split branches in heapsort child selection This allows even better code-gen, cmp + adc. While also more clearly communicating the intent. --- library/core/src/slice/sort.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/library/core/src/slice/sort.rs b/library/core/src/slice/sort.rs index 990540f55f53d..8f02a44667085 100644 --- a/library/core/src/slice/sort.rs +++ b/library/core/src/slice/sort.rs @@ -198,7 +198,12 @@ where } // Choose the greater child. - child += (child + 1 < v.len() && is_less(&v[child], &v[child + 1])) as usize; + if child + 1 < v.len() { + // We need a branch to be sure not to out-of-bounds index, + // but it's highly predictable. The comparison, however, + // is better done branchless, especially for primitives. + child += is_less(&v[child], &v[child + 1]) as usize; + } // Stop if the invariant holds at `node`. if !is_less(&v[node], &v[child]) { From 14033108cd34f08d92b28ff0ae2eafb2d81bf8b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jakub=20Ber=C3=A1nek?= Date: Sat, 11 Feb 2023 10:31:09 +0100 Subject: [PATCH 413/501] Print disk usage in PGO CI script --- src/ci/stage-build.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/src/ci/stage-build.py b/src/ci/stage-build.py index 662c9e36694c6..4e6bcba5e20d2 100644 --- a/src/ci/stage-build.py +++ b/src/ci/stage-build.py @@ -211,7 +211,8 @@ def get_timestamp() -> float: TimerSection = Union[Duration, "Timer"] -def iterate_sections(section: TimerSection, name: str, level: int = 0) -> Iterator[Tuple[int, str, Duration]]: +def iterate_sections(section: TimerSection, name: str, level: int = 0) -> Iterator[ + Tuple[int, str, Duration]]: """ Hierarchically iterate the sections of a timer, in a depth-first order. 
""" @@ -239,7 +240,7 @@ def section(self, name: str) -> "Timer": start = get_timestamp() exc = None - child_timer = Timer(parent_names=self.parent_names + (name, )) + child_timer = Timer(parent_names=self.parent_names + (name,)) full_name = " > ".join(child_timer.parent_names) try: LOGGER.info(f"Section `{full_name}` starts") @@ -648,6 +649,16 @@ def print_binary_sizes(pipeline: Pipeline): LOGGER.info(f"Rustc binary size\n{output.getvalue()}") +def print_free_disk_space(pipeline: Pipeline): + usage = shutil.disk_usage(pipeline.opt_artifacts()) + total = usage.total + used = usage.used + free = usage.free + + logging.info( + f"Free disk space: {format_bytes(free)} out of total {format_bytes(total)} ({(used / total) * 100:.2f}% used)") + + def execute_build_pipeline(timer: Timer, pipeline: Pipeline, final_build_args: List[str]): # Clear and prepare tmp directory shutil.rmtree(pipeline.opt_artifacts(), ignore_errors=True) @@ -666,6 +677,7 @@ def execute_build_pipeline(timer: Timer, pipeline: Pipeline, final_build_args: L with stage1.section("Gather profiles"): gather_llvm_profiles(pipeline) + print_free_disk_space(pipeline) clear_llvm_files(pipeline) final_build_args += [ @@ -683,6 +695,7 @@ def execute_build_pipeline(timer: Timer, pipeline: Pipeline, final_build_args: L with stage2.section("Gather profiles"): gather_rustc_profiles(pipeline) + print_free_disk_space(pipeline) clear_llvm_files(pipeline) final_build_args += [ @@ -702,6 +715,7 @@ def execute_build_pipeline(timer: Timer, pipeline: Pipeline, final_build_args: L with stage3.section("Gather profiles"): gather_llvm_bolt_profiles(pipeline) + print_free_disk_space(pipeline) clear_llvm_files(pipeline) final_build_args += [ "--llvm-bolt-profile-use", @@ -733,5 +747,6 @@ def execute_build_pipeline(timer: Timer, pipeline: Pipeline, final_build_args: L raise e finally: timer.print_stats() + print_free_disk_space(pipeline) print_binary_sizes(pipeline) From ef8de38c8498885a46119f214e17f0237dab6251 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Sat, 11 Feb 2023 00:00:18 +0000 Subject: [PATCH 414/501] rustdoc: Don't resolve link to field on different variant --- .../passes/collect_intra_doc_links.rs | 29 ++++++++++++++++--- tests/rustdoc-ui/intra-doc/errors.rs | 16 ++++++++++ tests/rustdoc-ui/intra-doc/errors.stderr | 14 ++++++++- 3 files changed, 54 insertions(+), 5 deletions(-) diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index 8435972bb11f2..1ee98a79b0563 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -338,7 +338,8 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> { match ty_res { Res::Def(DefKind::Enum, did) => match tcx.type_of(did).kind() { ty::Adt(def, _) if def.is_enum() => { - if let Some(field) = def.all_fields().find(|f| f.name == variant_field_name) { + if let Some(variant) = def.variants().iter().find(|v| v.name == variant_name) + && let Some(field) = variant.fields.iter().find(|f| f.name == variant_field_name) { Ok((ty_res, field.did)) } else { Err(UnresolvedPath { @@ -1768,15 +1769,35 @@ fn resolution_failure( // Otherwise, it must be an associated item or variant let res = partial_res.expect("None case was handled by `last_found_module`"); - let kind = match res { - Res::Def(kind, _) => Some(kind), + let kind_did = match res { + Res::Def(kind, did) => Some((kind, did)), Res::Primitive(_) => None, }; - let path_description = if let Some(kind) = kind { + let is_struct_variant = |did| { + if let 
ty::Adt(def, _) = tcx.type_of(did).kind() + && def.is_enum() + && let Some(variant) = def.variants().iter().find(|v| v.name == res.name(tcx)) { + // ctor is `None` if variant is a struct + variant.ctor.is_none() + } else { + false + } + }; + let path_description = if let Some((kind, did)) = kind_did { match kind { Mod | ForeignMod => "inner item", Struct => "field or associated item", Enum | Union => "variant or associated item", + Variant if is_struct_variant(did) => { + let variant = res.name(tcx); + let note = format!("variant `{variant}` has no such field"); + if let Some(span) = sp { + diag.span_label(span, ¬e); + } else { + diag.note(¬e); + } + return; + } Variant | Field | Closure diff --git a/tests/rustdoc-ui/intra-doc/errors.rs b/tests/rustdoc-ui/intra-doc/errors.rs index b29f7c29b5d86..95dd2b98e037e 100644 --- a/tests/rustdoc-ui/intra-doc/errors.rs +++ b/tests/rustdoc-ui/intra-doc/errors.rs @@ -103,3 +103,19 @@ pub trait T { macro_rules! m { () => {}; } + +///[`TestEnum::Variant1::field_name`] +//~^ ERROR unresolved link +//~| NOTE variant `Variant1` has no such field +pub enum TestEnum { + Variant1 {}, + Variant2 { field_name: u64 }, +} + +///[`TestEnumNoFields::Variant1::field_name`] +//~^ ERROR unresolved link +//~| NOTE `Variant1` is a variant, not a module or type, and cannot have associated items +pub enum TestEnumNoFields { + Variant1 (), + Variant2 {}, +} diff --git a/tests/rustdoc-ui/intra-doc/errors.stderr b/tests/rustdoc-ui/intra-doc/errors.stderr index 9a1896fb0cd63..1b2416d7da765 100644 --- a/tests/rustdoc-ui/intra-doc/errors.stderr +++ b/tests/rustdoc-ui/intra-doc/errors.stderr @@ -142,6 +142,18 @@ error: unresolved link to `T::h` LL | /// [T::h!] | ^^^^^ the trait `T` has no macro named `h` +error: unresolved link to `TestEnum::Variant1::field_name` + --> $DIR/errors.rs:107:6 + | +LL | ///[`TestEnum::Variant1::field_name`] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ variant `Variant1` has no such field + +error: unresolved link to `TestEnumNoFields::Variant1::field_name` + --> $DIR/errors.rs:115:6 + | +LL | ///[`TestEnumNoFields::Variant1::field_name`] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `Variant1` is a variant, not a module or type, and cannot have associated items + error: unresolved link to `m` --> $DIR/errors.rs:98:6 | @@ -153,5 +165,5 @@ help: to link to the macro, add an exclamation mark LL | /// [m!()] | + -error: aborting due to 20 previous errors +error: aborting due to 22 previous errors From 5fdf640fb3cf207531629bff7372fb4caf296e94 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 11 Feb 2023 16:29:54 +0100 Subject: [PATCH 415/501] fix: Fix bind pat inlay hints rendering for constant patterns --- crates/ide/src/inlay_hints/bind_pat.rs | 44 ++++++++++++++++---------- 1 file changed, 27 insertions(+), 17 deletions(-) diff --git a/crates/ide/src/inlay_hints/bind_pat.rs b/crates/ide/src/inlay_hints/bind_pat.rs index f5b5c44737438..4af7f9bdb73b5 100644 --- a/crates/ide/src/inlay_hints/bind_pat.rs +++ b/crates/ide/src/inlay_hints/bind_pat.rs @@ -67,28 +67,23 @@ fn should_not_display_type_hint( return true; } - if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() { - if s.fields(db).is_empty() && s.name(db).to_smol_str() == bind_pat.to_string() { - return true; - } - } - - if config.hide_closure_initialization_hints { - if let Some(parent) = bind_pat.syntax().parent() { - if let Some(it) = ast::LetStmt::cast(parent) { - if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() { - if closure_has_block_body(&closure) { - return true; - } - } - } - } + if 
sema.resolve_bind_pat_to_const(bind_pat).is_some() { + return true; } for node in bind_pat.syntax().ancestors() { match_ast! { match node { - ast::LetStmt(it) => return it.ty().is_some(), + ast::LetStmt(it) => { + if config.hide_closure_initialization_hints { + if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() { + if closure_has_block_body(&closure) { + return true; + } + } + } + return it.ty().is_some() + }, // FIXME: We might wanna show type hints in parameters for non-top level patterns as well ast::Param(it) => return it.ty().is_some(), ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty), @@ -567,6 +562,21 @@ fn main() { ); } + #[test] + fn const_pats_have_no_type_hints() { + check_types( + r#" +const FOO: usize = 0; + +fn main() { + match 0 { + FOO => (), + _ => () + } +}"#, + ); + } + #[test] fn let_statement() { check_types( From 87d6107f19a41c372caf3ea07a50020213375d70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 11 Feb 2023 17:25:07 +0200 Subject: [PATCH 416/501] Build i686-pc-windows-msvc binaries --- .github/workflows/release.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index b070dd3406f20..48f4c6b55ed90 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -28,6 +28,9 @@ jobs: - os: windows-latest target: x86_64-pc-windows-msvc code-target: win32-x64 + - os: windows-latest + target: i686-pc-windows-msvc + code-target: win32-ia32 - os: windows-latest target: aarch64-pc-windows-msvc code-target: win32-arm64 @@ -230,6 +233,10 @@ jobs: with: name: dist-x86_64-pc-windows-msvc path: dist + - uses: actions/download-artifact@v1 + with: + name: dist-i686-pc-windows-msvc + path: dist - uses: actions/download-artifact@v1 with: name: dist-aarch64-pc-windows-msvc From d505c5abe4d2b03c0f714bc19087cb77f166a19a Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Sat, 11 Feb 2023 18:03:06 +0100 Subject: [PATCH 417/501] Improve JS function itemTypeFromName code a bit --- src/librustdoc/html/static/js/search.js | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js index 251e806c2d90d..ea1875d8e27a9 100644 --- a/src/librustdoc/html/static/js/search.js +++ b/src/librustdoc/html/static/js/search.js @@ -142,13 +142,11 @@ function initSearch(rawSearchIndex) { } function itemTypeFromName(typename) { - for (let i = 0, len = itemTypes.length; i < len; ++i) { - if (itemTypes[i] === typename) { - return i; - } + const index = itemTypes.findIndex(i => i === typename); + if (index < 0) { + throw new Error("Unknown type filter `" + typename + "`"); } - - throw new Error("Unknown type filter `" + typename + "`"); + return index; } /** From e80afa6501a700cb7bd356ea1c26190f6e21e79a Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 11 Feb 2023 17:37:52 +0000 Subject: [PATCH 418/501] Intern span when length is MAX_LEN with parent. --- compiler/rustc_span/src/span_encoding.rs | 7 ++++++- tests/ui/span/issue-107353.rs | 8 ++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 tests/ui/span/issue-107353.rs diff --git a/compiler/rustc_span/src/span_encoding.rs b/compiler/rustc_span/src/span_encoding.rs index d48c4f7e5a811..c600298c51a08 100644 --- a/compiler/rustc_span/src/span_encoding.rs +++ b/compiler/rustc_span/src/span_encoding.rs @@ -110,11 +110,16 @@ impl Span { // Inline format with parent. 
let len_or_tag = len_or_tag | PARENT_MASK; let parent2 = parent.local_def_index.as_u32(); - if ctxt2 == SyntaxContext::root().as_u32() && parent2 <= MAX_CTXT { + if ctxt2 == SyntaxContext::root().as_u32() + && parent2 <= MAX_CTXT + && len_or_tag < LEN_TAG + { + debug_assert_ne!(len_or_tag, LEN_TAG); return Span { base_or_index: base, len_or_tag, ctxt_or_tag: parent2 as u16 }; } } else { // Inline format with ctxt. + debug_assert_ne!(len_or_tag, LEN_TAG); return Span { base_or_index: base, len_or_tag: len as u16, diff --git a/tests/ui/span/issue-107353.rs b/tests/ui/span/issue-107353.rs new file mode 100644 index 0000000000000..09c66f42d786d --- /dev/null +++ b/tests/ui/span/issue-107353.rs @@ -0,0 +1,8 @@ +// Verify that span interning correctly handles having a span of exactly MAX_LEN length. +// compile-flags: --crate-type=lib +// check-pass + +#![allow(dead_code)] +fn a<'a, T>() -> &'a T { +todo!()/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +} From 84bb373b2a6a33ea136ba7c09642c2e321f24c62 Mon Sep 17 00:00:00 2001 From: Camille GILLOT Date: Sat, 11 Feb 2023 18:07:06 +0000 Subject: [PATCH 419/501] Pacify tidy. --- tests/ui/span/issue-107353.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/ui/span/issue-107353.rs b/tests/ui/span/issue-107353.rs index 09c66f42d786d..943f7f0eb1929 100644 --- a/tests/ui/span/issue-107353.rs +++ b/tests/ui/span/issue-107353.rs @@ -1,3 +1,4 @@ +// ignore-tidy-linelength // Verify that span interning correctly handles having a span of exactly MAX_LEN length. 
// compile-flags: --crate-type=lib // check-pass From 0e61d3ab3f6205ce0906837be25d5cf3ec1b2730 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sat, 11 Feb 2023 22:23:17 +0400 Subject: [PATCH 420/501] rustdoc: Remove cache for preprocessed markdown links --- src/librustdoc/core.rs | 2 -- src/librustdoc/passes/collect_intra_doc_links.rs | 14 ++------------ .../passes/collect_intra_doc_links/early.rs | 1 - 3 files changed, 2 insertions(+), 15 deletions(-) diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index d85749cadbd76..a26c25c978277 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -27,13 +27,11 @@ use crate::clean::inline::build_external_trait; use crate::clean::{self, ItemId}; use crate::config::{Options as RustdocOptions, OutputFormat, RenderOptions}; use crate::formats::cache::Cache; -use crate::passes::collect_intra_doc_links::PreprocessedMarkdownLink; use crate::passes::{self, Condition::*}; pub(crate) use rustc_session::config::{Input, Options, UnstableOptions}; pub(crate) struct ResolverCaches { - pub(crate) markdown_links: Option>>, pub(crate) all_trait_impls: Option>, pub(crate) all_macro_rules: FxHashMap>, pub(crate) extern_doc_reachable: DefIdSet, diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index 692adcf0a8091..257ad655c902b 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -806,22 +806,12 @@ impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> { // NOTE: if there are links that start in one crate and end in another, this will not resolve them. // This is a degenerate case and it's not supported by rustdoc. let parent_node = parent_module.or(parent_node); - let mut tmp_links = self - .cx - .resolver_caches - .markdown_links - .take() - .expect("`markdown_links` are already borrowed"); - if !tmp_links.contains_key(&doc) { - tmp_links.insert(doc.clone(), preprocessed_markdown_links(&doc)); - } - for md_link in &tmp_links[&doc] { - let link = self.resolve_link(item, &doc, parent_node, md_link); + for md_link in preprocessed_markdown_links(&doc) { + let link = self.resolve_link(item, &doc, parent_node, &md_link); if let Some(link) = link { self.cx.cache.intra_doc_links.entry(item.item_id).or_default().push(link); } } - self.cx.resolver_caches.markdown_links = Some(tmp_links); } if item.is_mod() { diff --git a/src/librustdoc/passes/collect_intra_doc_links/early.rs b/src/librustdoc/passes/collect_intra_doc_links/early.rs index 75c3380ee9bb9..ec449e94ce587 100644 --- a/src/librustdoc/passes/collect_intra_doc_links/early.rs +++ b/src/librustdoc/passes/collect_intra_doc_links/early.rs @@ -24,7 +24,6 @@ pub(crate) fn early_resolve_intra_doc_links( link_resolver.process_extern_impls(); ResolverCaches { - markdown_links: Some(Default::default()), all_trait_impls: Some(link_resolver.all_trait_impls), all_macro_rules: link_resolver.all_macro_rules, extern_doc_reachable: link_resolver.extern_doc_reachable, From 37a72a2e3e548c6bdecdf425b1616a7d6edcec0c Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sat, 11 Feb 2023 22:51:21 +0400 Subject: [PATCH 421/501] rustc_ast: Merge impls and reorder methods for attributes and meta items --- compiler/rustc_ast/src/attr/mod.rs | 626 ++++++++++++++--------------- 1 file changed, 311 insertions(+), 315 deletions(-) diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index cd60506dd8079..e75d2f77dbbad 100644 --- 
a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -40,84 +40,65 @@ impl MarkedAttrs { } } -impl NestedMetaItem { - /// Returns the `MetaItem` if `self` is a `NestedMetaItem::MetaItem`. - pub fn meta_item(&self) -> Option<&MetaItem> { - match self { - NestedMetaItem::MetaItem(item) => Some(item), - _ => None, - } - } +pub struct AttrIdGenerator(WorkerLocal>); - /// Returns the `MetaItemLit` if `self` is a `NestedMetaItem::Literal`s. - pub fn lit(&self) -> Option<&MetaItemLit> { - match self { - NestedMetaItem::Lit(lit) => Some(lit), - _ => None, - } - } +#[cfg(debug_assertions)] +static MAX_ATTR_ID: AtomicU32 = AtomicU32::new(u32::MAX); - /// Returns `true` if this list item is a MetaItem with a name of `name`. - pub fn has_name(&self, name: Symbol) -> bool { - self.meta_item().map_or(false, |meta_item| meta_item.has_name(name)) - } +impl AttrIdGenerator { + pub fn new() -> Self { + // We use `(index as u32).reverse_bits()` to initialize the + // starting value of AttrId in each worker thread. + // The `index` is the index of the worker thread. + // This ensures that the AttrId generated in each thread is unique. + AttrIdGenerator(WorkerLocal::new(|index| { + let index: u32 = index.try_into().unwrap(); - /// For a single-segment meta item, returns its name; otherwise, returns `None`. - pub fn ident(&self) -> Option { - self.meta_item().and_then(|meta_item| meta_item.ident()) - } - pub fn name_or_empty(&self) -> Symbol { - self.ident().unwrap_or_else(Ident::empty).name - } + #[cfg(debug_assertions)] + { + let max_id = ((index + 1).next_power_of_two() - 1).bitxor(u32::MAX).reverse_bits(); + MAX_ATTR_ID.fetch_min(max_id, Ordering::Release); + } - /// Gets the string value if `self` is a `MetaItem` and the `MetaItem` is a - /// `MetaItemKind::NameValue` variant containing a string, otherwise `None`. - pub fn value_str(&self) -> Option { - self.meta_item().and_then(|meta_item| meta_item.value_str()) + Cell::new(index.reverse_bits()) + })) } - /// Returns a name and single literal value tuple of the `MetaItem`. - pub fn name_value_literal(&self) -> Option<(Symbol, &MetaItemLit)> { - self.meta_item().and_then(|meta_item| { - meta_item.meta_item_list().and_then(|meta_item_list| { - if meta_item_list.len() == 1 - && let Some(ident) = meta_item.ident() - && let Some(lit) = meta_item_list[0].lit() - { - return Some((ident.name, lit)); - } - None - }) - }) - } + pub fn mk_attr_id(&self) -> AttrId { + let id = self.0.get(); - /// Gets a list of inner meta items from a list `MetaItem` type. - pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> { - self.meta_item().and_then(|meta_item| meta_item.meta_item_list()) - } + // Ensure the assigned attr_id does not overlap the bits + // representing the number of threads. + #[cfg(debug_assertions)] + assert!(id <= MAX_ATTR_ID.load(Ordering::Acquire)); - /// Returns `true` if the variant is `MetaItem`. - pub fn is_meta_item(&self) -> bool { - self.meta_item().is_some() + self.0.set(id + 1); + AttrId::from_u32(id) } +} - /// Returns `true` if `self` is a `MetaItem` and the meta item is a word. - pub fn is_word(&self) -> bool { - self.meta_item().map_or(false, |meta_item| meta_item.is_word()) +impl Attribute { + pub fn get_normal_item(&self) -> &AttrItem { + match &self.kind { + AttrKind::Normal(normal) => &normal.item, + AttrKind::DocComment(..) => panic!("unexpected doc comment"), + } } - /// See [`MetaItem::name_value_literal_span`]. 
- pub fn name_value_literal_span(&self) -> Option { - self.meta_item()?.name_value_literal_span() + pub fn unwrap_normal_item(self) -> AttrItem { + match self.kind { + AttrKind::Normal(normal) => normal.into_inner().item, + AttrKind::DocComment(..) => panic!("unexpected doc comment"), + } } -} -impl Attribute { - #[inline] - pub fn has_name(&self, name: Symbol) -> bool { - match &self.kind { - AttrKind::Normal(normal) => normal.item.path == name, - AttrKind::DocComment(..) => false, + /// Returns `true` if it is a sugared doc comment (`///` or `//!` for example). + /// So `#[doc = "doc"]` (which is a doc comment) and `#[doc(...)]` (which is not + /// a doc comment) will return `false`. + pub fn is_doc_comment(&self) -> bool { + match self.kind { + AttrKind::Normal(..) => false, + AttrKind::DocComment(..) => true, } } @@ -138,17 +119,11 @@ impl Attribute { self.ident().unwrap_or_else(Ident::empty).name } - pub fn value_str(&self) -> Option { - match &self.kind { - AttrKind::Normal(normal) => normal.item.value_str(), - AttrKind::DocComment(..) => None, - } - } - - pub fn meta_item_list(&self) -> Option> { + #[inline] + pub fn has_name(&self, name: Symbol) -> bool { match &self.kind { - AttrKind::Normal(normal) => normal.item.meta_item_list(), - AttrKind::DocComment(..) => None, + AttrKind::Normal(normal) => normal.item.path == name, + AttrKind::DocComment(..) => false, } } @@ -159,112 +134,18 @@ impl Attribute { false } } -} -impl MetaItem { - /// For a single-segment meta item, returns its name; otherwise, returns `None`. - pub fn ident(&self) -> Option { - if self.path.segments.len() == 1 { Some(self.path.segments[0].ident) } else { None } - } - pub fn name_or_empty(&self) -> Symbol { - self.ident().unwrap_or_else(Ident::empty).name - } - - /// ```text - /// Example: - /// #[attribute(name = "value")] - /// ^^^^^^^^^^^^^^ - /// ``` - pub fn name_value_literal(&self) -> Option<&MetaItemLit> { + pub fn meta_item_list(&self) -> Option> { match &self.kind { - MetaItemKind::NameValue(v) => Some(v), - _ => None, + AttrKind::Normal(normal) => normal.item.meta_item_list(), + AttrKind::DocComment(..) => None, } } pub fn value_str(&self) -> Option { - self.kind.value_str() - } - - pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> { match &self.kind { - MetaItemKind::List(l) => Some(&**l), - _ => None, - } - } - - pub fn is_word(&self) -> bool { - matches!(self.kind, MetaItemKind::Word) - } - - pub fn has_name(&self, name: Symbol) -> bool { - self.path == name - } - - /// This is used in case you want the value span instead of the whole attribute. Example: - /// - /// ```text - /// #[doc(alias = "foo")] - /// ``` - /// - /// In here, it'll return a span for `"foo"`. 
- pub fn name_value_literal_span(&self) -> Option { - Some(self.name_value_literal()?.span) - } -} - -impl AttrArgsEq { - fn value_str(&self) -> Option { - match self { - AttrArgsEq::Ast(expr) => match expr.kind { - ExprKind::Lit(token_lit) => { - LitKind::from_token_lit(token_lit).ok().and_then(|lit| lit.str()) - } - _ => None, - }, - AttrArgsEq::Hir(lit) => lit.kind.str(), - } - } -} - -impl AttrItem { - pub fn span(&self) -> Span { - self.args.span().map_or(self.path.span, |args_span| self.path.span.to(args_span)) - } - - pub fn meta(&self, span: Span) -> Option { - Some(MetaItem { path: self.path.clone(), kind: self.meta_kind()?, span }) - } - - pub fn meta_kind(&self) -> Option { - MetaItemKind::from_attr_args(&self.args) - } - - fn meta_item_list(&self) -> Option> { - match &self.args { - AttrArgs::Delimited(args) if args.delim == MacDelimiter::Parenthesis => { - MetaItemKind::list_from_tokens(args.tokens.clone()) - } - AttrArgs::Delimited(_) | AttrArgs::Eq(..) | AttrArgs::Empty => None, - } - } - - fn value_str(&self) -> Option { - match &self.args { - AttrArgs::Eq(_, args) => args.value_str(), - AttrArgs::Delimited(_) | AttrArgs::Empty => None, - } - } -} - -impl Attribute { - /// Returns `true` if it is a sugared doc comment (`///` or `//!` for example). - /// So `#[doc = "doc"]` (which is a doc comment) and `#[doc(...)]` (which is not - /// a doc comment) will return `false`. - pub fn is_doc_comment(&self) -> bool { - match self.kind { - AttrKind::Normal(..) => false, - AttrKind::DocComment(..) => true, + AttrKind::Normal(normal) => normal.item.value_str(), + AttrKind::DocComment(..) => None, } } @@ -299,20 +180,6 @@ impl Attribute { self.doc_str().map_or(false, |s| comments::may_have_doc_links(s.as_str())) } - pub fn get_normal_item(&self) -> &AttrItem { - match &self.kind { - AttrKind::Normal(normal) => &normal.item, - AttrKind::DocComment(..) => panic!("unexpected doc comment"), - } - } - - pub fn unwrap_normal_item(self) -> AttrItem { - match self.kind { - AttrKind::Normal(normal) => normal.into_inner().item, - AttrKind::DocComment(..) => panic!("unexpected doc comment"), - } - } - /// Extracts the MetaItem from inside this Attribute. pub fn meta(&self) -> Option { match &self.kind { @@ -344,130 +211,102 @@ impl Attribute { } } -pub struct AttrIdGenerator(WorkerLocal>); - -#[cfg(debug_assertions)] -static MAX_ATTR_ID: AtomicU32 = AtomicU32::new(u32::MAX); - -impl AttrIdGenerator { - pub fn new() -> Self { - // We use `(index as u32).reverse_bits()` to initialize the - // starting value of AttrId in each worker thread. - // The `index` is the index of the worker thread. - // This ensures that the AttrId generated in each thread is unique. - AttrIdGenerator(WorkerLocal::new(|index| { - let index: u32 = index.try_into().unwrap(); +impl AttrItem { + pub fn span(&self) -> Span { + self.args.span().map_or(self.path.span, |args_span| self.path.span.to(args_span)) + } - #[cfg(debug_assertions)] - { - let max_id = ((index + 1).next_power_of_two() - 1).bitxor(u32::MAX).reverse_bits(); - MAX_ATTR_ID.fetch_min(max_id, Ordering::Release); + fn meta_item_list(&self) -> Option> { + match &self.args { + AttrArgs::Delimited(args) if args.delim == MacDelimiter::Parenthesis => { + MetaItemKind::list_from_tokens(args.tokens.clone()) } - - Cell::new(index.reverse_bits()) - })) + AttrArgs::Delimited(_) | AttrArgs::Eq(..) 
| AttrArgs::Empty => None, + } } - pub fn mk_attr_id(&self) -> AttrId { - let id = self.0.get(); - - // Ensure the assigned attr_id does not overlap the bits - // representing the number of threads. - #[cfg(debug_assertions)] - assert!(id <= MAX_ATTR_ID.load(Ordering::Acquire)); - - self.0.set(id + 1); - AttrId::from_u32(id) + fn value_str(&self) -> Option { + match &self.args { + AttrArgs::Eq(_, args) => args.value_str(), + AttrArgs::Delimited(_) | AttrArgs::Empty => None, + } } -} -pub fn mk_attr( - g: &AttrIdGenerator, - style: AttrStyle, - path: Path, - args: AttrArgs, - span: Span, -) -> Attribute { - mk_attr_from_item(g, AttrItem { path, args, tokens: None }, None, style, span) -} - -pub fn mk_attr_from_item( - g: &AttrIdGenerator, - item: AttrItem, - tokens: Option, - style: AttrStyle, - span: Span, -) -> Attribute { - Attribute { - kind: AttrKind::Normal(P(NormalAttr { item, tokens })), - id: g.mk_attr_id(), - style, - span, + pub fn meta(&self, span: Span) -> Option { + Some(MetaItem { path: self.path.clone(), kind: self.meta_kind()?, span }) } -} -pub fn mk_attr_word(g: &AttrIdGenerator, style: AttrStyle, name: Symbol, span: Span) -> Attribute { - let path = Path::from_ident(Ident::new(name, span)); - let args = AttrArgs::Empty; - mk_attr(g, style, path, args, span) -} - -pub fn mk_attr_name_value_str( - g: &AttrIdGenerator, - style: AttrStyle, - name: Symbol, - val: Symbol, - span: Span, -) -> Attribute { - let lit = token::Lit::new(token::Str, escape_string_symbol(val), None); - let expr = P(Expr { - id: DUMMY_NODE_ID, - kind: ExprKind::Lit(lit), - span, - attrs: AttrVec::new(), - tokens: None, - }); - let path = Path::from_ident(Ident::new(name, span)); - let args = AttrArgs::Eq(span, AttrArgsEq::Ast(expr)); - mk_attr(g, style, path, args, span) + pub fn meta_kind(&self) -> Option { + MetaItemKind::from_attr_args(&self.args) + } } -pub fn mk_attr_nested_word( - g: &AttrIdGenerator, - style: AttrStyle, - outer: Symbol, - inner: Symbol, - span: Span, -) -> Attribute { - let inner_tokens = TokenStream::new(vec![TokenTree::Token( - Token::from_ast_ident(Ident::new(inner, span)), - Spacing::Alone, - )]); - let outer_ident = Ident::new(outer, span); - let path = Path::from_ident(outer_ident); - let attr_args = AttrArgs::Delimited(DelimArgs { - dspan: DelimSpan::from_single(span), - delim: MacDelimiter::Parenthesis, - tokens: inner_tokens, - }); - mk_attr(g, style, path, attr_args, span) +impl AttrArgsEq { + fn value_str(&self) -> Option { + match self { + AttrArgsEq::Ast(expr) => match expr.kind { + ExprKind::Lit(token_lit) => { + LitKind::from_token_lit(token_lit).ok().and_then(|lit| lit.str()) + } + _ => None, + }, + AttrArgsEq::Hir(lit) => lit.kind.str(), + } + } } -pub fn mk_doc_comment( - g: &AttrIdGenerator, - comment_kind: CommentKind, - style: AttrStyle, - data: Symbol, - span: Span, -) -> Attribute { - Attribute { kind: AttrKind::DocComment(comment_kind, data), id: g.mk_attr_id(), style, span } -} +impl MetaItem { + /// For a single-segment meta item, returns its name; otherwise, returns `None`. 
+ pub fn ident(&self) -> Option { + if self.path.segments.len() == 1 { Some(self.path.segments[0].ident) } else { None } + } -pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool { - items.iter().any(|item| item.has_name(name)) -} + pub fn name_or_empty(&self) -> Symbol { + self.ident().unwrap_or_else(Ident::empty).name + } + + pub fn has_name(&self, name: Symbol) -> bool { + self.path == name + } + + pub fn is_word(&self) -> bool { + matches!(self.kind, MetaItemKind::Word) + } + + pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> { + match &self.kind { + MetaItemKind::List(l) => Some(&**l), + _ => None, + } + } + + /// ```text + /// Example: + /// #[attribute(name = "value")] + /// ^^^^^^^^^^^^^^ + /// ``` + pub fn name_value_literal(&self) -> Option<&MetaItemLit> { + match &self.kind { + MetaItemKind::NameValue(v) => Some(v), + _ => None, + } + } + + /// This is used in case you want the value span instead of the whole attribute. Example: + /// + /// ```text + /// #[doc(alias = "foo")] + /// ``` + /// + /// In here, it'll return a span for `"foo"`. + pub fn name_value_literal_span(&self) -> Option { + Some(self.name_value_literal()?.span) + } + + pub fn value_str(&self) -> Option { + self.kind.value_str() + } -impl MetaItem { fn from_tokens(tokens: &mut iter::Peekable) -> Option where I: Iterator, @@ -564,6 +403,24 @@ impl MetaItemKind { } } + fn from_tokens( + tokens: &mut iter::Peekable>, + ) -> Option { + match tokens.peek() { + Some(TokenTree::Delimited(_, Delimiter::Parenthesis, inner_tokens)) => { + let inner_tokens = inner_tokens.clone(); + tokens.next(); + MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List) + } + Some(TokenTree::Delimited(..)) => None, + Some(TokenTree::Token(Token { kind: token::Eq, .. }, _)) => { + tokens.next(); + MetaItemKind::name_value_from_tokens(tokens) + } + _ => Some(MetaItemKind::Word), + } + } + fn from_attr_args(args: &AttrArgs) -> Option { match args { AttrArgs::Empty => Some(MetaItemKind::Word), @@ -585,24 +442,6 @@ impl MetaItemKind { AttrArgs::Eq(_, AttrArgsEq::Hir(lit)) => Some(MetaItemKind::NameValue(lit.clone())), } } - - fn from_tokens( - tokens: &mut iter::Peekable>, - ) -> Option { - match tokens.peek() { - Some(TokenTree::Delimited(_, Delimiter::Parenthesis, inner_tokens)) => { - let inner_tokens = inner_tokens.clone(); - tokens.next(); - MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List) - } - Some(TokenTree::Delimited(..)) => None, - Some(TokenTree::Token(Token { kind: token::Eq, .. }, _)) => { - tokens.next(); - MetaItemKind::name_value_from_tokens(tokens) - } - _ => Some(MetaItemKind::Word), - } - } } impl NestedMetaItem { @@ -613,6 +452,77 @@ impl NestedMetaItem { } } + /// For a single-segment meta item, returns its name; otherwise, returns `None`. + pub fn ident(&self) -> Option { + self.meta_item().and_then(|meta_item| meta_item.ident()) + } + + pub fn name_or_empty(&self) -> Symbol { + self.ident().unwrap_or_else(Ident::empty).name + } + + /// Returns `true` if this list item is a MetaItem with a name of `name`. + pub fn has_name(&self, name: Symbol) -> bool { + self.meta_item().map_or(false, |meta_item| meta_item.has_name(name)) + } + + /// Returns `true` if `self` is a `MetaItem` and the meta item is a word. + pub fn is_word(&self) -> bool { + self.meta_item().map_or(false, |meta_item| meta_item.is_word()) + } + + /// Gets a list of inner meta items from a list `MetaItem` type. 
+ pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> { + self.meta_item().and_then(|meta_item| meta_item.meta_item_list()) + } + + /// Returns a name and single literal value tuple of the `MetaItem`. + pub fn name_value_literal(&self) -> Option<(Symbol, &MetaItemLit)> { + self.meta_item().and_then(|meta_item| { + meta_item.meta_item_list().and_then(|meta_item_list| { + if meta_item_list.len() == 1 + && let Some(ident) = meta_item.ident() + && let Some(lit) = meta_item_list[0].lit() + { + return Some((ident.name, lit)); + } + None + }) + }) + } + + /// See [`MetaItem::name_value_literal_span`]. + pub fn name_value_literal_span(&self) -> Option { + self.meta_item()?.name_value_literal_span() + } + + /// Gets the string value if `self` is a `MetaItem` and the `MetaItem` is a + /// `MetaItemKind::NameValue` variant containing a string, otherwise `None`. + pub fn value_str(&self) -> Option { + self.meta_item().and_then(|meta_item| meta_item.value_str()) + } + + /// Returns the `MetaItemLit` if `self` is a `NestedMetaItem::Literal`s. + pub fn lit(&self) -> Option<&MetaItemLit> { + match self { + NestedMetaItem::Lit(lit) => Some(lit), + _ => None, + } + } + + /// Returns the `MetaItem` if `self` is a `NestedMetaItem::MetaItem`. + pub fn meta_item(&self) -> Option<&MetaItem> { + match self { + NestedMetaItem::MetaItem(item) => Some(item), + _ => None, + } + } + + /// Returns `true` if the variant is `MetaItem`. + pub fn is_meta_item(&self) -> bool { + self.meta_item().is_some() + } + fn from_tokens(tokens: &mut iter::Peekable) -> Option where I: Iterator, @@ -634,3 +544,89 @@ impl NestedMetaItem { MetaItem::from_tokens(tokens).map(NestedMetaItem::MetaItem) } } + +pub fn mk_doc_comment( + g: &AttrIdGenerator, + comment_kind: CommentKind, + style: AttrStyle, + data: Symbol, + span: Span, +) -> Attribute { + Attribute { kind: AttrKind::DocComment(comment_kind, data), id: g.mk_attr_id(), style, span } +} + +pub fn mk_attr( + g: &AttrIdGenerator, + style: AttrStyle, + path: Path, + args: AttrArgs, + span: Span, +) -> Attribute { + mk_attr_from_item(g, AttrItem { path, args, tokens: None }, None, style, span) +} + +pub fn mk_attr_from_item( + g: &AttrIdGenerator, + item: AttrItem, + tokens: Option, + style: AttrStyle, + span: Span, +) -> Attribute { + Attribute { + kind: AttrKind::Normal(P(NormalAttr { item, tokens })), + id: g.mk_attr_id(), + style, + span, + } +} + +pub fn mk_attr_word(g: &AttrIdGenerator, style: AttrStyle, name: Symbol, span: Span) -> Attribute { + let path = Path::from_ident(Ident::new(name, span)); + let args = AttrArgs::Empty; + mk_attr(g, style, path, args, span) +} + +pub fn mk_attr_nested_word( + g: &AttrIdGenerator, + style: AttrStyle, + outer: Symbol, + inner: Symbol, + span: Span, +) -> Attribute { + let inner_tokens = TokenStream::new(vec![TokenTree::Token( + Token::from_ast_ident(Ident::new(inner, span)), + Spacing::Alone, + )]); + let outer_ident = Ident::new(outer, span); + let path = Path::from_ident(outer_ident); + let attr_args = AttrArgs::Delimited(DelimArgs { + dspan: DelimSpan::from_single(span), + delim: MacDelimiter::Parenthesis, + tokens: inner_tokens, + }); + mk_attr(g, style, path, attr_args, span) +} + +pub fn mk_attr_name_value_str( + g: &AttrIdGenerator, + style: AttrStyle, + name: Symbol, + val: Symbol, + span: Span, +) -> Attribute { + let lit = token::Lit::new(token::Str, escape_string_symbol(val), None); + let expr = P(Expr { + id: DUMMY_NODE_ID, + kind: ExprKind::Lit(lit), + span, + attrs: AttrVec::new(), + tokens: None, + }); + let path = 
Path::from_ident(Ident::new(name, span)); + let args = AttrArgs::Eq(span, AttrArgsEq::Ast(expr)); + mk_attr(g, style, path, args, span) +} + +pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool { + items.iter().any(|item| item.has_name(name)) +} From 80d265240bf1b884753bafc8f07e43fc8236679f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20K=C3=A5re=20Alsaker?= Date: Fri, 3 Feb 2023 18:39:31 +0100 Subject: [PATCH 422/501] Create a single value cache for the () query key --- compiler/rustc_middle/src/query/keys.rs | 4 +- .../rustc_query_system/src/query/caches.rs | 47 ++++++++++++++++++- compiler/rustc_query_system/src/query/mod.rs | 3 +- 3 files changed, 50 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_middle/src/query/keys.rs b/compiler/rustc_middle/src/query/keys.rs index e4bb3ce3d5a99..dc02fd53ed02c 100644 --- a/compiler/rustc_middle/src/query/keys.rs +++ b/compiler/rustc_middle/src/query/keys.rs @@ -8,7 +8,7 @@ use crate::ty::subst::{GenericArg, SubstsRef}; use crate::ty::{self, layout::TyAndLayout, Ty, TyCtxt}; use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE}; use rustc_hir::hir_id::{HirId, OwnerId}; -use rustc_query_system::query::{DefaultCacheSelector, VecCacheSelector}; +use rustc_query_system::query::{DefaultCacheSelector, SingleCacheSelector, VecCacheSelector}; use rustc_span::symbol::{Ident, Symbol}; use rustc_span::{Span, DUMMY_SP}; @@ -45,7 +45,7 @@ pub trait Key: Sized { } impl Key for () { - type CacheSelector = DefaultCacheSelector; + type CacheSelector = SingleCacheSelector; #[inline(always)] fn query_crate_is_local(&self) -> bool { diff --git a/compiler/rustc_query_system/src/query/caches.rs b/compiler/rustc_query_system/src/query/caches.rs index 9f875b4373173..c9dd75e4d554b 100644 --- a/compiler/rustc_query_system/src/query/caches.rs +++ b/compiler/rustc_query_system/src/query/caches.rs @@ -5,7 +5,6 @@ use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sharded; #[cfg(parallel_compiler)] use rustc_data_structures::sharded::Sharded; -#[cfg(not(parallel_compiler))] use rustc_data_structures::sync::Lock; use rustc_data_structures::sync::WorkerLocal; use rustc_index::vec::{Idx, IndexVec}; @@ -117,6 +116,52 @@ where } } +pub struct SingleCacheSelector; + +impl<'tcx, V: 'tcx> CacheSelector<'tcx, V> for SingleCacheSelector { + type Cache = SingleCache + where + V: Copy; + type ArenaCache = ArenaCache<'tcx, (), V>; +} + +pub struct SingleCache { + cache: Lock>, +} + +impl Default for SingleCache { + fn default() -> Self { + SingleCache { cache: Lock::new(None) } + } +} + +impl QueryStorage for SingleCache { + type Value = V; + type Stored = V; +} + +impl QueryCache for SingleCache +where + V: Copy + Debug, +{ + type Key = (); + + #[inline(always)] + fn lookup(&self, _key: &()) -> Option<(V, DepNodeIndex)> { + *self.cache.lock() + } + + #[inline] + fn complete(&self, _key: (), value: V, index: DepNodeIndex) -> Self::Stored { + *self.cache.lock() = Some((value.clone(), index)); + value + } + + fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) { + self.cache.lock().as_ref().map(|value| f(&(), &value.0, value.1)); + } +} + pub struct ArenaCache<'tcx, K, V> { arena: WorkerLocal>, #[cfg(parallel_compiler)] diff --git a/compiler/rustc_query_system/src/query/mod.rs b/compiler/rustc_query_system/src/query/mod.rs index d308af1920760..6c0ee2bc2f6f0 100644 --- a/compiler/rustc_query_system/src/query/mod.rs +++ b/compiler/rustc_query_system/src/query/mod.rs @@ -8,7 +8,8 @@ pub use 
self::job::{print_query_stack, QueryInfo, QueryJob, QueryJobId, QueryJob mod caches; pub use self::caches::{ - CacheSelector, DefaultCacheSelector, QueryCache, QueryStorage, VecCacheSelector, + CacheSelector, DefaultCacheSelector, QueryCache, QueryStorage, SingleCacheSelector, + VecCacheSelector, }; mod config; From a85b0101e6a1d7d2a7a51d0e5472c1a1215b3031 Mon Sep 17 00:00:00 2001 From: Boxy Date: Sat, 11 Feb 2023 23:05:11 +0000 Subject: [PATCH 423/501] make `relate`'s const ty assertion use semantic equality --- compiler/rustc_infer/src/infer/combine.rs | 28 ++++ compiler/rustc_middle/src/query/mod.rs | 7 + compiler/rustc_middle/src/ty/relate.rs | 20 --- .../rustc_trait_selection/src/traits/misc.rs | 22 ++- .../rustc_trait_selection/src/traits/mod.rs | 1 + .../const_kind_expr/relate_ty_with_infer_1.rs | 30 ++++ .../const_kind_expr/relate_ty_with_infer_2.rs | 151 ++++++++++++++++++ 7 files changed, 237 insertions(+), 22 deletions(-) create mode 100644 tests/ui/const-generics/generic_const_exprs/const_kind_expr/relate_ty_with_infer_1.rs create mode 100644 tests/ui/const-generics/generic_const_exprs/const_kind_expr/relate_ty_with_infer_2.rs diff --git a/compiler/rustc_infer/src/infer/combine.rs b/compiler/rustc_infer/src/infer/combine.rs index 4da2a67414498..76834c3b36841 100644 --- a/compiler/rustc_infer/src/infer/combine.rs +++ b/compiler/rustc_infer/src/infer/combine.rs @@ -31,6 +31,7 @@ use super::{InferCtxt, MiscVariable, TypeTrace}; use crate::traits::{Obligation, PredicateObligations}; use rustc_data_structures::sso::SsoHashMap; use rustc_hir::def_id::DefId; +use rustc_middle::infer::canonical::OriginalQueryValues; use rustc_middle::infer::unify_key::{ConstVarValue, ConstVariableValue}; use rustc_middle::infer::unify_key::{ConstVariableOrigin, ConstVariableOriginKind}; use rustc_middle::traits::ObligationCause; @@ -152,6 +153,33 @@ impl<'tcx> InferCtxt<'tcx> { let a = self.shallow_resolve(a); let b = self.shallow_resolve(b); + // We should never have to relate the `ty` field on `Const` as it is checked elsewhere that consts have the + // correct type for the generic param they are an argument for. However there have been a number of cases + // historically where asserting that the types are equal has found bugs in the compiler so this is valuable + // to check even if it is a bit nasty impl wise :( + // + // This probe is probably not strictly necessary but it seems better to be safe and not accidentally find + // ourselves with a check to find bugs being required for code to compile because it made inference progress. + self.probe(|_| { + if a.ty() == b.ty() { + return; + } + + // We don't have access to trait solving machinery in `rustc_infer` so the logic for determining if the + // two const param's types are able to be equal has to go through a canonical query with the actual logic + // in `rustc_trait_selection`. 
+ let canonical = self.canonicalize_query( + (relation.param_env(), a.ty(), b.ty()), + &mut OriginalQueryValues::default(), + ); + if let Err(()) = self.tcx.check_const_param_definitely_unequal(canonical) { + self.tcx.sess.delay_span_bug( + DUMMY_SP, + &format!("cannot relate consts of different types (a={:?}, b={:?})", a, b,), + ); + } + }); + match (a.kind(), b.kind()) { ( ty::ConstKind::Infer(InferConst::Var(a_vid)), diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index d37d6b37a37c3..d8f7614c56f42 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -2168,4 +2168,11 @@ rustc_queries! { desc { "traits in scope for documentation links for a module" } separate_provide_extern } + + /// Used in `super_combine_consts` to ICE if the type of the two consts are definitely not going to end up being + /// equal to eachother. This might return `Ok` even if the types are unequal, but will never return `Err` if + /// the types might be equal. + query check_const_param_definitely_unequal(arg: Canonical<'tcx, (ty::ParamEnv<'tcx>, Ty<'tcx>, Ty<'tcx>)>) -> Result<(), ()> { + desc { "check whether two const param are definitely not equal to eachother"} + } } diff --git a/compiler/rustc_middle/src/ty/relate.rs b/compiler/rustc_middle/src/ty/relate.rs index 890dabde1f73d..da6b9e3ed23d9 100644 --- a/compiler/rustc_middle/src/ty/relate.rs +++ b/compiler/rustc_middle/src/ty/relate.rs @@ -9,7 +9,6 @@ use crate::ty::{self, Expr, ImplSubject, Term, TermKind, Ty, TyCtxt, TypeFoldabl use crate::ty::{GenericArg, GenericArgKind, SubstsRef}; use rustc_hir as ast; use rustc_hir::def_id::DefId; -use rustc_span::DUMMY_SP; use rustc_target::spec::abi; use std::iter; @@ -594,25 +593,6 @@ pub fn super_relate_consts<'tcx, R: TypeRelation<'tcx>>( debug!("{}.super_relate_consts(a = {:?}, b = {:?})", relation.tag(), a, b); let tcx = relation.tcx(); - let a_ty; - let b_ty; - if relation.tcx().features().adt_const_params { - a_ty = tcx.normalize_erasing_regions(relation.param_env(), a.ty()); - b_ty = tcx.normalize_erasing_regions(relation.param_env(), b.ty()); - } else { - a_ty = tcx.erase_regions(a.ty()); - b_ty = tcx.erase_regions(b.ty()); - } - if a_ty != b_ty { - relation.tcx().sess.delay_span_bug( - DUMMY_SP, - &format!( - "cannot relate constants ({:?}, {:?}) of different types: {} != {}", - a, b, a_ty, b_ty - ), - ); - } - // HACK(const_generics): We still need to eagerly evaluate consts when // relating them because during `normalize_param_env_or_error`, // we may relate an evaluated constant in a obligation against diff --git a/compiler/rustc_trait_selection/src/traits/misc.rs b/compiler/rustc_trait_selection/src/traits/misc.rs index a41a601f2db07..39654258dcda9 100644 --- a/compiler/rustc_trait_selection/src/traits/misc.rs +++ b/compiler/rustc_trait_selection/src/traits/misc.rs @@ -1,12 +1,14 @@ //! Miscellaneous type-system utilities that are too small to deserve their own modules. 
-use crate::traits::{self, ObligationCause}; +use crate::traits::{self, ObligationCause, ObligationCtxt}; use rustc_data_structures::fx::FxIndexSet; use rustc_hir as hir; +use rustc_infer::infer::canonical::Canonical; use rustc_infer::infer::{RegionResolutionError, TyCtxtInferExt}; use rustc_infer::{infer::outlives::env::OutlivesEnvironment, traits::FulfillmentError}; -use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitable}; +use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, TypeVisitable}; +use rustc_span::DUMMY_SP; use super::outlives_bounds::InferCtxtExt; @@ -131,3 +133,19 @@ pub fn type_allowed_to_implement_copy<'tcx>( Ok(()) } + +pub fn check_const_param_definitely_unequal<'tcx>( + tcx: TyCtxt<'tcx>, + canonical: Canonical<'tcx, (ParamEnv<'tcx>, Ty<'tcx>, Ty<'tcx>)>, +) -> Result<(), ()> { + let (infcx, (param_env, ty_a, ty_b), _) = + tcx.infer_ctxt().build_with_canonical(DUMMY_SP, &canonical); + let ocx = ObligationCtxt::new(&infcx); + + let result = ocx.eq(&ObligationCause::dummy(), param_env, ty_a, ty_b); + // use `select_where_possible` instead of `select_all_or_error` so that + // we don't get errors from obligations being ambiguous. + let errors = ocx.select_where_possible(); + + if errors.len() > 0 || result.is_err() { Err(()) } else { Ok(()) } +} diff --git a/compiler/rustc_trait_selection/src/traits/mod.rs b/compiler/rustc_trait_selection/src/traits/mod.rs index 83458017e00f0..f34d55531e767 100644 --- a/compiler/rustc_trait_selection/src/traits/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/mod.rs @@ -554,6 +554,7 @@ pub fn provide(providers: &mut ty::query::Providers) { specialization_graph_of: specialize::specialization_graph_provider, specializes: specialize::specializes, subst_and_check_impossible_predicates, + check_const_param_definitely_unequal: misc::check_const_param_definitely_unequal, is_impossible_method, ..*providers }; diff --git a/tests/ui/const-generics/generic_const_exprs/const_kind_expr/relate_ty_with_infer_1.rs b/tests/ui/const-generics/generic_const_exprs/const_kind_expr/relate_ty_with_infer_1.rs new file mode 100644 index 0000000000000..1e248411830cd --- /dev/null +++ b/tests/ui/const-generics/generic_const_exprs/const_kind_expr/relate_ty_with_infer_1.rs @@ -0,0 +1,30 @@ +// check-pass +#![feature(generic_const_exprs)] +#![allow(incomplete_features)] + +// issue #107899 +// We end up relating `Const(ty: size_of, kind: Value(Branch([])))` with +// `Const(ty: size_of, kind: Value(Branch([])))` which if you were to `==` +// the `ty` fields would return `false` and ICE. This test checks that we use +// actual semantic equality that takes into account aliases and infer vars. 
+ +use std::mem::size_of; + +trait X { + fn f(self); + fn g(self); +} + +struct Y; + +impl X for Y +where + [(); size_of::()]: Sized, +{ + fn f(self) { + self.g(); + } + fn g(self) {} +} + +fn main() {} diff --git a/tests/ui/const-generics/generic_const_exprs/const_kind_expr/relate_ty_with_infer_2.rs b/tests/ui/const-generics/generic_const_exprs/const_kind_expr/relate_ty_with_infer_2.rs new file mode 100644 index 0000000000000..91a8a7c4a0121 --- /dev/null +++ b/tests/ui/const-generics/generic_const_exprs/const_kind_expr/relate_ty_with_infer_2.rs @@ -0,0 +1,151 @@ +// check-pass +#![feature(inline_const, generic_const_exprs)] +#![allow(incomplete_features)] +use std::marker::PhantomData; + +pub struct Equal(); +pub trait True {} +impl True for Equal {} + +// replacement for generativity +pub struct Id<'id>(PhantomData &'id ()>); +pub struct Guard<'id>(Id<'id>); +fn make_guard<'id>(i: &'id Id<'id>) -> Guard<'id> { + Guard(Id(PhantomData)) +} + +impl<'id> Into> for Guard<'id> { + fn into(self) -> Id<'id> { + self.0 + } +} + +pub struct Arena<'life> { + bytes: *mut [u8], + //bitmap: RefCell, + _token: PhantomData>, +} + +#[repr(transparent)] +pub struct Item<'life, T> { + data: T, + _phantom: PhantomData>, +} + +#[repr(transparent)] +pub struct Token<'life, 'borrow, 'compact, 'reborrow, T> +where + 'life: 'reborrow, + T: Tokenize<'life, 'borrow, 'compact, 'reborrow>, +{ + //ptr: *mut ::Tokenized, + ptr: core::ptr::NonNull, + _phantom: PhantomData>, + _compact: PhantomData<&'borrow Guard<'compact>>, + _result: PhantomData<&'reborrow T::Untokenized>, +} + +impl<'life> Arena<'life> { + pub fn tokenize<'before, 'compact, 'borrow, 'reborrow, T, U>( + &self, + guard: &'borrow Guard<'compact>, + item: Item<'life, &'before mut T>, + ) -> Token<'life, 'borrow, 'compact, 'reborrow, U> + where + T: Tokenize<'life, 'borrow, 'compact, 'reborrow, Untokenized = U>, + T::Untokenized: Tokenize<'life, 'borrow, 'compact, 'reborrow>, + Equal<{ core::mem::size_of::() }, { core::mem::size_of::() }>: True, + 'compact: 'borrow, + 'life: 'reborrow, + 'life: 'compact, + 'life: 'borrow, + // 'borrow: 'before ?? + { + let dst = item.data as *mut T as *mut T::Tokenized; + Token { + ptr: core::ptr::NonNull::new(dst as *mut _).unwrap(), + _phantom: PhantomData, + _compact: PhantomData, + _result: PhantomData, + } + } +} + +pub trait Tokenize<'life, 'borrow, 'compact, 'reborrow> +where + 'compact: 'borrow, + 'life: 'reborrow, + 'life: 'borrow, + 'life: 'compact, +{ + type Tokenized; + type Untokenized; + const TO: fn(&Arena<'life>, &'borrow Guard<'compact>, Self) -> Self::Tokenized; + const FROM: fn(&'reborrow Arena<'life>, Self::Tokenized) -> Self::Untokenized; +} + +macro_rules! 
tokenize { + ($to:expr, $from:expr) => { + const TO: fn(&Arena<'life>, &'borrow Guard<'compact>, Self) -> Self::Tokenized = $to; + const FROM: fn(&'reborrow Arena<'life>, Self::Tokenized) -> Self::Untokenized = $from; + }; +} + +struct Foo<'life, 'borrow>(Option>); +struct TokenFoo<'life, 'borrow, 'compact, 'reborrow>( + Option>, +); +struct Bar(u8); + +impl<'life, 'before, 'borrow, 'compact, 'reborrow> Tokenize<'life, 'borrow, 'compact, 'reborrow> + for Foo<'life, 'before> +where + 'compact: 'borrow, + 'life: 'reborrow, + 'life: 'borrow, + 'life: 'compact, +{ + type Tokenized = TokenFoo<'life, 'borrow, 'compact, 'reborrow>; + type Untokenized = Foo<'life, 'reborrow>; + tokenize!(foo_to, foo_from); +} + +impl<'life, 'borrow, 'compact, 'reborrow> Tokenize<'life, 'borrow, 'compact, 'reborrow> for Bar +where + 'compact: 'borrow, + 'life: 'reborrow, + 'life: 'borrow, + 'life: 'compact, +{ + type Tokenized = Bar; + type Untokenized = Bar; + tokenize!(bar_to, bar_from); +} + +fn bar_to<'life, 'borrow, 'compact>( + arena: &Arena<'life>, + guard: &'borrow Guard<'compact>, + s: Bar, +) -> Bar { + s +} +fn bar_from<'life, 'reborrow>(arena: &'reborrow Arena<'life>, s: Bar) -> Bar { + s +} + +fn foo_to<'life, 'borrow, 'compact, 'reborrow, 'before>( + arena: &'before Arena<'life>, + guard: &'borrow Guard<'compact>, + s: Foo<'life, 'before>, +) -> TokenFoo<'life, 'borrow, 'compact, 'reborrow> { + let Foo(bar) = s; + TokenFoo(bar.map(|bar| arena.tokenize(guard, bar))) +} +fn foo_from<'life, 'borrow, 'compact, 'reborrow>( + arena: &'reborrow Arena<'life>, + s: TokenFoo<'life, 'borrow, 'compact, 'reborrow>, +) -> Foo<'life, 'reborrow> { + Foo(s.0.map(|bar| panic!())) +} + +fn main() {} From 72b3f46b435b7e3c662e34ecefdd1e66e59d3092 Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Sat, 11 Feb 2023 12:36:55 -0700 Subject: [PATCH 424/501] rustdoc: account for intra-doc links in `` --- src/librustdoc/html/markdown.rs | 13 +++++++++++-- src/librustdoc/html/markdown/tests.rs | 2 +- src/librustdoc/html/render/context.rs | 5 ++++- tests/rustdoc/description.rs | 6 ++++++ 4 files changed, 22 insertions(+), 4 deletions(-) diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index dee0a01a65413..0ff73eb79de03 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -1180,14 +1180,23 @@ pub(crate) fn short_markdown_summary(markdown: &str, link_names: &[RenderedLink] /// - Headings, links, and formatting are stripped. /// - Inline code is rendered as-is, surrounded by backticks. /// - HTML and code blocks are ignored. 
-pub(crate) fn plain_text_summary(md: &str) -> String { +pub(crate) fn plain_text_summary(md: &str, link_names: &[RenderedLink]) -> String { if md.is_empty() { return String::new(); } let mut s = String::with_capacity(md.len() * 3 / 2); - for event in Parser::new_ext(md, summary_opts()) { + let mut replacer = |broken_link: BrokenLink<'_>| { + link_names + .iter() + .find(|link| link.original_text.as_str() == &*broken_link.reference) + .map(|link| (link.href.as_str().into(), link.new_text.as_str().into())) + }; + + let p = Parser::new_with_broken_link_callback(md, summary_opts(), Some(&mut replacer)); + + for event in p { match &event { Event::Text(text) => s.push_str(text), Event::Code(code) => { diff --git a/src/librustdoc/html/markdown/tests.rs b/src/librustdoc/html/markdown/tests.rs index 5878c58264ec3..e05635a020756 100644 --- a/src/librustdoc/html/markdown/tests.rs +++ b/src/librustdoc/html/markdown/tests.rs @@ -249,7 +249,7 @@ fn test_short_markdown_summary() { #[test] fn test_plain_text_summary() { fn t(input: &str, expect: &str) { - let output = plain_text_summary(input); + let output = plain_text_summary(input, &[]); assert_eq!(output, expect, "original: {}", input); } diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs index 1216a8d71c8cf..6762fba9275cf 100644 --- a/src/librustdoc/html/render/context.rs +++ b/src/librustdoc/html/render/context.rs @@ -182,7 +182,10 @@ impl<'tcx> Context<'tcx> { }; title.push_str(" - Rust"); let tyname = it.type_(); - let desc = it.doc_value().as_ref().map(|doc| plain_text_summary(doc)); + let desc = it + .doc_value() + .as_ref() + .map(|doc| plain_text_summary(doc, &it.link_names(&self.cache()))); let desc = if let Some(desc) = desc { desc } else if it.is_crate() { diff --git a/tests/rustdoc/description.rs b/tests/rustdoc/description.rs index 05ec428220847..43cd59ebd0924 100644 --- a/tests/rustdoc/description.rs +++ b/tests/rustdoc/description.rs @@ -22,3 +22,9 @@ pub mod foo_mod { // 'Only paragraph.' /// Only paragraph. pub fn foo_fn() {} + +// @has 'foo/fn.bar_fn.html' '//meta[@name="description"]/@content' \ +// 'Description with intra-doc link to foo_fn and [nonexistent_item] and foo_fn.' +#[allow(rustdoc::broken_intra_doc_links)] +/// Description with intra-doc link to [foo_fn] and [nonexistent_item] and [foo_fn](self::foo_fn). +pub fn bar_fn() {} From cca82fd997a8c16a1a790846ceab8717f16cc238 Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Sun, 12 Feb 2023 01:09:09 +0000 Subject: [PATCH 425/501] Document PointerLike --- library/core/src/marker.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs index e11bca5962a15..07a7d45c7ebcc 100644 --- a/library/core/src/marker.rs +++ b/library/core/src/marker.rs @@ -871,7 +871,10 @@ pub trait Destruct {} #[rustc_deny_explicit_impl] pub trait Tuple {} -/// A marker for things +/// A marker for pointer-like types. +/// +/// All types that have the same size and alignment as a `usize` or +/// `*const ()` automatically implement this trait. 
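As a rough sketch of how such a marker bound might be used (the `needs_thin_value` helper is purely illustrative, and the trait is unstable, so this assumes a nightly compiler with the `pointer_like_trait` feature enabled):

    #![feature(pointer_like_trait)]

    use core::marker::PointerLike;

    // Illustrative helper: accepts only values whose size and alignment match
    // a `usize` / `*const ()`, as the documentation above describes.
    fn needs_thin_value<T: PointerLike>(_value: T) {}

    fn main() {
        needs_thin_value(1usize); // a `usize` is pointer-sized and pointer-aligned
        needs_thin_value(&1i32);  // so is a shared reference to a sized type
        // needs_thin_value([0u8; 3]); // would be rejected: wrong size/alignment
    }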
#[unstable(feature = "pointer_like_trait", issue = "none")] #[cfg_attr(bootstrap, lang = "pointer_sized")] #[cfg_attr(not(bootstrap), lang = "pointer_like")] From 903ca873f75e3fcea2bf4cf583ae835525e38367 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Sat, 21 Jan 2023 22:09:56 +0000 Subject: [PATCH 426/501] Suggest the correct array length on mismatch --- .../src/infer/error_reporting/mod.rs | 65 +++++++++++++++++++ tests/ui/consts/array-literal-len-mismatch.rs | 4 ++ .../consts/array-literal-len-mismatch.stderr | 11 ++++ tests/ui/consts/const-array-oob-arith.stderr | 8 ++- tests/ui/inference/array-len-mismatch.rs | 12 ++++ tests/ui/inference/array-len-mismatch.stderr | 21 ++++++ 6 files changed, 119 insertions(+), 2 deletions(-) create mode 100644 tests/ui/consts/array-literal-len-mismatch.rs create mode 100644 tests/ui/consts/array-literal-len-mismatch.stderr create mode 100644 tests/ui/inference/array-len-mismatch.rs create mode 100644 tests/ui/inference/array-len-mismatch.stderr diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs index 88a0d6def5ec2..e60c86aed012c 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs @@ -64,6 +64,7 @@ use rustc_errors::{Applicability, DiagnosticBuilder, DiagnosticStyledString}; use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_hir::intravisit::Visitor; use rustc_hir::lang_items::LangItem; use rustc_hir::Node; use rustc_middle::dep_graph::DepContext; @@ -1975,6 +1976,70 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { (ty::Bool, ty::Tuple(list)) => if list.len() == 0 { self.suggest_let_for_letchains(&mut err, &trace.cause, span); } + (ty::Array(_, _), ty::Array(_, _)) => 'block: { + let hir = self.tcx.hir(); + let TypeError::FixedArraySize(sz) = terr else { + break 'block; + }; + let tykind = match hir.find_by_def_id(trace.cause.body_id) { + Some(hir::Node::Item(hir::Item { + kind: hir::ItemKind::Fn(_, _, body_id), + .. + })) => { + let body = hir.body(*body_id); + struct LetVisitor<'v> { + span: Span, + result: Option<&'v hir::Ty<'v>>, + } + impl<'v> Visitor<'v> for LetVisitor<'v> { + fn visit_stmt(&mut self, s: &'v hir::Stmt<'v>) { + if self.result.is_some() { + return; + } + // Find a local statement where the initializer has + // the same span as the error and the type is specified. + if let hir::Stmt { + kind: hir::StmtKind::Local(hir::Local { + init: Some(hir::Expr { + span: init_span, + .. + }), + ty: Some(array_ty), + .. + }), + .. + } = s + && init_span == &self.span { + self.result = Some(*array_ty); + } + } + } + let mut visitor = LetVisitor {span, result: None}; + visitor.visit_body(body); + visitor.result.map(|r| &r.peel_refs().kind) + } + Some(hir::Node::Item(hir::Item { + kind: hir::ItemKind::Const(ty, _), + .. + })) => { + Some(&ty.peel_refs().kind) + } + _ => None + }; + + if let Some(tykind) = tykind + && let hir::TyKind::Array(_, length) = tykind + && let hir::ArrayLen::Body(hir::AnonConst { hir_id, .. 
}) = length + && let Some(span) = self.tcx.hir().opt_span(*hir_id) + { + err.span_suggestion( + span, + "consider specifying the actual array length", + sz.found, + Applicability::MaybeIncorrect, + ); + } + } _ => {} } } diff --git a/tests/ui/consts/array-literal-len-mismatch.rs b/tests/ui/consts/array-literal-len-mismatch.rs new file mode 100644 index 0000000000000..b30ff61a99c53 --- /dev/null +++ b/tests/ui/consts/array-literal-len-mismatch.rs @@ -0,0 +1,4 @@ +const NUMBERS: [u8; 3] = [10, 20]; +//~^ ERROR mismatched types +//~^^ HELP consider specifying the actual array length +fn main() {} diff --git a/tests/ui/consts/array-literal-len-mismatch.stderr b/tests/ui/consts/array-literal-len-mismatch.stderr new file mode 100644 index 0000000000000..22fec638970a7 --- /dev/null +++ b/tests/ui/consts/array-literal-len-mismatch.stderr @@ -0,0 +1,11 @@ +error[E0308]: mismatched types + --> $DIR/array-literal-len-mismatch.rs:1:26 + | +LL | const NUMBERS: [u8; 3] = [10, 20]; + | - ^^^^^^^^ expected an array with a fixed size of 3 elements, found one with 2 elements + | | + | help: consider specifying the actual array length: `2` + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0308`. diff --git a/tests/ui/consts/const-array-oob-arith.stderr b/tests/ui/consts/const-array-oob-arith.stderr index f7a55d3ca7210..029d94273fae1 100644 --- a/tests/ui/consts/const-array-oob-arith.stderr +++ b/tests/ui/consts/const-array-oob-arith.stderr @@ -2,13 +2,17 @@ error[E0308]: mismatched types --> $DIR/const-array-oob-arith.rs:5:45 | LL | const BLUB: [i32; (ARR[0] - 40) as usize] = [5]; - | ^^^ expected an array with a fixed size of 2 elements, found one with 1 element + | ---------------------- ^^^ expected an array with a fixed size of 2 elements, found one with 1 element + | | + | help: consider specifying the actual array length: `1` error[E0308]: mismatched types --> $DIR/const-array-oob-arith.rs:8:44 | LL | const BOO: [i32; (ARR[0] - 41) as usize] = [5, 99]; - | ^^^^^^^ expected an array with a fixed size of 1 element, found one with 2 elements + | ---------------------- ^^^^^^^ expected an array with a fixed size of 1 element, found one with 2 elements + | | + | help: consider specifying the actual array length: `2` error: aborting due to 2 previous errors diff --git a/tests/ui/inference/array-len-mismatch.rs b/tests/ui/inference/array-len-mismatch.rs new file mode 100644 index 0000000000000..149d061029bc3 --- /dev/null +++ b/tests/ui/inference/array-len-mismatch.rs @@ -0,0 +1,12 @@ +fn returns_arr() -> [u8; 2] { + [1, 2] +} + +fn main() { + let wrong: [u8; 3] = [10, 20]; + //~^ ERROR mismatched types + //~^^ HELP consider specifying the actual array length + let wrong: [u8; 3] = returns_arr(); + //~^ ERROR mismatched types + //~^^ HELP consider specifying the actual array length +} diff --git a/tests/ui/inference/array-len-mismatch.stderr b/tests/ui/inference/array-len-mismatch.stderr new file mode 100644 index 0000000000000..7358e47839725 --- /dev/null +++ b/tests/ui/inference/array-len-mismatch.stderr @@ -0,0 +1,21 @@ +error[E0308]: mismatched types + --> $DIR/array-len-mismatch.rs:6:26 + | +LL | let wrong: [u8; 3] = [10, 20]; + | ------- ^^^^^^^^ expected an array with a fixed size of 3 elements, found one with 2 elements + | | | + | | help: consider specifying the actual array length: `2` + | expected due to this + +error[E0308]: mismatched types + --> $DIR/array-len-mismatch.rs:9:26 + | +LL | let wrong: [u8; 3] = returns_arr(); + | ------- 
^^^^^^^^^^^^^ expected an array with a fixed size of 3 elements, found one with 2 elements + | | | + | | help: consider specifying the actual array length: `2` + | expected due to this + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0308`. From 240b6f7f8d512a70d5e01a1de12ffb5b6fb0f0ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 12 Feb 2023 10:55:49 +0200 Subject: [PATCH 427/501] Add version placeholder to changelog template --- xtask/src/publish.rs | 2 +- xtask/src/release/changelog.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/xtask/src/publish.rs b/xtask/src/publish.rs index 79b5f3d2f61b7..cdb7d8fac89de 100644 --- a/xtask/src/publish.rs +++ b/xtask/src/publish.rs @@ -13,7 +13,7 @@ impl flags::PublishReleaseNotes { let tag_name = &file_name[0..10]; let original_changelog_url = create_original_changelog_url(&file_name); let additional_paragraph = - format!("\nSee also [original changelog]({original_changelog_url})."); + format!("\nSee also the [changelog post]({original_changelog_url})."); markdown.push_str(&additional_paragraph); if self.dry_run { println!("{markdown}"); diff --git a/xtask/src/release/changelog.rs b/xtask/src/release/changelog.rs index abf4d7a2e1640..67538414840d0 100644 --- a/xtask/src/release/changelog.rs +++ b/xtask/src/release/changelog.rs @@ -69,7 +69,7 @@ pub(crate) fn get_changelog( :page-layout: post Commit: commit:{commit}[] + -Release: release:{today}[] +Release: release:{today}[] (`TBD`) == New Features From 9d85161f30655d2abcce8b2b51477738ace2d912 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 12 Feb 2023 11:12:05 +0200 Subject: [PATCH 428/501] Add Lapce section to the manual --- docs/user/manual.adoc | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 82909ad790976..e3add3053b53c 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -535,12 +535,6 @@ https://docs.helix-editor.com/[Helix] supports LSP by default. However, it won't install `rust-analyzer` automatically. You can follow instructions for installing <>. -=== Crates - -There is a package named `ra_ap_rust_analyzer` available on https://crates.io/crates/ra_ap_rust-analyzer[crates.io], for someone who wants to use it programmatically. - -For more details, see https://github.com/rust-lang/rust-analyzer/blob/master/.github/workflows/publish.yml[the publish workflow]. - === Visual Studio 2022 There are multiple rust-analyzer extensions for Visual Studio 2022 on Windows: @@ -577,6 +571,17 @@ https://github.com/sourcegear/rust-vs-extension[GitHub (docs, issues, discussion * Free (no-cost) * Supports all editions of Visual Studio 2022 on Windows: Community, Professional, or Enterprise +=== Lapce + +https://lapce.dev/[Lapce] has a Rust plugin which you can install directly. +Unfortunately, it downloads an old version of `rust-analyzer`, but you can set the server path under Settings. + +=== Crates + +There is a package named `ra_ap_rust_analyzer` available on https://crates.io/crates/ra_ap_rust-analyzer[crates.io], for someone who wants to use it programmatically. + +For more details, see https://github.com/rust-lang/rust-analyzer/blob/master/.github/workflows/publish.yml[the publish workflow]. + == Troubleshooting Start with looking at the rust-analyzer version. 
From cfeec8a7050095529d2fefa3f4aa029b8e8dc789 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 12 Feb 2023 11:10:33 +0200 Subject: [PATCH 429/501] Add clippy configuration section to the manual --- docs/user/manual.adoc | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 82909ad790976..fb9ecd2f8c933 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -816,6 +816,10 @@ include::./generated_assists.adoc[] While most errors and warnings provided by rust-analyzer come from the `cargo check` integration, there's a growing number of diagnostics implemented using rust-analyzer's own analysis. Some of these diagnostics don't respect `\#[allow]` or `\#[deny]` attributes yet, but can be turned off using the `rust-analyzer.diagnostics.enable`, `rust-analyzer.diagnostics.experimental.enable` or `rust-analyzer.diagnostics.disabled` settings. +=== Clippy + +To run `cargo clippy` instead of `cargo check`, you can set `"rust-analyzer.check.command": "clippy"`. + include::./generated_diagnostic.adoc[] == Editor Features From 8f617b55cefb199f80b114a37215d51e380d6ebf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 12 Feb 2023 11:35:44 +0200 Subject: [PATCH 430/501] Update some old checkOnSave references --- docs/user/manual.adoc | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index fb9ecd2f8c933..5b5f438a8054a 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -768,14 +768,18 @@ See https://github.com/rust-analyzer/rust-project.json-example for a small examp You can set the `RA_LOG` environment variable to `rust_analyzer=info` to inspect how rust-analyzer handles config and project loading. -Note that calls to `cargo check` are disabled when using `rust-project.json` by default, so compilation errors and warnings will no longer be sent to your LSP client. To enable these compilation errors you will need to specify explicitly what command rust-analyzer should run to perform the checks using the `checkOnSave.overrideCommand` configuration. As an example, the following configuration explicitly sets `cargo check` as the `checkOnSave` command. +Note that calls to `cargo check` are disabled when using `rust-project.json` by default, so compilation errors and warnings will no longer be sent to your LSP client. +To enable these compilation errors you will need to specify explicitly what command rust-analyzer should run to perform the checks using the `rust-analyzer.check.overrideCommand` configuration. +As an example, the following configuration explicitly sets `cargo check` as the `check` command. [source,json] ---- -{ "rust-analyzer.checkOnSave.overrideCommand": ["cargo", "check", "--message-format=json"] } +{ "rust-analyzer.check.overrideCommand": ["cargo", "check", "--message-format=json"] } ---- -The `checkOnSave.overrideCommand` requires the command specified to output json error messages for rust-analyzer to consume. The `--message-format=json` flag does this for `cargo check` so whichever command you use must also output errors in this format. See the <> section for more information. +`check.overrideCommand` requires the command specified to output json error messages for rust-analyzer to consume. +The `--message-format=json` flag does this for `cargo check` so whichever command you use must also output errors in this format. +See the <> section for more information. 
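For example, a minimal sketch of an override that runs Clippy instead of `cargo check` (assuming Clippy is installed; any extra lint flags are up to your setup) keeps the JSON output flag:

[source,json]
----
{ "rust-analyzer.check.overrideCommand": ["cargo", "clippy", "--message-format=json"] }
----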
== Security @@ -952,7 +956,7 @@ Also note that a full runnable name is something like *run bin_or_example_name*, Instead of relying on the built-in `cargo check`, you can configure Code to run a command in the background and use the `$rustc-watch` problem matcher to generate inline error markers from its output. -To do this you need to create a new https://code.visualstudio.com/docs/editor/tasks[VS Code Task] and set `rust-analyzer.checkOnSave.enable: false` in preferences. +To do this you need to create a new https://code.visualstudio.com/docs/editor/tasks[VS Code Task] and set `"rust-analyzer.checkOnSave": false` in preferences. For example, if you want to run https://crates.io/crates/cargo-watch[`cargo watch`] instead, you might add the following to `.vscode/tasks.json`: From c3a2e7a8093e54569516cac8ae5905089cf955f5 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sun, 12 Feb 2023 15:16:27 +0100 Subject: [PATCH 431/501] avoid mixing accesses of ptrs derived from a mutable ref and parent ptrs --- library/core/tests/ptr.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/library/core/tests/ptr.rs b/library/core/tests/ptr.rs index 80d30f14c66b2..c02cd99cc4477 100644 --- a/library/core/tests/ptr.rs +++ b/library/core/tests/ptr.rs @@ -25,7 +25,7 @@ fn test() { snd: isize, } let mut p = Pair { fst: 10, snd: 20 }; - let pptr: *mut Pair = &mut p; + let pptr: *mut Pair = addr_of_mut!(p); let iptr: *mut isize = pptr as *mut isize; assert_eq!(*iptr, 10); *iptr = 30; @@ -1070,8 +1070,8 @@ fn swap_copy_untyped() { let mut x = 5u8; let mut y = 6u8; - let ptr1 = &mut x as *mut u8 as *mut bool; - let ptr2 = &mut y as *mut u8 as *mut bool; + let ptr1 = addr_of_mut!(x).cast::(); + let ptr2 = addr_of_mut!(y).cast::(); unsafe { ptr::swap(ptr1, ptr2); From 0ea0c90d58cccccd7039334092f8ee85bc4db394 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sun, 12 Feb 2023 16:30:37 +0100 Subject: [PATCH 432/501] fix UB in ancient test --- tests/ui/regions/regions-mock-codegen.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/ui/regions/regions-mock-codegen.rs b/tests/ui/regions/regions-mock-codegen.rs index 9d0ca76e4095d..d5c93f81fd84d 100644 --- a/tests/ui/regions/regions-mock-codegen.rs +++ b/tests/ui/regions/regions-mock-codegen.rs @@ -22,15 +22,15 @@ struct Ccx { x: isize, } -fn allocate(_bcx: &arena) -> &Bcx<'_> { +fn allocate(_bcx: &arena) -> &mut Bcx<'_> { unsafe { let layout = Layout::new::(); let ptr = Global.allocate(layout).unwrap_or_else(|_| handle_alloc_error(layout)); - &*(ptr.as_ptr() as *const _) + &mut *ptr.as_ptr().cast() } } -fn h<'a>(bcx: &'a Bcx<'a>) -> &'a Bcx<'a> { +fn h<'a>(bcx: &'a Bcx<'a>) -> &'a mut Bcx<'a> { return allocate(bcx.fcx.arena); } @@ -38,7 +38,7 @@ fn g(fcx: &Fcx) { let bcx = Bcx { fcx }; let bcx2 = h(&bcx); unsafe { - Global.deallocate(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::()); + Global.deallocate(NonNull::new_unchecked(bcx2 as *mut _ as *mut _), Layout::new::()); } } From 79d2430e9961c9b38aa8366f028fbcbcf98e74a5 Mon Sep 17 00:00:00 2001 From: Scott McMurray Date: Sun, 12 Feb 2023 10:37:25 -0800 Subject: [PATCH 433/501] Add a doc note about why `Chain` is not `ExactSizeIterator` --- library/core/src/iter/traits/exact_size.rs | 10 ++++++++++ library/core/src/iter/traits/marker.rs | 11 +++++++++++ 2 files changed, 21 insertions(+) diff --git a/library/core/src/iter/traits/exact_size.rs b/library/core/src/iter/traits/exact_size.rs index 1757e37ec0e2f..908830d8a9514 100644 --- 
a/library/core/src/iter/traits/exact_size.rs +++ b/library/core/src/iter/traits/exact_size.rs @@ -21,6 +21,16 @@ /// /// [`len`]: ExactSizeIterator::len /// +/// # When *shouldn't* an adapter be `ExactSizeIterator`? +/// +/// If an adapter makes an iterator *longer*, then it's usually incorrect for +/// that adapter to implement `ExactSizeIterator`. The inner exact-sized +/// iterator might already be `usize::MAX`-long, and thus the length of the +/// longer adapted iterator would no longer be exactly representable in `usize`. +/// +/// This is why [`Chain`](crate::iter::Chain) isn't `ExactSizeIterator`, +/// even when `A` and `B` are both `ExactSizeIterator`. +/// /// # Examples /// /// Basic usage: diff --git a/library/core/src/iter/traits/marker.rs b/library/core/src/iter/traits/marker.rs index da753745740d7..af02848233d99 100644 --- a/library/core/src/iter/traits/marker.rs +++ b/library/core/src/iter/traits/marker.rs @@ -31,6 +31,17 @@ impl FusedIterator for &mut I {} /// The iterator must produce exactly the number of elements it reported /// or diverge before reaching the end. /// +/// # When *shouldn't* an adapter be `TrustedLen`? +/// +/// If an adapter makes an iterator *shorter* by a given amount, then it's +/// usually incorrect for that adapter to implement `TrustedLen`. The inner +/// iterator might return more than `usize::MAX` items, but there's no way to +/// know what `k` elements less than that will be, since the `size_hint` from +/// the inner iterator has already saturated and lost that information. +/// +/// This is why [`Skip`](crate::iter::Skip) isn't `TrustedLen`, even when +/// `I` implements `TrustedLen`. +/// /// # Safety /// /// This trait must only be implemented when the contract is upheld. Consumers From b0df355f80647ca94e6676e1f5bd6990ae17086e Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Sun, 12 Feb 2023 12:27:40 -0700 Subject: [PATCH 434/501] rustdoc: use tighter line height in h1 and h2 --- src/librustdoc/html/static/css/rustdoc.css | 8 ++++++++ tests/rustdoc-gui/mobile.goml | 2 +- tests/rustdoc-gui/scrape-examples-layout.goml | 8 ++++---- tests/rustdoc-gui/search-result-display.goml | 4 ++-- tests/rustdoc-gui/sidebar-mobile-scroll.goml | 6 +++--- tests/rustdoc-gui/sidebar-mobile.goml | 2 +- 6 files changed, 19 insertions(+), 11 deletions(-) diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index 2a9548712f086..4f8f00f298a8f 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -174,6 +174,14 @@ h1, h2, h3, h4 { .top-doc .docblock > h4 { border-bottom: 1px solid var(--headings-border-bottom-color); } +/* while line-height 1.5 is required for any "block of text", + which WCAG defines as more than one sentence, it looks weird for + very large main headers */ +h1, h2 { + line-height: 1.25; + padding-top: 3px; + padding-bottom: 9px; +} h3.code-header { font-size: 1.125rem; /* 18px */ } diff --git a/tests/rustdoc-gui/mobile.goml b/tests/rustdoc-gui/mobile.goml index 3e444cbd6dc99..8c8516ebff8e4 100644 --- a/tests/rustdoc-gui/mobile.goml +++ b/tests/rustdoc-gui/mobile.goml @@ -12,7 +12,7 @@ assert-css: (".main-heading", { "flex-direction": "column" }) -assert-property: (".mobile-topbar h2", {"offsetHeight": 36}) +assert-property: (".mobile-topbar h2", {"offsetHeight": 33}) // Note: We can't use assert-text here because the 'Since' is set by CSS and // is therefore not part of the DOM. 
diff --git a/tests/rustdoc-gui/scrape-examples-layout.goml b/tests/rustdoc-gui/scrape-examples-layout.goml index 95102528ec11d..dad727c775775 100644 --- a/tests/rustdoc-gui/scrape-examples-layout.goml +++ b/tests/rustdoc-gui/scrape-examples-layout.goml @@ -40,10 +40,10 @@ assert-property: ( store-value: (offset_y, 4) // First with desktop -assert-position: (".scraped-example .code-wrapper", {"y": 255}) -assert-position: (".scraped-example .code-wrapper .prev", {"y": 255 + |offset_y|}) +assert-position: (".scraped-example .code-wrapper", {"y": 253}) +assert-position: (".scraped-example .code-wrapper .prev", {"y": 253 + |offset_y|}) // Then with mobile size: (600, 600) -assert-position: (".scraped-example .code-wrapper", {"y": 314}) -assert-position: (".scraped-example .code-wrapper .prev", {"y": 314 + |offset_y|}) +assert-position: (".scraped-example .code-wrapper", {"y": 308}) +assert-position: (".scraped-example .code-wrapper .prev", {"y": 308 + |offset_y|}) diff --git a/tests/rustdoc-gui/search-result-display.goml b/tests/rustdoc-gui/search-result-display.goml index 43e608228d8e3..20a88c36edb3e 100644 --- a/tests/rustdoc-gui/search-result-display.goml +++ b/tests/rustdoc-gui/search-result-display.goml @@ -22,7 +22,7 @@ size: (900, 900) // First we check the current width, height and position. assert-css: ("#crate-search", {"width": "223px"}) -assert-css: (".search-results-title", {"height": "44px", "width": "640px"}) +assert-css: (".search-results-title", {"height": "50px", "width": "640px"}) assert-css: ("#search", {"width": "640px"}) // Then we update the text of one of the `