From fc9196683a87bb1945f44fca7372f58c2d97f582 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Mar 2023 22:13:41 +0000 Subject: [PATCH 1/9] Update syn requirement from 1.0.81 to 2.0.6 Updates the requirements on [syn](https://github.com/dtolnay/syn) to permit the latest version. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/1.0.81...2.0.6) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- impl/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/impl/Cargo.toml b/impl/Cargo.toml index 38dcf97e..b857d18f 100644 --- a/impl/Cargo.toml +++ b/impl/Cargo.toml @@ -24,7 +24,7 @@ proc-macro = true [dependencies] proc-macro2 = "1.0" quote = "1.0" -syn = "1.0.81" +syn = "2.0.6" convert_case = { version = "0.6", optional = true } unicode-xid = { version = "0.2.2", optional = true } From 8b1ecd690863f1b122875ff6dd64e913096d2b05 Mon Sep 17 00:00:00 2001 From: tyranron Date: Thu, 23 Mar 2023 17:46:48 +0200 Subject: [PATCH 2/9] Fix `Debug` and `Display` impls [skip ci] --- impl/Cargo.toml | 2 +- impl/src/fmt/debug.rs | 14 +++++++------- impl/src/fmt/display.rs | 36 ++++++++++++++++++------------------ impl/src/fmt/mod.rs | 6 +++--- impl/src/utils.rs | 18 ++++++++---------- 5 files changed, 37 insertions(+), 39 deletions(-) diff --git a/impl/Cargo.toml b/impl/Cargo.toml index b857d18f..8a1f140f 100644 --- a/impl/Cargo.toml +++ b/impl/Cargo.toml @@ -24,7 +24,7 @@ proc-macro = true [dependencies] proc-macro2 = "1.0" quote = "1.0" -syn = "2.0.6" +syn = "2.0" convert_case = { version = "0.6", optional = true } unicode-xid = { version = "0.2.2", optional = true } diff --git a/impl/src/fmt/debug.rs b/impl/src/fmt/debug.rs index fbfdc1fc..ff884dc9 100644 --- a/impl/src/fmt/debug.rs +++ b/impl/src/fmt/debug.rs @@ -154,9 +154,9 @@ impl ContainerAttributes { attrs .as_ref() .iter() - .filter(|attr| attr.path.is_ident("debug")) + .filter(|attr| attr.path().is_ident("debug")) .try_fold(ContainerAttributes::default(), |mut attrs, attr| { - let attr = syn::parse2::(attr.tokens.clone())?; + let attr = attr.parse_args::()?; attrs.bounds.0.extend(attr.bounds.0); Ok(attrs) }) @@ -172,7 +172,7 @@ impl Parse for ContainerAttributes { syn::parenthesized!(content in input); let error_span = error_span.unwrap_or_else(|| unreachable!()); - BoundsAttribute::check_legacy_fmt(&content, error_span)?; + BoundsAttribute::check_legacy_fmt(&content, error_span.span())?; content.parse().map(|bounds| ContainerAttributes { bounds }) } @@ -202,10 +202,10 @@ impl FieldAttribute { Ok(attrs .as_ref() .iter() - .filter(|attr| attr.path.is_ident("debug")) + .filter(|attr| attr.path().is_ident("debug")) .try_fold(None, |mut attrs, attr| { - let field_attr = syn::parse2::(attr.tokens.clone())?; - if let Some((path, _)) = attrs.replace((&attr.path, field_attr)) { + let field_attr = attr.parse_args::()?; + if let Some((path, _)) = attrs.replace((attr.path(), field_attr)) { Err(Error::new( path.span(), "only single `#[debug(...)]` attribute is allowed here", @@ -227,7 +227,7 @@ impl Parse for FieldAttribute { syn::parenthesized!(content in input); let error_span = error_span.unwrap_or_else(|| unreachable!()); - FmtAttribute::check_legacy_fmt(&content, error_span)?; + FmtAttribute::check_legacy_fmt(&content, error_span.span())?; if content.peek(syn::LitStr) { content.parse().map(Self::Fmt) diff --git a/impl/src/fmt/display.rs 
b/impl/src/fmt/display.rs index 7df1d330..26229533 100644 --- a/impl/src/fmt/display.rs +++ b/impl/src/fmt/display.rs @@ -219,22 +219,22 @@ impl Attributes { attrs .as_ref() .iter() - .filter(|attr| attr.path.is_ident(trait_name_to_attribute_name(trait_name))) + .filter(|attr| attr.path().is_ident(trait_name_to_attribute_name(trait_name))) .try_fold(Attributes::default(), |mut attrs, attr| { - let attr = syn::parse2::(attr.tokens.clone())?; - match attr { - Attribute::Bounds(more) => { - attrs.bounds.0.extend(more.0); - } - Attribute::Fmt(fmt) => { - attrs.fmt.replace(fmt).map_or(Ok(()), |dup| Err(Error::new( - dup.span(), - format!( - "Multiple `#[{}(\"...\", ...)]` attributes aren't allowed", - trait_name_to_attribute_name(trait_name), - ))))?; - } - }; + let attr = attr.parse_args::()?; + match attr { + Attribute::Bounds(more) => { + attrs.bounds.0.extend(more.0); + } + Attribute::Fmt(fmt) => { + attrs.fmt.replace(fmt).map_or(Ok(()), |dup| Err(Error::new( + dup.span(), + format!( + "Multiple `#[{}(\"...\", ...)]` attributes aren't allowed", + trait_name_to_attribute_name(trait_name), + ))))?; + } + }; Ok(attrs) }) } @@ -251,7 +251,7 @@ enum Attribute { } impl Parse for Attribute { - fn parse(input: ParseStream) -> Result { + fn parse(input: ParseStream<'_>) -> Result { use proc_macro2::Delimiter::Parenthesis; let error_span = input.cursor().group(Parenthesis).map(|(_, span, _)| span); @@ -259,8 +259,8 @@ impl Parse for Attribute { syn::parenthesized!(content in input); let error_span = error_span.unwrap_or_else(|| unreachable!()); - BoundsAttribute::check_legacy_fmt(&content, error_span)?; - FmtAttribute::check_legacy_fmt(&content, error_span)?; + BoundsAttribute::check_legacy_fmt(&content, error_span.span())?; + FmtAttribute::check_legacy_fmt(&content, error_span.span())?; if content.peek(syn::LitStr) { content.parse().map(Attribute::Fmt) diff --git a/impl/src/fmt/mod.rs b/impl/src/fmt/mod.rs index 68913620..91983444 100644 --- a/impl/src/fmt/mod.rs +++ b/impl/src/fmt/mod.rs @@ -17,7 +17,7 @@ use syn::{ parse::{Parse, ParseBuffer, ParseStream}, punctuated::Punctuated, spanned::Spanned as _, - Error, Result, + token, Error, Result, }; /// Representation of a macro attribute expressing additional trait bounds. @@ -72,7 +72,7 @@ impl Parse for BoundsAttribute { syn::parenthesized!(content in input); content - .parse_terminated(syn::WherePredicate::parse) + .parse_terminated(syn::WherePredicate::parse, token::Comma) .map(Self) } } @@ -140,7 +140,7 @@ impl FmtAttribute { match path { Ok(path) if path.is_ident("fmt") => (|| { let args = fork - .parse_terminated::<_, syn::token::Comma>(syn::Lit::parse) + .parse_terminated(syn::Lit::parse, token::Comma) .ok()? 
.into_iter() .enumerate() diff --git a/impl/src/utils.rs b/impl/src/utils.rs index 6ae2b8a5..b13abb6c 100644 --- a/impl/src/utils.rs +++ b/impl/src/utils.rs @@ -882,16 +882,14 @@ fn get_meta_info( attrs: &[Attribute], allowed_attr_params: &[&str], ) -> Result { - let mut it = attrs - .iter() - .filter_map(|m| m.parse_meta().ok()) - .filter(|m| { - m.path() - .segments - .first() - .map(|p| p.ident == trait_attr) - .unwrap_or_default() - }); + let mut it = attrs.iter().filter(|a| { + a.meta + .path() + .segments + .first() + .map(|p| p.ident == trait_attr) + .unwrap_or_default() + }); let mut info = MetaInfo::default(); From 7b35cc42da26639ff10b18669021f82d3fe91405 Mon Sep 17 00:00:00 2001 From: tyranron Date: Thu, 23 Mar 2023 18:25:48 +0200 Subject: [PATCH 3/9] Try fix `utils` parsing --- impl/src/fmt/mod.rs | 2 +- impl/src/utils.rs | 71 +++++++++++++++++++++++++++++---------------- 2 files changed, 47 insertions(+), 26 deletions(-) diff --git a/impl/src/fmt/mod.rs b/impl/src/fmt/mod.rs index 91983444..9d06c941 100644 --- a/impl/src/fmt/mod.rs +++ b/impl/src/fmt/mod.rs @@ -53,7 +53,7 @@ impl BoundsAttribute { } impl Parse for BoundsAttribute { - fn parse(input: ParseStream) -> Result { + fn parse(input: ParseStream<'_>) -> Result { let _ = input.parse::().and_then(|p| { if ["bound", "bounds", "where"] .into_iter() diff --git a/impl/src/utils.rs b/impl/src/utils.rs index b13abb6c..07937f84 100644 --- a/impl/src/utils.rs +++ b/impl/src/utils.rs @@ -7,10 +7,14 @@ use proc_macro2::TokenStream; use quote::{format_ident, quote, ToTokens}; use syn::{ - parse_quote, punctuated::Punctuated, spanned::Spanned, Attribute, Data, - DeriveInput, Error, Field, Fields, FieldsNamed, FieldsUnnamed, GenericParam, - Generics, Ident, ImplGenerics, Index, Meta, NestedMeta, Result, Token, Type, - TypeGenerics, TypeParamBound, Variant, WhereClause, + ext::IdentExt as _, + parse::{Parse, ParseStream}, + parse_quote, + punctuated::Punctuated, + spanned::Spanned, + token, Attribute, Data, DeriveInput, Error, Field, Fields, FieldsNamed, + FieldsUnnamed, GenericParam, Generics, Ident, ImplGenerics, Index, Meta, Result, + Token, Type, TypeGenerics, TypeParamBound, Variant, WhereClause, }; #[derive(Clone, Copy, Default)] @@ -893,30 +897,30 @@ fn get_meta_info( let mut info = MetaInfo::default(); - let Some(meta) = it.next() else { + let Some(attr) = it.next() else { return Ok(info); }; if allowed_attr_params.is_empty() { - return Err(Error::new(meta.span(), "Attribute is not allowed here")); + return Err(Error::new(attr.span(), "Attribute is not allowed here")); } info.enabled = Some(true); - if let Some(another_meta) = it.next() { + if let Some(another_attr) = it.next() { return Err(Error::new( - another_meta.span(), + another_attr.span(), "Only a single attribute is allowed", )); } - let list = match meta.clone() { + let list = match &attr.meta { Meta::Path(_) => { if allowed_attr_params.contains(&"ignore") { return Ok(info); } else { return Err(Error::new( - meta.span(), + attr.span(), format!( "Empty attribute is not allowed, add one of the following parameters: {}", allowed_attr_params.join(", "), @@ -933,28 +937,23 @@ fn get_meta_info( } }; - parse_punctuated_nested_meta(&mut info, &list.nested, allowed_attr_params, None)?; + parse_punctuated_nested_meta( + &mut info, + &list.parse_args_with(Punctuated::parse_terminated)?, + allowed_attr_params, + None, + )?; Ok(info) } fn parse_punctuated_nested_meta( info: &mut MetaInfo, - meta: &Punctuated, + meta: &Punctuated, allowed_attr_params: &[&str], wrapper_name: 
Option<&str>, ) -> Result<()> { for meta in meta.iter() { - let meta = match meta { - NestedMeta::Meta(meta) => meta, - NestedMeta::Lit(lit) => { - return Err(Error::new( - lit.span(), - "Attribute doesn't support literals here", - )) - } - }; - match meta { Meta::List(list) if list.path.is_ident("not") => { if wrapper_name.is_some() { @@ -966,7 +965,7 @@ fn parse_punctuated_nested_meta( } parse_punctuated_nested_meta( info, - &list.nested, + &list.parse_args_with(Punctuated::parse_terminated)?, allowed_attr_params, Some("not"), )?; @@ -999,7 +998,9 @@ fn parse_punctuated_nested_meta( | (Some("ref"), "types") | (Some("ref_mut"), "types") => { parse_nested = false; - for meta in &list.nested { + for meta in &list.parse_args_with( + Punctuated::::parse_terminated, + )? { let typ: syn::Type = match meta { NestedMeta::Meta(meta) => { let Meta::Path(path) = meta else { @@ -1063,7 +1064,7 @@ fn parse_punctuated_nested_meta( if parse_nested { parse_punctuated_nested_meta( info, - &list.nested, + &list.parse_args_with(Punctuated::parse_terminated)?, allowed_attr_params, Some(&attr_name), )?; @@ -1118,6 +1119,26 @@ fn parse_punctuated_nested_meta( Ok(()) } +enum NestedMeta { + Meta(Meta), + Lit(syn::Lit), +} + +impl Parse for NestedMeta { + fn parse(input: ParseStream<'_>) -> Result { + if input.peek(syn::Lit) && !(input.peek(syn::LitBool) && input.peek2(Token![=])) + { + input.parse().map(Self::Lit) + } else if input.peek(Ident::peek_any) + || input.peek(Token![::]) && input.peek3(Ident::peek_any) + { + input.parse().map(Self::Meta) + } else { + Err(input.error("expected identifier or literal")) + } + } +} + #[derive(Clone, Debug, Default)] pub struct FullMetaInfo { pub enabled: bool, From de8f7ab0c7ad89fcad33bb96b555a56432fefa18 Mon Sep 17 00:00:00 2001 From: tyranron Date: Thu, 11 May 2023 18:22:08 +0200 Subject: [PATCH 4/9] Borrow `syn::Type` polyfill --- impl/src/lib.rs | 1 + impl/src/parsing.rs | 311 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 312 insertions(+) create mode 100644 impl/src/parsing.rs diff --git a/impl/src/lib.rs b/impl/src/lib.rs index 1760086f..324aa54b 100644 --- a/impl/src/lib.rs +++ b/impl/src/lib.rs @@ -8,6 +8,7 @@ use proc_macro::TokenStream; use syn::parse::Error as ParseError; +pub(crate) mod parsing; mod utils; #[cfg(any(feature = "add_assign", feature = "mul_assign"))] diff --git a/impl/src/parsing.rs b/impl/src/parsing.rs new file mode 100644 index 00000000..92734e90 --- /dev/null +++ b/impl/src/parsing.rs @@ -0,0 +1,311 @@ +//! Common parsing utilities for derive macros. +//! +//! Fair parsing of [`syn::Type`] requires [`syn`]'s `full` feature to be +//! enabled, which unnecessary increases compile times. As we don't have +//! complex AST manipulation, usually requiring only understanding where +//! syntax item begins and ends, simpler manual parsing is implemented. + +use proc_macro2::{Delimiter, TokenStream}; +use quote::ToTokens; +use syn::{ + buffer::Cursor, + parse::{Parse, ParseStream}, + punctuated::Punctuated, + spanned::Spanned as _, + token, Error, Result, +}; + +/// [`syn::Type`] [`Parse`]ing polyfill. +#[derive(Debug)] +pub(crate) enum Type { + /// [`syn::Type::Tuple`] [`Parse`]ing polyfill. + Tuple { + paren: token::Paren, + items: Punctuated, + }, + + /// Every other [`syn::Type`] variant. 
+ Other(TokenStream), +} + +impl Parse for Type { + fn parse(input: ParseStream) -> Result { + input.step(|c| { + let outer = *c; + + if let Some((mut cursor, paren_span, next_item)) = + outer.group(Delimiter::Parenthesis) + { + let mut items = Punctuated::new(); + while !cursor.eof() { + let (stream, c) = Self::parse_other(cursor).ok_or_else(|| { + Error::new(cursor.span(), "failed to parse type") + })?; + items.push_value(stream); + cursor = c; + if let Some((p, c)) = punct(',')(cursor) { + items.push_punct(token::Comma(p.span())); + cursor = c; + } + } + // `(Type)` is equivalent to `Type`, so isn't top-level tuple. + if items.len() == 1 && !items.trailing_punct() { + let stream = outer + .token_tree() + .unwrap_or_else(|| unreachable!()) + .0 + .into_token_stream(); + Ok((Type::Other(stream), next_item)) + } else { + Ok(( + Type::Tuple { + paren: token::Paren(paren_span), + items, + }, + next_item, + )) + } + } else { + Self::parse_other(outer) + .map(|(s, c)| (Self::Other(s), c)) + .ok_or_else(|| Error::new(outer.span(), "failed to parse type")) + } + }) + } +} + +impl ToTokens for Type { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Type::Tuple { paren, items } => { + paren.surround(tokens, |tokens| items.to_tokens(tokens)) + } + Type::Other(other) => other.to_tokens(tokens), + } + } +} + +impl Type { + /// Parses a single [`Type::Other`]. + pub fn parse_other(c: Cursor<'_>) -> Option<(TokenStream, Cursor<'_>)> { + take_until1( + alt([&mut balanced_pair(punct('<'), punct('>')), &mut token_tree]), + punct(','), + )(c) + } +} + +/// Result of parsing. +type ParsingResult<'a> = Option<(TokenStream, Cursor<'a>)>; + +/// Tries to parse a [`Punct`]. +/// +/// [`Punct`]: proc_macro2::Punct +pub fn punct(p: char) -> impl FnMut(Cursor<'_>) -> ParsingResult<'_> { + move |c| { + c.punct().and_then(|(punct, c)| { + (punct.as_char() == p).then(|| (punct.into_token_stream(), c)) + }) + } +} + +/// Tries to parse any [`TokenTree`]. +/// +/// [`TokenTree`]: proc_macro2::TokenTree +pub fn token_tree(c: Cursor<'_>) -> ParsingResult<'_> { + c.token_tree().map(|(tt, c)| (tt.into_token_stream(), c)) +} + +/// Parses until balanced amount of `open` and `close` or eof. +/// +/// [`Cursor`] should be pointing **right after** the first `open`ing. +pub fn balanced_pair( + mut open: impl FnMut(Cursor<'_>) -> ParsingResult<'_>, + mut close: impl FnMut(Cursor<'_>) -> ParsingResult<'_>, +) -> impl FnMut(Cursor<'_>) -> ParsingResult<'_> { + move |c| { + let (mut out, mut c) = open(c)?; + let mut count = 1; + + while count != 0 { + let (stream, cursor) = if let Some(closing) = close(c) { + count -= 1; + closing + } else if let Some(opening) = open(c) { + count += 1; + opening + } else { + let (tt, c) = c.token_tree()?; + (tt.into_token_stream(), c) + }; + out.extend(stream); + c = cursor; + } + + Some((out, c)) + } +} + +/// Tries to execute the first successful parser of the provided ones. +pub fn alt( + mut parsers: [&mut dyn FnMut(Cursor<'_>) -> ParsingResult<'_>; N], +) -> impl FnMut(Cursor<'_>) -> ParsingResult<'_> + '_ { + move |c| { + parsers + .iter_mut() + .find_map(|parser| parser(c).map(|(s, c)| (s, c))) + } +} + +/// Parses with the provided `parser` while `until` fails. Returns [`None`] in +/// case `until` succeeded initially or `parser` never succeeded. Doesn't +/// consume tokens parsed by `until`. 
+pub fn take_until1( + mut parser: P, + mut until: U, +) -> impl FnMut(Cursor<'_>) -> ParsingResult<'_> +where + P: FnMut(Cursor<'_>) -> ParsingResult<'_>, + U: FnMut(Cursor<'_>) -> ParsingResult<'_>, +{ + move |mut cursor| { + let mut out = TokenStream::new(); + let mut parsed = false; + + loop { + if cursor.eof() || until(cursor).is_some() { + return parsed.then_some((out, cursor)); + } + + let (stream, c) = parser(cursor)?; + out.extend(stream); + cursor = c; + parsed = true; + } + } +} + +#[cfg(test)] +mod spec { + use std::{fmt::Debug, str::FromStr}; + + use itertools::Itertools as _; + use proc_macro2::TokenStream; + use quote::ToTokens; + use syn::{ + parse::{Parse, Parser as _}, + punctuated::Punctuated, + token::Comma, + }; + + use super::Type; + + fn assert<'a, T: Debug + Parse + ToTokens>( + input: &'a str, + parsed: impl AsRef<[&'a str]>, + ) { + let parsed = parsed.as_ref(); + let punctuated = Punctuated::::parse_terminated + .parse2(TokenStream::from_str(input).unwrap()) + .unwrap(); + + assert_eq!( + parsed.len(), + punctuated.len(), + "Wrong length\n\ + Expected: {parsed:?}\n\ + Found: {punctuated:?}", + ); + + punctuated + .iter() + .map(|ty| ty.to_token_stream().to_string()) + .zip(parsed) + .enumerate() + .for_each(|(i, (found, expected))| { + assert_eq!( + *expected, &found, + "Mismatch at index {i}\n\ + Expected: {parsed:?}\n\ + Found: {punctuated:?}", + ); + }); + } + + mod type_and_tuple { + use super::*; + + #[test] + fn zst_is_tuple() { + let zst = "()"; + match syn::parse_str::(zst).unwrap() { + Type::Tuple { items, .. } => { + assert!(items.is_empty(), "Expected empty tuple, found: {items:?}"); + } + other => panic!("Expected `Type::Tuple {{ .. }}`, found: {other:?}"), + } + } + + #[test] + fn group_not_tuple() { + let group = "(Type)"; + match syn::parse_str::(group).unwrap() { + Type::Other(tokens) => { + assert_eq!(tokens.to_string(), group); + } + tuple => panic!("Expected `Type::Other(_)`, found: {tuple:?}"), + } + } + + #[test] + fn single_element_tuple() { + let tuple = "(Type,)"; + match syn::parse_str::(tuple).unwrap() { + Type::Tuple { items, .. } => { + assert_eq!( + items.len(), + 1, + "Expected empty tuple, found: {items:?}", + ); + assert_eq!(items.first().unwrap().to_string(), "Type"); + } + other => panic!("Expected `Type::Tuple {{ .. 
}}`, found: {other:?}"), + } + } + + #[test] + fn cases() { + let cases = [ + "[Type ; 3]", + "fn (usize) -> bool", + "for <'a > fn (&'a usize) -> bool", + "(Type)", + "path :: to :: Type", + "path :: to :: Generic < Type >", + "< Type as Trait >:: Assoc", + "< Type as Trait >:: Assoc < GAT >", + "* const ()", + "* mut ()", + "& i32", + "&'static str", + "& [str]", + "dyn Trait", + "dyn Trait + Send", + "()", + "(Type ,)", + "(Type , Type)", + "(Type , Type ,)", + ]; + + assert::("", []); + for i in 1..4 { + for permutations in cases.into_iter().permutations(i) { + let mut input = permutations.join(","); + assert::(&input, &permutations); + input.push(','); + assert::(&input, &permutations); + } + } + } + } +} From cc5ea314e41932fbc421e2ca6ebc7e7fcaaf830d Mon Sep 17 00:00:00 2001 From: tyranron Date: Thu, 11 May 2023 19:59:06 +0200 Subject: [PATCH 5/9] Polyfill old `syn::Meta` for nested parsing in `utils` module --- impl/src/utils.rs | 212 +++++++++++++++++++++++++++++++++++++--------- 1 file changed, 170 insertions(+), 42 deletions(-) diff --git a/impl/src/utils.rs b/impl/src/utils.rs index 07937f84..89263781 100644 --- a/impl/src/utils.rs +++ b/impl/src/utils.rs @@ -7,14 +7,10 @@ use proc_macro2::TokenStream; use quote::{format_ident, quote, ToTokens}; use syn::{ - ext::IdentExt as _, - parse::{Parse, ParseStream}, - parse_quote, - punctuated::Punctuated, - spanned::Spanned, - token, Attribute, Data, DeriveInput, Error, Field, Fields, FieldsNamed, - FieldsUnnamed, GenericParam, Generics, Ident, ImplGenerics, Index, Meta, Result, - Token, Type, TypeGenerics, TypeParamBound, Variant, WhereClause, + parse_quote, punctuated::Punctuated, spanned::Spanned, token, Attribute, Data, + DeriveInput, Error, Field, Fields, FieldsNamed, FieldsUnnamed, GenericParam, + Generics, Ident, ImplGenerics, Index, Result, Token, Type, TypeGenerics, + TypeParamBound, Variant, WhereClause, }; #[derive(Clone, Copy, Default)] @@ -915,7 +911,7 @@ fn get_meta_info( } let list = match &attr.meta { - Meta::Path(_) => { + syn::Meta::Path(_) => { if allowed_attr_params.contains(&"ignore") { return Ok(info); } else { @@ -928,8 +924,8 @@ fn get_meta_info( )); } } - Meta::List(list) => list, - Meta::NameValue(val) => { + syn::Meta::List(list) => list, + syn::Meta::NameValue(val) => { return Err(Error::new( val.span(), "Attribute doesn't support name-value format here", @@ -949,13 +945,13 @@ fn get_meta_info( fn parse_punctuated_nested_meta( info: &mut MetaInfo, - meta: &Punctuated, + meta: &Punctuated, allowed_attr_params: &[&str], wrapper_name: Option<&str>, ) -> Result<()> { for meta in meta.iter() { match meta { - Meta::List(list) if list.path.is_ident("not") => { + polyfill::Meta::List(list) if list.path.is_ident("not") => { if wrapper_name.is_some() { // Only single top-level `not` attribute is allowed. return Err(Error::new( @@ -971,7 +967,7 @@ fn parse_punctuated_nested_meta( )?; } - Meta::List(list) => { + polyfill::Meta::List(list) => { let path = &list.path; if !allowed_attr_params.iter().any(|param| path.is_ident(param)) { return Err(Error::new( @@ -999,11 +995,11 @@ fn parse_punctuated_nested_meta( | (Some("ref_mut"), "types") => { parse_nested = false; for meta in &list.parse_args_with( - Punctuated::::parse_terminated, + Punctuated::::parse_terminated, )? 
{ let typ: syn::Type = match meta { - NestedMeta::Meta(meta) => { - let Meta::Path(path) = meta else { + polyfill::NestedMeta::Meta(meta) => { + let polyfill::Meta::Path(path) = meta else { return Err(Error::new( meta.span(), format!( @@ -1014,12 +1010,12 @@ fn parse_punctuated_nested_meta( }; syn::TypePath { qself: None, - path: path.clone(), + path: path.clone().into(), } .into() } - NestedMeta::Lit(syn::Lit::Str(s)) => s.parse()?, - NestedMeta::Lit(lit) => return Err(Error::new( + polyfill::NestedMeta::Lit(syn::Lit::Str(s)) => s.parse()?, + polyfill::NestedMeta::Lit(lit) => return Err(Error::new( lit.span(), "Attribute doesn't support nested literals here", )), @@ -1071,7 +1067,7 @@ fn parse_punctuated_nested_meta( } } - Meta::Path(path) => { + polyfill::Meta::Path(path) => { if !allowed_attr_params.iter().any(|param| path.is_ident(param)) { return Err(Error::new( meta.span(), @@ -1106,35 +1102,167 @@ fn parse_punctuated_nested_meta( } } } - - Meta::NameValue(val) => { - return Err(Error::new( - val.span(), - "Attribute doesn't support name-value parameters here", - )) - } } } Ok(()) } -enum NestedMeta { - Meta(Meta), - Lit(syn::Lit), -} +// TODO: Remove this eventually, once all macros migrate to +// custom typed attributes parsing. +/// Polyfill for [`syn`] 1.x AST. +mod polyfill { + use proc_macro2::TokenStream; + use quote::ToTokens; + use syn::{ + ext::IdentExt as _, + parse::{Parse, ParseStream, Parser}, + token, Token, + }; -impl Parse for NestedMeta { - fn parse(input: ParseStream<'_>) -> Result { - if input.peek(syn::Lit) && !(input.peek(syn::LitBool) && input.peek2(Token![=])) - { - input.parse().map(Self::Lit) - } else if input.peek(Ident::peek_any) - || input.peek(Token![::]) && input.peek3(Ident::peek_any) + #[derive(Clone, Debug)] + pub(super) enum PathOrKeyword { + Path(syn::Path), + Keyword(syn::Ident), + } + + impl Parse for PathOrKeyword { + fn parse(input: ParseStream<'_>) -> syn::Result { + if input.fork().parse::().is_ok() { + return input.parse().map(Self::Path); + } + syn::Ident::parse_any(input).map(Self::Keyword) + } + } + + impl ToTokens for PathOrKeyword { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Path(p) => p.to_tokens(tokens), + Self::Keyword(i) => i.to_tokens(tokens), + } + } + } + + impl PathOrKeyword { + pub(super) fn is_ident(&self, ident: &I) -> bool + where + syn::Ident: PartialEq, { - input.parse().map(Self::Meta) - } else { - Err(input.error("expected identifier or literal")) + match self { + Self::Path(p) => p.is_ident(ident), + Self::Keyword(i) => i == ident, + } + } + + pub fn get_ident(&self) -> Option<&syn::Ident> { + match self { + Self::Path(p) => p.get_ident(), + Self::Keyword(i) => Some(i), + } + } + } + + impl From for syn::Path { + fn from(p: PathOrKeyword) -> Self { + match p { + PathOrKeyword::Path(p) => p, + PathOrKeyword::Keyword(i) => i.into(), + } + } + } + + #[derive(Clone, Debug)] + pub(super) struct MetaList { + pub(super) path: PathOrKeyword, + pub(super) tokens: TokenStream, + } + + impl Parse for MetaList { + fn parse(input: ParseStream<'_>) -> syn::Result { + let path = input.parse::()?; + let tokens; + _ = syn::parenthesized!(tokens in input); + Ok(Self { + path, + tokens: tokens.parse()?, + }) + } + } + + impl ToTokens for MetaList { + fn to_tokens(&self, tokens: &mut TokenStream) { + self.path.to_tokens(tokens); + token::Paren::default() + .surround(tokens, |tokens| self.tokens.to_tokens(tokens)) + } + } + + impl MetaList { + pub fn parse_args_with(&self, parser: F) -> syn::Result { + 
parser.parse2(self.tokens.clone()) + } + } + + #[derive(Clone, Debug)] + pub(super) enum Meta { + Path(PathOrKeyword), + List(MetaList), + } + + impl Parse for Meta { + fn parse(input: ParseStream<'_>) -> syn::Result { + let path = input.parse::()?; + Ok(if input.peek(token::Paren) { + let tokens; + _ = syn::parenthesized!(tokens in input); + Self::List(MetaList { + path, + tokens: tokens.parse()?, + }) + } else { + Self::Path(path) + }) + } + } + + impl ToTokens for Meta { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Path(p) => p.to_tokens(tokens), + Self::List(l) => l.to_tokens(tokens), + } + } + } + + #[derive(Clone, Debug)] + pub(super) enum NestedMeta { + Meta(Meta), + Lit(syn::Lit), + } + + impl Parse for NestedMeta { + fn parse(input: ParseStream<'_>) -> syn::Result { + if input.peek(syn::Lit) + && !(input.peek(syn::LitBool) && input.peek2(Token![=])) + { + input.parse().map(Self::Lit) + } else if input.peek(syn::Ident::peek_any) + || input.peek(Token![::]) && input.peek3(syn::Ident::peek_any) + { + input.parse().map(Self::Meta) + } else { + Err(input.error("expected identifier or literal")) + } + } + } + + impl ToTokens for NestedMeta { + fn to_tokens(&self, tokens: &mut TokenStream) { + match self { + Self::Meta(m) => m.to_tokens(tokens), + Self::Lit(l) => l.to_tokens(tokens), + } } } } From f86b51839b77ec44c658298ad34d4fc5d687006d Mon Sep 17 00:00:00 2001 From: tyranron Date: Thu, 11 May 2023 19:59:24 +0200 Subject: [PATCH 6/9] Revert "Borrow `syn::Type` polyfill" This reverts commit de8f7ab0c7ad89fcad33bb96b555a56432fefa18. --- impl/src/lib.rs | 1 - impl/src/parsing.rs | 311 -------------------------------------------- 2 files changed, 312 deletions(-) delete mode 100644 impl/src/parsing.rs diff --git a/impl/src/lib.rs b/impl/src/lib.rs index 324aa54b..1760086f 100644 --- a/impl/src/lib.rs +++ b/impl/src/lib.rs @@ -8,7 +8,6 @@ use proc_macro::TokenStream; use syn::parse::Error as ParseError; -pub(crate) mod parsing; mod utils; #[cfg(any(feature = "add_assign", feature = "mul_assign"))] diff --git a/impl/src/parsing.rs b/impl/src/parsing.rs deleted file mode 100644 index 92734e90..00000000 --- a/impl/src/parsing.rs +++ /dev/null @@ -1,311 +0,0 @@ -//! Common parsing utilities for derive macros. -//! -//! Fair parsing of [`syn::Type`] requires [`syn`]'s `full` feature to be -//! enabled, which unnecessary increases compile times. As we don't have -//! complex AST manipulation, usually requiring only understanding where -//! syntax item begins and ends, simpler manual parsing is implemented. - -use proc_macro2::{Delimiter, TokenStream}; -use quote::ToTokens; -use syn::{ - buffer::Cursor, - parse::{Parse, ParseStream}, - punctuated::Punctuated, - spanned::Spanned as _, - token, Error, Result, -}; - -/// [`syn::Type`] [`Parse`]ing polyfill. -#[derive(Debug)] -pub(crate) enum Type { - /// [`syn::Type::Tuple`] [`Parse`]ing polyfill. - Tuple { - paren: token::Paren, - items: Punctuated, - }, - - /// Every other [`syn::Type`] variant. 
- Other(TokenStream), -} - -impl Parse for Type { - fn parse(input: ParseStream) -> Result { - input.step(|c| { - let outer = *c; - - if let Some((mut cursor, paren_span, next_item)) = - outer.group(Delimiter::Parenthesis) - { - let mut items = Punctuated::new(); - while !cursor.eof() { - let (stream, c) = Self::parse_other(cursor).ok_or_else(|| { - Error::new(cursor.span(), "failed to parse type") - })?; - items.push_value(stream); - cursor = c; - if let Some((p, c)) = punct(',')(cursor) { - items.push_punct(token::Comma(p.span())); - cursor = c; - } - } - // `(Type)` is equivalent to `Type`, so isn't top-level tuple. - if items.len() == 1 && !items.trailing_punct() { - let stream = outer - .token_tree() - .unwrap_or_else(|| unreachable!()) - .0 - .into_token_stream(); - Ok((Type::Other(stream), next_item)) - } else { - Ok(( - Type::Tuple { - paren: token::Paren(paren_span), - items, - }, - next_item, - )) - } - } else { - Self::parse_other(outer) - .map(|(s, c)| (Self::Other(s), c)) - .ok_or_else(|| Error::new(outer.span(), "failed to parse type")) - } - }) - } -} - -impl ToTokens for Type { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - Type::Tuple { paren, items } => { - paren.surround(tokens, |tokens| items.to_tokens(tokens)) - } - Type::Other(other) => other.to_tokens(tokens), - } - } -} - -impl Type { - /// Parses a single [`Type::Other`]. - pub fn parse_other(c: Cursor<'_>) -> Option<(TokenStream, Cursor<'_>)> { - take_until1( - alt([&mut balanced_pair(punct('<'), punct('>')), &mut token_tree]), - punct(','), - )(c) - } -} - -/// Result of parsing. -type ParsingResult<'a> = Option<(TokenStream, Cursor<'a>)>; - -/// Tries to parse a [`Punct`]. -/// -/// [`Punct`]: proc_macro2::Punct -pub fn punct(p: char) -> impl FnMut(Cursor<'_>) -> ParsingResult<'_> { - move |c| { - c.punct().and_then(|(punct, c)| { - (punct.as_char() == p).then(|| (punct.into_token_stream(), c)) - }) - } -} - -/// Tries to parse any [`TokenTree`]. -/// -/// [`TokenTree`]: proc_macro2::TokenTree -pub fn token_tree(c: Cursor<'_>) -> ParsingResult<'_> { - c.token_tree().map(|(tt, c)| (tt.into_token_stream(), c)) -} - -/// Parses until balanced amount of `open` and `close` or eof. -/// -/// [`Cursor`] should be pointing **right after** the first `open`ing. -pub fn balanced_pair( - mut open: impl FnMut(Cursor<'_>) -> ParsingResult<'_>, - mut close: impl FnMut(Cursor<'_>) -> ParsingResult<'_>, -) -> impl FnMut(Cursor<'_>) -> ParsingResult<'_> { - move |c| { - let (mut out, mut c) = open(c)?; - let mut count = 1; - - while count != 0 { - let (stream, cursor) = if let Some(closing) = close(c) { - count -= 1; - closing - } else if let Some(opening) = open(c) { - count += 1; - opening - } else { - let (tt, c) = c.token_tree()?; - (tt.into_token_stream(), c) - }; - out.extend(stream); - c = cursor; - } - - Some((out, c)) - } -} - -/// Tries to execute the first successful parser of the provided ones. -pub fn alt( - mut parsers: [&mut dyn FnMut(Cursor<'_>) -> ParsingResult<'_>; N], -) -> impl FnMut(Cursor<'_>) -> ParsingResult<'_> + '_ { - move |c| { - parsers - .iter_mut() - .find_map(|parser| parser(c).map(|(s, c)| (s, c))) - } -} - -/// Parses with the provided `parser` while `until` fails. Returns [`None`] in -/// case `until` succeeded initially or `parser` never succeeded. Doesn't -/// consume tokens parsed by `until`. 
-pub fn take_until1( - mut parser: P, - mut until: U, -) -> impl FnMut(Cursor<'_>) -> ParsingResult<'_> -where - P: FnMut(Cursor<'_>) -> ParsingResult<'_>, - U: FnMut(Cursor<'_>) -> ParsingResult<'_>, -{ - move |mut cursor| { - let mut out = TokenStream::new(); - let mut parsed = false; - - loop { - if cursor.eof() || until(cursor).is_some() { - return parsed.then_some((out, cursor)); - } - - let (stream, c) = parser(cursor)?; - out.extend(stream); - cursor = c; - parsed = true; - } - } -} - -#[cfg(test)] -mod spec { - use std::{fmt::Debug, str::FromStr}; - - use itertools::Itertools as _; - use proc_macro2::TokenStream; - use quote::ToTokens; - use syn::{ - parse::{Parse, Parser as _}, - punctuated::Punctuated, - token::Comma, - }; - - use super::Type; - - fn assert<'a, T: Debug + Parse + ToTokens>( - input: &'a str, - parsed: impl AsRef<[&'a str]>, - ) { - let parsed = parsed.as_ref(); - let punctuated = Punctuated::::parse_terminated - .parse2(TokenStream::from_str(input).unwrap()) - .unwrap(); - - assert_eq!( - parsed.len(), - punctuated.len(), - "Wrong length\n\ - Expected: {parsed:?}\n\ - Found: {punctuated:?}", - ); - - punctuated - .iter() - .map(|ty| ty.to_token_stream().to_string()) - .zip(parsed) - .enumerate() - .for_each(|(i, (found, expected))| { - assert_eq!( - *expected, &found, - "Mismatch at index {i}\n\ - Expected: {parsed:?}\n\ - Found: {punctuated:?}", - ); - }); - } - - mod type_and_tuple { - use super::*; - - #[test] - fn zst_is_tuple() { - let zst = "()"; - match syn::parse_str::(zst).unwrap() { - Type::Tuple { items, .. } => { - assert!(items.is_empty(), "Expected empty tuple, found: {items:?}"); - } - other => panic!("Expected `Type::Tuple {{ .. }}`, found: {other:?}"), - } - } - - #[test] - fn group_not_tuple() { - let group = "(Type)"; - match syn::parse_str::(group).unwrap() { - Type::Other(tokens) => { - assert_eq!(tokens.to_string(), group); - } - tuple => panic!("Expected `Type::Other(_)`, found: {tuple:?}"), - } - } - - #[test] - fn single_element_tuple() { - let tuple = "(Type,)"; - match syn::parse_str::(tuple).unwrap() { - Type::Tuple { items, .. } => { - assert_eq!( - items.len(), - 1, - "Expected empty tuple, found: {items:?}", - ); - assert_eq!(items.first().unwrap().to_string(), "Type"); - } - other => panic!("Expected `Type::Tuple {{ .. 
}}`, found: {other:?}"), - } - } - - #[test] - fn cases() { - let cases = [ - "[Type ; 3]", - "fn (usize) -> bool", - "for <'a > fn (&'a usize) -> bool", - "(Type)", - "path :: to :: Type", - "path :: to :: Generic < Type >", - "< Type as Trait >:: Assoc", - "< Type as Trait >:: Assoc < GAT >", - "* const ()", - "* mut ()", - "& i32", - "&'static str", - "& [str]", - "dyn Trait", - "dyn Trait + Send", - "()", - "(Type ,)", - "(Type , Type)", - "(Type , Type ,)", - ]; - - assert::("", []); - for i in 1..4 { - for permutations in cases.into_iter().permutations(i) { - let mut input = permutations.join(","); - assert::(&input, &permutations); - input.push(','); - assert::(&input, &permutations); - } - } - } - } -} From 08555bcc88a589943b71b2bf2db992ded25b2881 Mon Sep 17 00:00:00 2001 From: tyranron Date: Fri, 12 May 2023 19:37:11 +0200 Subject: [PATCH 7/9] Fix `fmt` and `bound` attributes parsing --- impl/src/fmt/debug.rs | 27 +++++-------------- impl/src/fmt/display.rs | 17 ++++-------- impl/src/fmt/mod.rs | 14 +++++----- .../debug/legacy_bound_syntax.stderr | 4 +-- .../debug/legacy_fmt_syntax.stderr | 4 +-- .../display/legacy_bound_syntax.stderr | 4 +-- .../display/legacy_fmt_syntax.stderr | 4 +-- 7 files changed, 25 insertions(+), 49 deletions(-) diff --git a/impl/src/fmt/debug.rs b/impl/src/fmt/debug.rs index ff884dc9..86e823e3 100644 --- a/impl/src/fmt/debug.rs +++ b/impl/src/fmt/debug.rs @@ -165,16 +165,9 @@ impl ContainerAttributes { impl Parse for ContainerAttributes { fn parse(input: ParseStream) -> Result { - use proc_macro2::Delimiter::Parenthesis; + BoundsAttribute::check_legacy_fmt(input)?; - let error_span = input.cursor().group(Parenthesis).map(|(_, span, _)| span); - let content; - syn::parenthesized!(content in input); - let error_span = error_span.unwrap_or_else(|| unreachable!()); - - BoundsAttribute::check_legacy_fmt(&content, error_span.span())?; - - content.parse().map(|bounds| ContainerAttributes { bounds }) + input.parse().map(|bounds| ContainerAttributes { bounds }) } } @@ -220,19 +213,12 @@ impl FieldAttribute { impl Parse for FieldAttribute { fn parse(input: ParseStream) -> Result { - use proc_macro2::Delimiter::Parenthesis; - - let error_span = input.cursor().group(Parenthesis).map(|(_, span, _)| span); - let content; - syn::parenthesized!(content in input); - let error_span = error_span.unwrap_or_else(|| unreachable!()); + FmtAttribute::check_legacy_fmt(input)?; - FmtAttribute::check_legacy_fmt(&content, error_span.span())?; - - if content.peek(syn::LitStr) { - content.parse().map(Self::Fmt) + if input.peek(syn::LitStr) { + input.parse().map(Self::Fmt) } else { - let _ = content.parse::().and_then(|p| { + let _ = input.parse::().and_then(|p| { if ["skip", "ignore"].into_iter().any(|i| p.is_ident(i)) { Ok(p) } else { @@ -242,7 +228,6 @@ impl Parse for FieldAttribute { )) } })?; - Ok(Self::Skip) } } diff --git a/impl/src/fmt/display.rs b/impl/src/fmt/display.rs index 26229533..0e9983a5 100644 --- a/impl/src/fmt/display.rs +++ b/impl/src/fmt/display.rs @@ -252,20 +252,13 @@ enum Attribute { impl Parse for Attribute { fn parse(input: ParseStream<'_>) -> Result { - use proc_macro2::Delimiter::Parenthesis; + BoundsAttribute::check_legacy_fmt(input)?; + FmtAttribute::check_legacy_fmt(input)?; - let error_span = input.cursor().group(Parenthesis).map(|(_, span, _)| span); - let content; - syn::parenthesized!(content in input); - let error_span = error_span.unwrap_or_else(|| unreachable!()); - - BoundsAttribute::check_legacy_fmt(&content, error_span.span())?; - 
FmtAttribute::check_legacy_fmt(&content, error_span.span())?; - - if content.peek(syn::LitStr) { - content.parse().map(Attribute::Fmt) + if input.peek(syn::LitStr) { + input.parse().map(Attribute::Fmt) } else { - content.parse().map(Attribute::Bounds) + input.parse().map(Attribute::Bounds) } } } diff --git a/impl/src/fmt/mod.rs b/impl/src/fmt/mod.rs index 9d06c941..41e57547 100644 --- a/impl/src/fmt/mod.rs +++ b/impl/src/fmt/mod.rs @@ -10,11 +10,11 @@ mod parsing; use std::{iter, mem}; -use proc_macro2::{Ident, Span, TokenStream, TokenTree}; +use proc_macro2::{Ident, TokenStream, TokenTree}; use quote::ToTokens; use syn::{ buffer::Cursor, - parse::{Parse, ParseBuffer, ParseStream}, + parse::{Parse, ParseStream}, punctuated::Punctuated, spanned::Spanned as _, token, Error, Result, @@ -26,13 +26,12 @@ struct BoundsAttribute(Punctuated); impl BoundsAttribute { /// Errors in case legacy syntax is encountered: `bound = "..."`. - fn check_legacy_fmt(input: &ParseBuffer<'_>, error_span: Span) -> Result<()> { + fn check_legacy_fmt(input: ParseStream<'_>) -> Result<()> { let fork = input.fork(); let path = fork .parse::() .and_then(|path| fork.parse::().map(|_| path)); - match path { Ok(path) if path.is_ident("bound") => fork .parse::() @@ -43,7 +42,7 @@ impl BoundsAttribute { }) .map_or(Ok(()), |bound| { Err(Error::new( - error_span, + input.span(), format!("legacy syntax, use `bound({bound})` instead"), )) }), @@ -130,13 +129,12 @@ impl FmtAttribute { } /// Errors in case legacy syntax is encountered: `fmt = "...", (arg),*`. - fn check_legacy_fmt(input: &ParseBuffer<'_>, error_span: Span) -> Result<()> { + fn check_legacy_fmt(input: ParseStream<'_>) -> Result<()> { let fork = input.fork(); let path = fork .parse::() .and_then(|path| fork.parse::().map(|_| path)); - match path { Ok(path) if path.is_ident("fmt") => (|| { let args = fork @@ -157,7 +155,7 @@ impl FmtAttribute { })() .map_or(Ok(()), |fmt| { Err(Error::new( - error_span, + input.span(), format!( "legacy syntax, remove `fmt =` and use `{}` instead", fmt.join(", "), diff --git a/tests/compile_fail/debug/legacy_bound_syntax.stderr b/tests/compile_fail/debug/legacy_bound_syntax.stderr index a182fcfd..e1ae3a94 100644 --- a/tests/compile_fail/debug/legacy_bound_syntax.stderr +++ b/tests/compile_fail/debug/legacy_bound_syntax.stderr @@ -1,5 +1,5 @@ error: legacy syntax, use `bound(String: std::fmt::Display)` instead - --> tests/compile_fail/debug/legacy_bound_syntax.rs:2:8 + --> tests/compile_fail/debug/legacy_bound_syntax.rs:2:9 | 2 | #[debug(bound = "String: std::fmt::Display")] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^ diff --git a/tests/compile_fail/debug/legacy_fmt_syntax.stderr b/tests/compile_fail/debug/legacy_fmt_syntax.stderr index 57e5c78f..30b9b1e4 100644 --- a/tests/compile_fail/debug/legacy_fmt_syntax.stderr +++ b/tests/compile_fail/debug/legacy_fmt_syntax.stderr @@ -1,5 +1,5 @@ error: legacy syntax, remove `fmt =` and use `"Stuff({}): {}", bar` instead - --> tests/compile_fail/debug/legacy_fmt_syntax.rs:3:12 + --> tests/compile_fail/debug/legacy_fmt_syntax.rs:3:13 | 3 | #[debug(fmt = "Stuff({}): {}", "bar")] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^ diff --git a/tests/compile_fail/display/legacy_bound_syntax.stderr b/tests/compile_fail/display/legacy_bound_syntax.stderr index 55b724c0..d96236b0 100644 --- a/tests/compile_fail/display/legacy_bound_syntax.stderr +++ b/tests/compile_fail/display/legacy_bound_syntax.stderr @@ -1,5 +1,5 @@ error: legacy syntax, use `bound(String: std::fmt::Display)` instead - --> 
tests/compile_fail/display/legacy_bound_syntax.rs:2:10 + --> tests/compile_fail/display/legacy_bound_syntax.rs:2:11 | 2 | #[display(bound = "String: std::fmt::Display")] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^^^ diff --git a/tests/compile_fail/display/legacy_fmt_syntax.stderr b/tests/compile_fail/display/legacy_fmt_syntax.stderr index 68e427b8..f533747d 100644 --- a/tests/compile_fail/display/legacy_fmt_syntax.stderr +++ b/tests/compile_fail/display/legacy_fmt_syntax.stderr @@ -1,5 +1,5 @@ error: legacy syntax, remove `fmt =` and use `"Stuff({}): {}", bar` instead - --> tests/compile_fail/display/legacy_fmt_syntax.rs:2:10 + --> tests/compile_fail/display/legacy_fmt_syntax.rs:2:11 | 2 | #[display(fmt = "Stuff({}): {}", "bar")] - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | ^^^ From 20f4cc2d5a94c3bd968f7d4360245cbaccbb121b Mon Sep 17 00:00:00 2001 From: tyranron Date: Fri, 12 May 2023 19:46:42 +0200 Subject: [PATCH 8/9] Fix --- impl/src/utils.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/impl/src/utils.rs b/impl/src/utils.rs index 89263781..3dd24ce3 100644 --- a/impl/src/utils.rs +++ b/impl/src/utils.rs @@ -7,7 +7,7 @@ use proc_macro2::TokenStream; use quote::{format_ident, quote, ToTokens}; use syn::{ - parse_quote, punctuated::Punctuated, spanned::Spanned, token, Attribute, Data, + parse_quote, punctuated::Punctuated, spanned::Spanned, Attribute, Data, DeriveInput, Error, Field, Fields, FieldsNamed, FieldsUnnamed, GenericParam, Generics, Ident, ImplGenerics, Index, Result, Token, Type, TypeGenerics, TypeParamBound, Variant, WhereClause, @@ -995,7 +995,7 @@ fn parse_punctuated_nested_meta( | (Some("ref_mut"), "types") => { parse_nested = false; for meta in &list.parse_args_with( - Punctuated::::parse_terminated, + Punctuated::::parse_terminated, )? { let typ: syn::Type = match meta { polyfill::NestedMeta::Meta(meta) => { @@ -1120,7 +1120,7 @@ mod polyfill { token, Token, }; - #[derive(Clone, Debug)] + #[derive(Clone)] pub(super) enum PathOrKeyword { Path(syn::Path), Keyword(syn::Ident), @@ -1172,7 +1172,7 @@ mod polyfill { } } - #[derive(Clone, Debug)] + #[derive(Clone)] pub(super) struct MetaList { pub(super) path: PathOrKeyword, pub(super) tokens: TokenStream, @@ -1204,7 +1204,7 @@ mod polyfill { } } - #[derive(Clone, Debug)] + #[derive(Clone)] pub(super) enum Meta { Path(PathOrKeyword), List(MetaList), @@ -1235,7 +1235,7 @@ mod polyfill { } } - #[derive(Clone, Debug)] + #[derive(Clone)] pub(super) enum NestedMeta { Meta(Meta), Lit(syn::Lit), From 40babb3c9a797a7acc3f8af5fad4e7f713d3c9f4 Mon Sep 17 00:00:00 2001 From: tyranron Date: Fri, 12 May 2023 19:49:39 +0200 Subject: [PATCH 9/9] Mention in CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 50a206bf..554f3ed4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -53,6 +53,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/). - `#[automatically_derived]` is now emitted from all macro expansions. This should prevent code style linters from attempting to modify the generated code. +- Upgrade to `syn` 2.0. ### Fixed
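
For reference, the core API shift this series performs — from syn 1.x's public `Attribute::path`/`Attribute::tokens` fields and `Attribute::parse_meta()` to syn 2.0's `Attribute::path()`, `Attribute::meta`, and `Attribute::parse_args_with()` — can be sketched roughly as below. This is a standalone illustration, not code from the patches: the `display` attribute name and the `collect_display_args` helper are assumed for the example only.

```rust
use syn::{punctuated::Punctuated, Attribute, Meta, Result, Token};

// Minimal sketch of syn 2.0 attribute handling as adopted in this series.
fn collect_display_args(attrs: &[Attribute]) -> Result<Vec<Meta>> {
    let mut out = Vec::new();
    for attr in attrs {
        // syn 2.0: the attribute path is reached via `path()`; the old
        // public `path`/`tokens` fields are gone.
        if !attr.path().is_ident("display") {
            continue;
        }
        // syn 2.0: parse the attribute arguments directly, instead of
        // re-parsing `attr.tokens` (which included the surrounding
        // parentheses and forced the `syn::parenthesized!` dance removed
        // in patch 7).
        let nested =
            attr.parse_args_with(Punctuated::<Meta, Token![,]>::parse_terminated)?;
        out.extend(nested);
    }
    Ok(out)
}
```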
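
Similarly, syn 2.0 drops `syn::NestedMeta` and `Attribute::parse_meta()` (hence the local `polyfill` module added in patch 5) and changes `ParseStream::parse_terminated` to take the separator as a second argument rather than a turbofish type parameter. A minimal sketch of the new call shape, mirroring the `BoundsAttribute` change in `impl/src/fmt/mod.rs`; the `Bounds` wrapper type here is illustrative only:

```rust
use syn::{
    parse::{Parse, ParseStream},
    punctuated::Punctuated,
    Result, Token, WherePredicate,
};

// Sketch of a comma-separated list of `where`-predicates, as used for
// `#[derive_more(bound(...))]`-style attributes.
struct Bounds(Punctuated<WherePredicate, Token![,]>);

impl Parse for Bounds {
    fn parse(input: ParseStream<'_>) -> Result<Self> {
        // syn 2.0: the separator token is passed as a value argument.
        input
            .parse_terminated(WherePredicate::parse, Token![,])
            .map(Self)
    }
}
```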