
Commit

Merge remote-tracking branch 'origin/master' into aj-skip-type-params
ascjones committed Jun 23, 2021
2 parents 271c2b5 + 11c730f commit a7e1e6b
Showing 5 changed files with 51 additions and 36 deletions.
1 change: 1 addition & 0 deletions .github/workflows/rust.yml
@@ -40,6 +40,7 @@ jobs:
- name: check-features
run: |
cargo check --no-default-features --features bit-vec
cargo check --no-default-features --features docs
cargo check --no-default-features --features serde
cargo check --no-default-features --features serde,decode
6 changes: 5 additions & 1 deletion Cargo.toml
@@ -22,14 +22,18 @@ derive_more = { version = "0.99.1", default-features = false, features = ["from"
scale = { package = "parity-scale-codec", version = "2.1", default-features = false, features = ["derive"] }

[features]
default = ["std"]
default = ["std", "docs"]
std = [
"bitvec/std",
"scale/std",
]
derive = [
"scale-info-derive"
]
# Include rustdoc strings in the type metadata.
docs = [
"scale-info-derive/docs"
]
# enables decoding and deserialization of portable scale-info type metadata
decode = [
"scale/full"
5 changes: 5 additions & 0 deletions derive/Cargo.toml
@@ -19,3 +19,8 @@ quote = "1.0"
syn = { version = "1.0", features = ["derive", "visit", "visit-mut", "extra-traits"] }
proc-macro2 = "1.0"
proc-macro-crate = "1"

[features]
default = ["docs"]
# Include code docs in type metadata.
docs = []
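
The crate-level docs feature above is enabled by default and forwards to the scale-info-derive/docs flag declared here. As a hedged sketch (not part of this commit), a downstream crate could opt out of embedding doc strings while keeping the derive macro roughly like this, where the version is only a placeholder:

[dependencies]
scale-info = { version = "*", default-features = false, features = ["derive", "std"] }

With docs left out, the derive compiles the #[cfg(not(feature = "docs"))] fallback shown in the next file and emits no .docs(...) calls, so the type metadata carries no doc strings.
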
48 changes: 40 additions & 8 deletions derive/src/lib.rs
@@ -105,7 +105,7 @@ fn generate_type(input: TokenStream2) -> Result<TokenStream2> {
Data::Enum(ref e) => generate_variant_type(e, &scale_info),
Data::Union(_) => return Err(Error::new_spanned(input, "Unions not supported")),
};
let docs = utils::get_doc_literals(&ast.attrs);
let docs = generate_docs(&ast.attrs);

let type_info_impl = quote! {
impl #impl_generics :: #scale_info ::TypeInfo for #ident #ty_generics #where_clause {
@@ -114,7 +114,7 @@ fn generate_type(input: TokenStream2) -> Result<TokenStream2> {
:: #scale_info ::Type::builder()
.path(:: #scale_info ::Path::new(::core::stringify!(#ident), ::core::module_path!()))
.type_params(:: #scale_info ::prelude::vec![ #( #type_params_meta_types ),* ])
.docs(&[ #( #docs ),* ])
#docs
.#build_type
}
}
@@ -161,7 +161,7 @@ fn generate_fields(fields: &FieldsList) -> Vec<TokenStream2> {
StaticLifetimesReplace.visit_type_mut(&mut ty);

let type_name = clean_type_string(&quote!(#ty).to_string());
let docs = utils::get_doc_literals(&f.attrs);
let docs = generate_docs(&f.attrs);
let type_of_method = if utils::is_compact(f) {
quote!(compact)
} else {
@@ -177,7 +177,7 @@ fn generate_fields(fields: &FieldsList) -> Vec<TokenStream2> {
.#type_of_method::<#ty>()
#name
.type_name(#type_name)
.docs(&[ #( #docs ),* ])
#docs
)
)
})
@@ -236,12 +236,12 @@ fn generate_c_like_enum_def(variants: &VariantList, scale_info: &Ident) -> Token
.map(|(i, v)| {
let name = &v.ident;
let discriminant = utils::variant_index(v, i);
let docs = utils::get_doc_literals(&v.attrs);
let docs = generate_docs(&v.attrs);
quote! {
.variant(::core::stringify!(#name), |v|
v
.discriminant(#discriminant as ::core::primitive::u64)
.docs(&[ #( #docs ),* ])
#docs
)
}
});
@@ -273,7 +273,7 @@ fn generate_variant_type(data_enum: &DataEnum, scale_info: &Ident) -> TokenStrea
.map(|v| {
let ident = &v.ident;
let v_name = quote! {::core::stringify!(#ident) };
let docs = utils::get_doc_literals(&v.attrs);
let docs = generate_docs(&v.attrs);
let index = utils::maybe_index(v).map(|i| quote!(.index(#i)));

let fields = match v.fields {
@@ -302,7 +302,7 @@ fn generate_variant_type(data_enum: &DataEnum, scale_info: &Ident) -> TokenStrea
.variant(#v_name, |v|
v
.fields(#fields)
.docs(&[ #( #docs ),* ])
#docs
#index
)
}
@@ -314,3 +314,35 @@ fn generate_variant_type(data_enum: &DataEnum, scale_info: &Ident) -> TokenStrea
)
}
}

#[cfg(feature = "docs")]
fn generate_docs(attrs: &[syn::Attribute]) -> Option<TokenStream2> {
    let docs = attrs
        .iter()
        .filter_map(|attr| {
            if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() {
                if meta.path.get_ident().map_or(false, |ident| ident == "doc") {
                    let lit = &meta.lit;
                    let doc_lit = quote!(#lit).to_string();
                    let trimmed_doc_lit =
                        doc_lit.trim_start_matches(r#"" "#).trim_end_matches('"');
                    let lit: syn::Lit = parse_quote!(#trimmed_doc_lit);
                    Some(lit)
                } else {
                    None
                }
            } else {
                None
            }
        })
        .collect::<Vec<_>>();

    Some(quote! {
        .docs(&[ #( #docs ),* ])
    })
}

#[cfg(not(feature = "docs"))]
fn generate_docs(_: &[syn::Attribute]) -> Option<TokenStream2> {
    None
}
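
For context, a usage sketch (not part of this commit): assuming scale-info is built with the "derive" and "docs" features, and assuming the generated Type exposes a docs() accessor, doc comments on a type and its fields end up in the metadata returned by TypeInfo::type_info(). The struct below is purely illustrative.

use scale_info::TypeInfo;

/// Struct-level docs, captured by the derive through the .docs(&[ ... ]) builder call above.
#[derive(TypeInfo)]
pub struct Foo {
    /// Field-level docs flow through generate_fields in the same way.
    pub bar: u32,
}

fn main() {
    // The docs() accessor on the returned Type is assumed here.
    let ty = Foo::type_info();
    // Non-empty when built with the "docs" feature; with the feature disabled
    // the #[cfg(not(feature = "docs"))] stub returns None and nothing is recorded.
    println!("{:?}", ty.docs());
}
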
27 changes: 0 additions & 27 deletions derive/src/utils.rs
@@ -16,15 +16,10 @@
//!
//! NOTE: The code here is copied verbatim from `parity-scale-codec-derive`.
use alloc::{
string::ToString,
vec::Vec,
};
use proc_macro2::TokenStream;
use quote::quote;
use syn::{
parse::Parse,
parse_quote,
spanned::Spanned,
AttrStyle,
Attribute,
@@ -34,28 +29,6 @@ use syn::{
Variant,
};

/// Return all doc attributes literals found.
pub fn get_doc_literals(attrs: &[syn::Attribute]) -> Vec<syn::Lit> {
    attrs
        .iter()
        .filter_map(|attr| {
            if let Ok(syn::Meta::NameValue(meta)) = attr.parse_meta() {
                if meta.path.get_ident().map_or(false, |ident| ident == "doc") {
                    let lit = &meta.lit;
                    let doc_lit = quote!(#lit).to_string();
                    let trimmed_doc_lit =
                        doc_lit.trim_start_matches(r#"" "#).trim_end_matches('"');
                    Some(parse_quote!(#trimmed_doc_lit))
                } else {
                    None
                }
            } else {
                None
            }
        })
        .collect()
}

/// Look for a `#[codec(index = $int)]` attribute on a variant. If no attribute
/// is found, fall back to the discriminant or just the variant index.
pub fn variant_index(v: &Variant, i: usize) -> TokenStream {
