diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index b9c04dea3b..e273f00fe0 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -63,7 +63,7 @@ jobs: uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: nightly + toolchain: stable override: true components: rustfmt diff --git a/.rustfmt.toml b/.rustfmt.toml deleted file mode 100644 index 3340762773..0000000000 --- a/.rustfmt.toml +++ /dev/null @@ -1,64 +0,0 @@ -max_width = 90 # changed -hard_tabs = false -tab_spaces = 4 -newline_style = "Auto" -use_small_heuristics = "Default" -indent_style = "Block" -wrap_comments = false -format_code_in_doc_comments = false -comment_width = 80 -normalize_comments = true # changed -normalize_doc_attributes = false -format_strings = false -format_macro_matchers = false -format_macro_bodies = true -empty_item_single_line = true -struct_lit_single_line = true -fn_single_line = false -where_single_line = false -imports_indent = "Block" -imports_layout = "Vertical" # changed -imports_granularity = "Crate" # changed -reorder_imports = true -reorder_modules = true -reorder_impl_items = false -type_punctuation_density = "Wide" -space_before_colon = false -space_after_colon = true -spaces_around_ranges = false -binop_separator = "Front" -remove_nested_parens = true -combine_control_expr = false # changed -overflow_delimited_expr = false -struct_field_align_threshold = 0 -enum_discrim_align_threshold = 0 -match_arm_blocks = true -force_multiline_blocks = true # changed -fn_params_layout = "Tall" -brace_style = "SameLineWhere" -control_brace_style = "AlwaysSameLine" -trailing_semicolon = false # changed -trailing_comma = "Vertical" -match_block_trailing_comma = false -blank_lines_upper_bound = 1 -blank_lines_lower_bound = 0 -edition = "2021" # changed -version = "One" -merge_derives = true -use_try_shorthand = true # changed -use_field_init_shorthand = true # changed -force_explicit_abi = true -condense_wildcard_suffixes = false 
-color = "Auto" -unstable_features = true # changed -disable_all_formatting = false -skip_children = false -hide_parse_errors = false -error_on_line_overflow = false -error_on_unformatted = false -ignore = [] - -# Below are `rustfmt` internal settings -# -# emit_mode = "Files" -# make_backup = false diff --git a/cli/build.rs b/cli/build.rs index 934108807c..7cb961e1b7 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -1,7 +1,4 @@ -use std::{ - borrow::Cow, - process::Command, -}; +use std::{borrow::Cow, process::Command}; fn main() { // Make git hash available via GIT_HASH build-time env var: diff --git a/cli/src/commands/codegen.rs b/cli/src/commands/codegen.rs index e4ce0ce617..3137b28029 100644 --- a/cli/src/commands/codegen.rs +++ b/cli/src/commands/codegen.rs @@ -5,15 +5,8 @@ use clap::Parser as ClapParser; use color_eyre::eyre; use jsonrpsee::client_transport::ws::Uri; -use std::{ - fs, - io::Read, - path::PathBuf, -}; -use subxt_codegen::{ - DerivesRegistry, - TypeSubstitutes, -}; +use std::{fs, io::Read, path::PathBuf}; +use subxt_codegen::{DerivesRegistry, TypeSubstitutes}; /// Generate runtime API client code from metadata. /// diff --git a/cli/src/commands/compatibility.rs b/cli/src/commands/compatibility.rs index d7ba6299e4..849c40adab 100644 --- a/cli/src/commands/compatibility.rs +++ b/cli/src/commands/compatibility.rs @@ -3,27 +3,13 @@ // see LICENSE for license details. 
use clap::Parser as ClapParser; -use color_eyre::eyre::{ - self, - WrapErr, -}; -use frame_metadata::{ - RuntimeMetadata, - RuntimeMetadataPrefixed, - RuntimeMetadataV14, - META_RESERVED, -}; +use color_eyre::eyre::{self, WrapErr}; +use frame_metadata::{RuntimeMetadata, RuntimeMetadataPrefixed, RuntimeMetadataV14, META_RESERVED}; use jsonrpsee::client_transport::ws::Uri; use scale::Decode; -use serde::{ - Deserialize, - Serialize, -}; +use serde::{Deserialize, Serialize}; use std::collections::HashMap; -use subxt_metadata::{ - get_metadata_hash, - get_pallet_hash, -}; +use subxt_metadata::{get_metadata_hash, get_pallet_hash}; /// Verify metadata compatibility between substrate nodes. #[derive(Debug, ClapParser)] @@ -41,9 +27,7 @@ pub struct Opts { pub async fn run(opts: Opts) -> color_eyre::Result<()> { match opts.pallet { - Some(pallet) => { - handle_pallet_metadata(opts.nodes.as_slice(), pallet.as_str()).await - } + Some(pallet) => handle_pallet_metadata(opts.nodes.as_slice(), pallet.as_str()).await, None => handle_full_metadata(opts.nodes.as_slice()).await, } } @@ -120,17 +104,15 @@ async fn fetch_runtime_metadata(url: &Uri) -> color_eyre::Result Ok(v14), - _ => { - Err(eyre::eyre!( - "Node {:?} with unsupported metadata version: {:?}", - url, - metadata.1 - )) - } + _ => Err(eyre::eyre!( + "Node {:?} with unsupported metadata version: {:?}", + url, + metadata.1 + )), } } diff --git a/cli/src/commands/metadata.rs b/cli/src/commands/metadata.rs index 4fe5ddc743..46f7c5cae6 100644 --- a/cli/src/commands/metadata.rs +++ b/cli/src/commands/metadata.rs @@ -7,10 +7,7 @@ use color_eyre::eyre; use frame_metadata::RuntimeMetadataPrefixed; use jsonrpsee::client_transport::ws::Uri; use scale::Decode; -use std::io::{ - self, - Write, -}; +use std::io::{self, Write}; use subxt_codegen::utils::fetch_metadata_hex; /// Download metadata from a substrate node, for use with `subxt` codegen. 
@@ -48,11 +45,9 @@ pub async fn run(opts: Opts) -> color_eyre::Result<()> { let bytes = hex::decode(hex_data.trim_start_matches("0x"))?; Ok(io::stdout().write_all(&bytes)?) } - _ => { - Err(eyre::eyre!( - "Unsupported format `{}`, expected `json`, `hex` or `bytes`", - opts.format - )) - } + _ => Err(eyre::eyre!( + "Unsupported format `{}`, expected `json`, `hex` or `bytes`", + opts.format + )), } } diff --git a/codegen/src/api/calls.rs b/codegen/src/api/calls.rs index a9db65aee7..b9905e0574 100644 --- a/codegen/src/api/calls.rs +++ b/codegen/src/api/calls.rs @@ -4,25 +4,13 @@ use super::CodegenError; use crate::{ - types::{ - CompositeDefFields, - TypeGenerator, - }, + types::{CompositeDefFields, TypeGenerator}, CratePath, }; -use frame_metadata::{ - v14::RuntimeMetadataV14, - PalletMetadata, -}; -use heck::{ - ToSnakeCase as _, - ToUpperCamelCase as _, -}; +use frame_metadata::{v14::RuntimeMetadataV14, PalletMetadata}; +use heck::{ToSnakeCase as _, ToUpperCamelCase as _}; use proc_macro2::TokenStream as TokenStream2; -use quote::{ - format_ident, - quote, -}; +use quote::{format_ident, quote}; use scale_info::form::PortableForm; /// Generate calls from the provided pallet's metadata. Each call returns a `StaticTxPayload` @@ -59,20 +47,18 @@ pub fn generate_calls( .iter_mut() .map(|(variant_name, struct_def)| { let (call_fn_args, call_args): (Vec<_>, Vec<_>) = match struct_def.fields { - CompositeDefFields::Named(ref named_fields) => { - named_fields - .iter() - .map(|(name, field)| { - let fn_arg_type = &field.type_path; - let call_arg = if field.is_boxed() { - quote! { #name: ::std::boxed::Box::new(#name) } - } else { - quote! { #name } - }; - (quote!( #name: #fn_arg_type ), call_arg) - }) - .unzip() - } + CompositeDefFields::Named(ref named_fields) => named_fields + .iter() + .map(|(name, field)| { + let fn_arg_type = &field.type_path; + let call_arg = if field.is_boxed() { + quote! { #name: ::std::boxed::Box::new(#name) } + } else { + quote! 
{ #name } + }; + (quote!( #name: #fn_arg_type ), call_arg) + }) + .unzip(), CompositeDefFields::NoFields => Default::default(), CompositeDefFields::Unnamed(_) => { return Err(CodegenError::InvalidCallVariant(call.ty.id())) diff --git a/codegen/src/api/constants.rs b/codegen/src/api/constants.rs index 1704e2dbf4..b75c2880a2 100644 --- a/codegen/src/api/constants.rs +++ b/codegen/src/api/constants.rs @@ -2,20 +2,11 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -use crate::{ - types::TypeGenerator, - CratePath, -}; -use frame_metadata::{ - v14::RuntimeMetadataV14, - PalletMetadata, -}; +use crate::{types::TypeGenerator, CratePath}; +use frame_metadata::{v14::RuntimeMetadataV14, PalletMetadata}; use heck::ToSnakeCase as _; use proc_macro2::TokenStream as TokenStream2; -use quote::{ - format_ident, - quote, -}; +use quote::{format_ident, quote}; use scale_info::form::PortableForm; use super::CodegenError; @@ -53,7 +44,7 @@ pub fn generate_constants( ) -> Result { // Early return if the pallet has no constants. if pallet.constants.is_empty() { - return Ok(quote!()) + return Ok(quote!()); } let constants = &pallet.constants; diff --git a/codegen/src/api/events.rs b/codegen/src/api/events.rs index 4576dc7bea..12334f6d31 100644 --- a/codegen/src/api/events.rs +++ b/codegen/src/api/events.rs @@ -2,10 +2,7 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. 
-use crate::{ - types::TypeGenerator, - CratePath, -}; +use crate::{types::TypeGenerator, CratePath}; use frame_metadata::PalletMetadata; use proc_macro2::TokenStream as TokenStream2; use quote::quote; diff --git a/codegen/src/api/mod.rs b/codegen/src/api/mod.rs index a60b77a341..ecda5ee512 100644 --- a/codegen/src/api/mod.rs +++ b/codegen/src/api/mod.rs @@ -14,40 +14,16 @@ use subxt_metadata::get_metadata_per_pallet_hash; use super::DerivesRegistry; use crate::{ ir, - types::{ - CompositeDef, - CompositeDefFields, - TypeGenerator, - TypeSubstitutes, - }, - utils::{ - fetch_metadata_bytes_blocking, - FetchMetadataError, - Uri, - }, + types::{CompositeDef, CompositeDefFields, TypeGenerator, TypeSubstitutes}, + utils::{fetch_metadata_bytes_blocking, FetchMetadataError, Uri}, CratePath, }; use codec::Decode; -use frame_metadata::{ - v14::RuntimeMetadataV14, - RuntimeMetadata, - RuntimeMetadataPrefixed, -}; +use frame_metadata::{v14::RuntimeMetadataV14, RuntimeMetadata, RuntimeMetadataPrefixed}; use heck::ToSnakeCase as _; -use proc_macro2::{ - Span, - TokenStream as TokenStream2, -}; -use quote::{ - format_ident, - quote, -}; -use std::{ - fs, - io::Read, - path, - string::ToString, -}; +use proc_macro2::{Span, TokenStream as TokenStream2}; +use quote::{format_ident, quote}; +use std::{fs, io::Read, path, string::ToString}; use syn::parse_quote; /// Error returned when the Codegen cannot generate the runtime API. @@ -90,7 +66,9 @@ pub enum CodegenError { #[error("Call variant for type {0} must have all named fields. Make sure you are providing a valid metadata V14")] InvalidCallVariant(u32), /// Type should be an variant/enum. - #[error("{0} type should be an variant/enum type. Make sure you are providing a valid metadata V14")] + #[error( + "{0} type should be an variant/enum type. 
Make sure you are providing a valid metadata V14" + )] InvalidType(String), } @@ -460,9 +438,7 @@ impl RuntimeGenerator { let pallets_with_calls: Vec<_> = pallets_with_mod_names .iter() - .filter_map(|(pallet, pallet_mod_name)| { - pallet.calls.as_ref().map(|_| pallet_mod_name) - }) + .filter_map(|(pallet, pallet_mod_name)| pallet.calls.as_ref().map(|_| pallet_mod_name)) .collect(); let rust_items = item_mod_ir.rust_items(); diff --git a/codegen/src/api/storage.rs b/codegen/src/api/storage.rs index 26f5b560b0..759a74ccf3 100644 --- a/codegen/src/api/storage.rs +++ b/codegen/src/api/storage.rs @@ -2,27 +2,15 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -use crate::{ - types::TypeGenerator, - CratePath, -}; +use crate::{types::TypeGenerator, CratePath}; use frame_metadata::{ - v14::RuntimeMetadataV14, - PalletMetadata, - StorageEntryMetadata, - StorageEntryModifier, + v14::RuntimeMetadataV14, PalletMetadata, StorageEntryMetadata, StorageEntryModifier, StorageEntryType, }; use heck::ToSnakeCase as _; use proc_macro2::TokenStream as TokenStream2; -use quote::{ - format_ident, - quote, -}; -use scale_info::{ - form::PortableForm, - TypeDef, -}; +use quote::{format_ident, quote}; +use scale_info::{form::PortableForm, TypeDef}; use super::CodegenError; @@ -127,14 +115,10 @@ fn generate_storage_entry_fns( let pallet_name = &pallet.name; let storage_name = &storage_entry.name; - let storage_hash = subxt_metadata::get_storage_hash( - metadata, - pallet_name, - storage_name, - ) - .map_err(|_| { - CodegenError::MissingStorageMetadata(pallet_name.into(), storage_name.into()) - })?; + let storage_hash = subxt_metadata::get_storage_hash(metadata, pallet_name, storage_name) + .map_err(|_| { + CodegenError::MissingStorageMetadata(pallet_name.into(), storage_name.into()) + })?; let fn_name = format_ident!("{}", storage_entry.name.to_snake_case()); let storage_entry_ty = match storage_entry.ty { diff --git a/codegen/src/lib.rs 
b/codegen/src/lib.rs index de1b569492..40e1c32949 100644 --- a/codegen/src/lib.rs +++ b/codegen/src/lib.rs @@ -53,18 +53,8 @@ pub mod utils; pub use self::{ api::{ - generate_runtime_api_from_bytes, - generate_runtime_api_from_path, - generate_runtime_api_from_url, - CodegenError, - RuntimeGenerator, - }, - types::{ - CratePath, - Derives, - DerivesRegistry, - Module, - TypeGenerator, - TypeSubstitutes, + generate_runtime_api_from_bytes, generate_runtime_api_from_path, + generate_runtime_api_from_url, CodegenError, RuntimeGenerator, }, + types::{CratePath, Derives, DerivesRegistry, Module, TypeGenerator, TypeSubstitutes}, }; diff --git a/codegen/src/types/composite_def.rs b/codegen/src/types/composite_def.rs index 8226883a11..7062f20a0b 100644 --- a/codegen/src/types/composite_def.rs +++ b/codegen/src/types/composite_def.rs @@ -4,26 +4,10 @@ use crate::api::CodegenError; -use super::{ - CratePath, - Derives, - Field, - TypeDefParameters, - TypeGenerator, - TypeParameter, - TypePath, -}; +use super::{CratePath, Derives, Field, TypeDefParameters, TypeGenerator, TypeParameter, TypePath}; use proc_macro2::TokenStream; -use quote::{ - format_ident, - quote, -}; -use scale_info::{ - form::PortableForm, - Type, - TypeDef, - TypeDefPrimitive, -}; +use quote::{format_ident, quote}; +use scale_info::{form::PortableForm, Type, TypeDef, TypeDefPrimitive}; /// Representation of a type which consists of a set of fields. Used to generate Rust code for /// either a standalone `struct` definition, or an `enum` variant. @@ -98,11 +82,7 @@ impl CompositeDef { } /// Construct a definition which will generate code for an `enum` variant. - pub fn enum_variant_def( - ident: &str, - fields: CompositeDefFields, - docs: &[String], - ) -> Self { + pub fn enum_variant_def(ident: &str, fields: CompositeDefFields, docs: &[String]) -> Self { let name = format_ident!("{}", ident); let docs_token = Some(quote! 
{ #( #[doc = #docs ] )* }); Self { @@ -186,20 +166,16 @@ impl CompositeDefFields { type_gen: &TypeGenerator, ) -> Result { if fields.is_empty() { - return Ok(Self::NoFields) + return Ok(Self::NoFields); } let mut named_fields = Vec::new(); let mut unnamed_fields = Vec::new(); for field in fields { - let type_path = - type_gen.resolve_field_type_path(field.ty().id(), parent_type_params); - let field_type = CompositeDefFieldType::new( - field.ty().id(), - type_path, - field.type_name().cloned(), - ); + let type_path = type_gen.resolve_field_type_path(field.ty().id(), parent_type_params); + let field_type = + CompositeDefFieldType::new(field.ty().id(), type_path, field.type_name().cloned()); if let Some(name) = field.name() { let field_name = format_ident!("{}", name); @@ -210,7 +186,7 @@ impl CompositeDefFields { } if !named_fields.is_empty() && !unnamed_fields.is_empty() { - return Err(CodegenError::InvalidFields(name.into())) + return Err(CodegenError::InvalidFields(name.into())); } let res = if !named_fields.is_empty() { diff --git a/codegen/src/types/derives.rs b/codegen/src/types/derives.rs index 1513fb7e6b..51ad0f3614 100644 --- a/codegen/src/types/derives.rs +++ b/codegen/src/types/derives.rs @@ -3,15 +3,9 @@ // see LICENSE for license details. use crate::CratePath; -use syn::{ - parse_quote, - Path, -}; +use syn::{parse_quote, Path}; -use std::collections::{ - HashMap, - HashSet, -}; +use std::collections::{HashMap, HashSet}; #[derive(Debug, Clone)] pub struct DerivesRegistry { @@ -92,17 +86,11 @@ impl Derives { let mut attributes = HashSet::new(); derives.insert(syn::parse_quote!(#crate_path::ext::scale_encode::EncodeAsType)); - let encode_crate_path = - quote::quote! { #crate_path::ext::scale_encode }.to_string(); - attributes.insert( - syn::parse_quote!(#[encode_as_type(crate_path = #encode_crate_path)]), - ); + let encode_crate_path = quote::quote! 
{ #crate_path::ext::scale_encode }.to_string(); + attributes.insert(syn::parse_quote!(#[encode_as_type(crate_path = #encode_crate_path)])); derives.insert(syn::parse_quote!(#crate_path::ext::scale_decode::DecodeAsType)); - let decode_crate_path = - quote::quote! { #crate_path::ext::scale_decode }.to_string(); - attributes.insert( - syn::parse_quote!(#[decode_as_type(crate_path = #decode_crate_path)]), - ); + let decode_crate_path = quote::quote! { #crate_path::ext::scale_decode }.to_string(); + attributes.insert(syn::parse_quote!(#[decode_as_type(crate_path = #decode_crate_path)])); derives.insert(syn::parse_quote!(#crate_path::ext::codec::Encode)); derives.insert(syn::parse_quote!(#crate_path::ext::codec::Decode)); diff --git a/codegen/src/types/mod.rs b/codegen/src/types/mod.rs index f7cc79fd36..c6e575a00a 100644 --- a/codegen/src/types/mod.rs +++ b/codegen/src/types/mod.rs @@ -12,46 +12,20 @@ mod type_def_params; mod type_path; use darling::FromMeta; -use proc_macro2::{ - Ident, - Span, - TokenStream, -}; -use quote::{ - quote, - ToTokens, -}; -use scale_info::{ - form::PortableForm, - PortableRegistry, - Type, - TypeDef, -}; +use proc_macro2::{Ident, Span, TokenStream}; +use quote::{quote, ToTokens}; +use scale_info::{form::PortableForm, PortableRegistry, Type, TypeDef}; use std::collections::BTreeMap; use crate::api::CodegenError; pub use self::{ - composite_def::{ - CompositeDef, - CompositeDefFieldType, - CompositeDefFields, - }, - derives::{ - Derives, - DerivesRegistry, - }, - substitutes::{ - AbsolutePath, - TypeSubstitutes, - }, + composite_def::{CompositeDef, CompositeDefFieldType, CompositeDefFields}, + derives::{Derives, DerivesRegistry}, + substitutes::{AbsolutePath, TypeSubstitutes}, type_def::TypeDefGen, type_def_params::TypeDefParameters, - type_path::{ - TypeParameter, - TypePath, - TypePathType, - }, + type_path::{TypeParameter, TypePath, TypePathType}, }; pub type Field = scale_info::Field; @@ -104,13 +78,13 @@ impl<'a> TypeGenerator<'a> { // 
Don't generate a type if it was substituted - the target type might // not be in the type registry + our resolution already performs the substitution. if self.type_substitutes.for_path(path).is_some() { - continue + continue; } let namespace = path.namespace(); // prelude types e.g. Option/Result have no namespace, so we don't generate them if namespace.is_empty() { - continue + continue; } // Lazily create submodules for the encountered namespace path, if they don't exist @@ -126,12 +100,7 @@ impl<'a> TypeGenerator<'a> { innermost_module.types.insert( path.clone(), - TypeDefGen::from_type( - ty.ty(), - self, - &self.crate_path, - self.should_gen_docs, - )?, + TypeDefGen::from_type(ty.ty(), self, &self.crate_path, self.should_gen_docs)?, ); } @@ -194,7 +163,7 @@ impl<'a> TypeGenerator<'a> { .iter() .find(|tp| tp.concrete_type_id == id) { - return TypePath::Parameter(parent_type_param.clone()) + return TypePath::Parameter(parent_type_param.clone()); } let mut ty = self.resolve_type(id); @@ -212,9 +181,8 @@ impl<'a> TypeGenerator<'a> { .type_params() .iter() .filter_map(|f| { - f.ty().map(|f| { - self.resolve_type_path_recurse(f.id(), false, parent_type_params) - }) + f.ty() + .map(|f| self.resolve_type_path_recurse(f.id(), false, parent_type_params)) }) .collect(); @@ -239,71 +207,53 @@ impl<'a> TypeGenerator<'a> { ) } } - TypeDef::Primitive(primitive) => { - TypePathType::Primitive { - def: primitive.clone(), - } - } - TypeDef::Array(arr) => { - TypePathType::Array { - len: arr.len() as usize, - of: Box::new(self.resolve_type_path_recurse( - arr.type_param().id(), - false, - parent_type_params, - )), - } - } - TypeDef::Sequence(seq) => { - TypePathType::Vec { - of: Box::new(self.resolve_type_path_recurse( - seq.type_param().id(), - false, - parent_type_params, - )), - } - } - TypeDef::Tuple(tuple) => { - TypePathType::Tuple { - elements: tuple - .fields() - .iter() - .map(|f| { - self.resolve_type_path_recurse( - f.id(), - false, - parent_type_params, - ) - }) - 
.collect(), - } - } - TypeDef::Compact(compact) => { - TypePathType::Compact { - inner: Box::new(self.resolve_type_path_recurse( - compact.type_param().id(), - false, - parent_type_params, - )), - is_field, - crate_path: self.crate_path.clone(), - } - } - TypeDef::BitSequence(bitseq) => { - TypePathType::BitVec { - bit_order_type: Box::new(self.resolve_type_path_recurse( - bitseq.bit_order_type().id(), - false, - parent_type_params, - )), - bit_store_type: Box::new(self.resolve_type_path_recurse( - bitseq.bit_store_type().id(), - false, - parent_type_params, - )), - crate_path: self.crate_path.clone(), - } - } + TypeDef::Primitive(primitive) => TypePathType::Primitive { + def: primitive.clone(), + }, + TypeDef::Array(arr) => TypePathType::Array { + len: arr.len() as usize, + of: Box::new(self.resolve_type_path_recurse( + arr.type_param().id(), + false, + parent_type_params, + )), + }, + TypeDef::Sequence(seq) => TypePathType::Vec { + of: Box::new(self.resolve_type_path_recurse( + seq.type_param().id(), + false, + parent_type_params, + )), + }, + TypeDef::Tuple(tuple) => TypePathType::Tuple { + elements: tuple + .fields() + .iter() + .map(|f| self.resolve_type_path_recurse(f.id(), false, parent_type_params)) + .collect(), + }, + TypeDef::Compact(compact) => TypePathType::Compact { + inner: Box::new(self.resolve_type_path_recurse( + compact.type_param().id(), + false, + parent_type_params, + )), + is_field, + crate_path: self.crate_path.clone(), + }, + TypeDef::BitSequence(bitseq) => TypePathType::BitVec { + bit_order_type: Box::new(self.resolve_type_path_recurse( + bitseq.bit_order_type().id(), + false, + parent_type_params, + )), + bit_store_type: Box::new(self.resolve_type_path_recurse( + bitseq.bit_store_type().id(), + false, + parent_type_params, + )), + crate_path: self.crate_path.clone(), + }, }; TypePath::Type(ty) @@ -372,9 +322,7 @@ impl Module { } /// Returns the generated types. 
- pub fn types( - &self, - ) -> impl Iterator, &TypeDefGen)> { + pub fn types(&self) -> impl Iterator, &TypeDefGen)> { self.types.iter() } diff --git a/codegen/src/types/substitutes.rs b/codegen/src/types/substitutes.rs index a263d87690..741b5463fa 100644 --- a/codegen/src/types/substitutes.rs +++ b/codegen/src/types/substitutes.rs @@ -2,18 +2,9 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -use crate::{ - api::CodegenError, - CratePath, -}; -use std::{ - borrow::Cow, - collections::HashMap, -}; -use syn::{ - parse_quote, - spanned::Spanned as _, -}; +use crate::{api::CodegenError, CratePath}; +use std::{borrow::Cow, collections::HashMap}; +use syn::{parse_quote, spanned::Spanned as _}; use super::TypePath; @@ -160,8 +151,7 @@ impl TypeSubstitutes { .iter() .position(|&src| src == arg) .map(|src_idx| { - u8::try_from(src_idx) - .expect("type arguments to be fewer than 256; qed") + u8::try_from(src_idx).expect("type arguments to be fewer than 256; qed") }) }) .collect(); @@ -201,15 +191,13 @@ impl TypeSubstitutes { mapping: &TypeParamMapping, ) -> Cow<'a, [TypePath]> { match mapping { - TypeParamMapping::Specified(mapping) => { - Cow::Owned( - mapping - .iter() - .filter_map(|&idx| params.get(idx as usize)) - .cloned() - .collect(), - ) - } + TypeParamMapping::Specified(mapping) => Cow::Owned( + mapping + .iter() + .filter_map(|&idx| params.get(idx as usize)) + .cloned() + .collect(), + ), _ => Cow::Borrowed(params), } } @@ -259,13 +247,9 @@ fn type_args(path_args: &syn::PathArguments) -> impl Iterator _ => None, }; - args_opt.into_iter().flatten().filter_map(|arg| { - match arg { - syn::GenericArgument::Type(syn::Type::Path(type_path)) => { - Some(&type_path.path) - } - _ => None, - } + args_opt.into_iter().flatten().filter_map(|arg| match arg { + syn::GenericArgument::Type(syn::Type::Path(type_path)) => Some(&type_path.path), + _ => None, }) } diff --git a/codegen/src/types/tests.rs b/codegen/src/types/tests.rs 
index f42cf8211b..2936af4de8 100644 --- a/codegen/src/types/tests.rs +++ b/codegen/src/types/tests.rs @@ -4,12 +4,7 @@ use super::*; use pretty_assertions::assert_eq; -use scale_info::{ - meta_type, - scale, - Registry, - TypeInfo, -}; +use scale_info::{meta_type, scale, Registry, TypeInfo}; use syn::parse_quote; const MOD_PATH: &[&str] = &["subxt_codegen", "types", "tests"]; @@ -813,10 +808,7 @@ fn generics_nested() { #[test] fn generate_bitvec() { use bitvec::{ - order::{ - Lsb0, - Msb0, - }, + order::{Lsb0, Msb0}, vec::BitVec, }; diff --git a/codegen/src/types/type_def.rs b/codegen/src/types/type_def.rs index 2cc669a2d3..340e70ab53 100644 --- a/codegen/src/types/type_def.rs +++ b/codegen/src/types/type_def.rs @@ -5,24 +5,12 @@ use crate::api::CodegenError; use super::{ - CompositeDef, - CompositeDefFields, - CratePath, - Derives, - TypeDefParameters, - TypeGenerator, + CompositeDef, CompositeDefFields, CratePath, Derives, TypeDefParameters, TypeGenerator, TypeParameter, }; use proc_macro2::TokenStream; -use quote::{ - format_ident, - quote, -}; -use scale_info::{ - form::PortableForm, - Type, - TypeDef, -}; +use quote::{format_ident, quote}; +use scale_info::{form::PortableForm, Type, TypeDef}; use syn::parse_quote; /// Generates a Rust `struct` or `enum` definition based on the supplied [`scale-info::Type`]. 
@@ -55,18 +43,16 @@ impl TypeDefGen { .type_params() .iter() .enumerate() - .filter_map(|(i, tp)| { - match tp.ty() { - Some(ty) => { - let tp_name = format_ident!("_{}", i); - Some(TypeParameter { - concrete_type_id: ty.id(), - original_name: tp.name().clone(), - name: tp_name, - }) - } - None => None, + .filter_map(|(i, tp)| match tp.ty() { + Some(ty) => { + let tp_name = format_ident!("_{}", i); + Some(TypeParameter { + concrete_type_id: ty.id(), + original_name: tp.name().clone(), + name: tp_name, + }) } + None => None, }) .collect::>(); @@ -109,10 +95,8 @@ impl TypeDefGen { type_gen, )?; type_params.update_unused(fields.field_types()); - let docs = - should_gen_docs.then_some(v.docs()).unwrap_or_default(); - let variant_def = - CompositeDef::enum_variant_def(v.name(), fields, docs); + let docs = should_gen_docs.then_some(v.docs()).unwrap_or_default(); + let variant_def = CompositeDef::enum_variant_def(v.name(), fields, docs); Ok((v.index(), variant_def)) }) .collect::, CodegenError>>()?; diff --git a/codegen/src/types/type_def_params.rs b/codegen/src/types/type_def_params.rs index 1208325c6c..a43c8a33ff 100644 --- a/codegen/src/types/type_def_params.rs +++ b/codegen/src/types/type_def_params.rs @@ -27,10 +27,7 @@ impl TypeDefParameters { /// Update the set of unused type parameters by removing those that are used in the given /// fields. - pub fn update_unused<'a>( - &mut self, - fields: impl Iterator, - ) { + pub fn update_unused<'a>(&mut self, fields: impl Iterator) { let mut used_type_params = BTreeSet::new(); for field in fields { field.type_path.parent_type_params(&mut used_type_params) @@ -43,7 +40,7 @@ impl TypeDefParameters { /// Construct a [`core::marker::PhantomData`] for the type unused type params. 
pub fn unused_params_phantom_data(&self) -> Option { if self.unused.is_empty() { - return None + return None; } let params = if self.unused.len() == 1 { let param = self diff --git a/codegen/src/types/type_path.rs b/codegen/src/types/type_path.rs index 9c3df5d748..a0ab8f4364 100644 --- a/codegen/src/types/type_path.rs +++ b/codegen/src/types/type_path.rs @@ -4,16 +4,9 @@ use crate::CratePath; -use proc_macro2::{ - Ident, - TokenStream, -}; +use proc_macro2::{Ident, TokenStream}; use quote::format_ident; -use scale_info::{ - form::PortableForm, - Path, - TypeDefPrimitive, -}; +use scale_info::{form::PortableForm, Path, TypeDefPrimitive}; use std::collections::BTreeSet; use syn::parse_quote; @@ -135,10 +128,7 @@ impl TypePathType { let mut ty_path = path_segments .iter() .map(|s| syn::PathSegment::from(format_ident!("{}", s))) - .collect::>(); + .collect::>(); ty_path.insert(0, syn::PathSegment::from(root_mod_ident)); parse_quote!( #ty_path ) } @@ -208,25 +198,23 @@ impl TypePathType { let tuple = parse_quote! 
{ (#( # elements, )* ) }; syn::Type::Tuple(tuple) } - TypePathType::Primitive { def } => { - syn::Type::Path(match def { - TypeDefPrimitive::Bool => parse_quote!(::core::primitive::bool), - TypeDefPrimitive::Char => parse_quote!(::core::primitive::char), - TypeDefPrimitive::Str => parse_quote!(::std::string::String), - TypeDefPrimitive::U8 => parse_quote!(::core::primitive::u8), - TypeDefPrimitive::U16 => parse_quote!(::core::primitive::u16), - TypeDefPrimitive::U32 => parse_quote!(::core::primitive::u32), - TypeDefPrimitive::U64 => parse_quote!(::core::primitive::u64), - TypeDefPrimitive::U128 => parse_quote!(::core::primitive::u128), - TypeDefPrimitive::U256 => unimplemented!("not a rust primitive"), - TypeDefPrimitive::I8 => parse_quote!(::core::primitive::i8), - TypeDefPrimitive::I16 => parse_quote!(::core::primitive::i16), - TypeDefPrimitive::I32 => parse_quote!(::core::primitive::i32), - TypeDefPrimitive::I64 => parse_quote!(::core::primitive::i64), - TypeDefPrimitive::I128 => parse_quote!(::core::primitive::i128), - TypeDefPrimitive::I256 => unimplemented!("not a rust primitive"), - }) - } + TypePathType::Primitive { def } => syn::Type::Path(match def { + TypeDefPrimitive::Bool => parse_quote!(::core::primitive::bool), + TypeDefPrimitive::Char => parse_quote!(::core::primitive::char), + TypeDefPrimitive::Str => parse_quote!(::std::string::String), + TypeDefPrimitive::U8 => parse_quote!(::core::primitive::u8), + TypeDefPrimitive::U16 => parse_quote!(::core::primitive::u16), + TypeDefPrimitive::U32 => parse_quote!(::core::primitive::u32), + TypeDefPrimitive::U64 => parse_quote!(::core::primitive::u64), + TypeDefPrimitive::U128 => parse_quote!(::core::primitive::u128), + TypeDefPrimitive::U256 => unimplemented!("not a rust primitive"), + TypeDefPrimitive::I8 => parse_quote!(::core::primitive::i8), + TypeDefPrimitive::I16 => parse_quote!(::core::primitive::i16), + TypeDefPrimitive::I32 => parse_quote!(::core::primitive::i32), + TypeDefPrimitive::I64 => 
parse_quote!(::core::primitive::i64), + TypeDefPrimitive::I128 => parse_quote!(::core::primitive::i128), + TypeDefPrimitive::I256 => unimplemented!("not a rust primitive"), + }), TypePathType::Compact { inner, is_field, diff --git a/codegen/src/utils/fetch_metadata.rs b/codegen/src/utils/fetch_metadata.rs index 9c63f495c0..57ecdf2f3c 100644 --- a/codegen/src/utils/fetch_metadata.rs +++ b/codegen/src/utils/fetch_metadata.rs @@ -1,13 +1,7 @@ use jsonrpsee::{ async_client::ClientBuilder, - client_transport::ws::{ - Uri, - WsTransportClientBuilder, - }, - core::{ - client::ClientT, - Error, - }, + client_transport::ws::{Uri, WsTransportClientBuilder}, + core::{client::ClientT, Error}, http_client::HttpClientBuilder, rpc_params, }; diff --git a/codegen/src/utils/mod.rs b/codegen/src/utils/mod.rs index 0e29fc7561..ec9a9d0f6e 100644 --- a/codegen/src/utils/mod.rs +++ b/codegen/src/utils/mod.rs @@ -4,9 +4,6 @@ mod fetch_metadata; pub use jsonrpsee::client_transport::ws::Uri; pub use fetch_metadata::{ - fetch_metadata_bytes, - fetch_metadata_bytes_blocking, - fetch_metadata_hex, - fetch_metadata_hex_blocking, - FetchMetadataError, + fetch_metadata_bytes, fetch_metadata_bytes_blocking, fetch_metadata_hex, + fetch_metadata_hex_blocking, FetchMetadataError, }; diff --git a/examples/examples/balance_transfer.rs b/examples/examples/balance_transfer.rs index b297c9776a..c15e79b9b3 100644 --- a/examples/examples/balance_transfer.rs +++ b/examples/examples/balance_transfer.rs @@ -11,11 +11,7 @@ //! 
``` use sp_keyring::AccountKeyring; -use subxt::{ - tx::PairSigner, - OnlineClient, - PolkadotConfig, -}; +use subxt::{tx::PairSigner, OnlineClient, PolkadotConfig}; #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] pub mod polkadot {} diff --git a/examples/examples/balance_transfer_with_params.rs b/examples/examples/balance_transfer_with_params.rs index be53272d23..fd00ba48cc 100644 --- a/examples/examples/balance_transfer_with_params.rs +++ b/examples/examples/balance_transfer_with_params.rs @@ -13,11 +13,7 @@ use sp_keyring::AccountKeyring; use subxt::{ config::{ - polkadot::{ - Era, - PlainTip, - PolkadotExtrinsicParamsBuilder as Params, - }, + polkadot::{Era, PlainTip, PolkadotExtrinsicParamsBuilder as Params}, PolkadotConfig, }, tx::PairSigner, diff --git a/examples/examples/concurrent_storage_requests.rs b/examples/examples/concurrent_storage_requests.rs index 688fea960e..25c80c7244 100644 --- a/examples/examples/concurrent_storage_requests.rs +++ b/examples/examples/concurrent_storage_requests.rs @@ -12,10 +12,7 @@ use futures::join; use sp_keyring::AccountKeyring; -use subxt::{ - OnlineClient, - PolkadotConfig, -}; +use subxt::{OnlineClient, PolkadotConfig}; #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] pub mod polkadot {} diff --git a/examples/examples/custom_config.rs b/examples/examples/custom_config.rs index 0b4f80da3d..f92ef059fc 100644 --- a/examples/examples/custom_config.rs +++ b/examples/examples/custom_config.rs @@ -7,11 +7,7 @@ use sp_keyring::AccountKeyring; use subxt::{ - config::{ - substrate::SubstrateExtrinsicParams, - Config, - SubstrateConfig, - }, + config::{substrate::SubstrateExtrinsicParams, Config, SubstrateConfig}, tx::PairSigner, OnlineClient, }; diff --git a/examples/examples/custom_rpc_client.rs b/examples/examples/custom_rpc_client.rs index ca56a53e9c..5a463ab44e 100644 --- a/examples/examples/custom_rpc_client.rs +++ b/examples/examples/custom_rpc_client.rs @@ 
-5,20 +5,11 @@ use std::{ fmt::Write, pin::Pin, - sync::{ - Arc, - Mutex, - }, + sync::{Arc, Mutex}, }; use subxt::{ - rpc::{ - RawValue, - RpcClientT, - RpcFuture, - RpcSubscription, - }, - OnlineClient, - PolkadotConfig, + rpc::{RawValue, RpcClientT, RpcFuture, RpcSubscription}, + OnlineClient, PolkadotConfig, }; // A dummy RPC client that doesn't actually handle requests properly diff --git a/examples/examples/dynamic_multisig.rs b/examples/examples/dynamic_multisig.rs index 001a26fce7..6c8e47341c 100644 --- a/examples/examples/dynamic_multisig.rs +++ b/examples/examples/dynamic_multisig.rs @@ -11,12 +11,7 @@ //! ``` use sp_keyring::AccountKeyring; -use subxt::{ - dynamic::Value, - tx::PairSigner, - OnlineClient, - PolkadotConfig, -}; +use subxt::{dynamic::Value, tx::PairSigner, OnlineClient, PolkadotConfig}; #[tokio::main] async fn main() -> Result<(), Box> { diff --git a/examples/examples/dynamic_queries.rs b/examples/examples/dynamic_queries.rs index 5607f5a2ee..61437ecfbd 100644 --- a/examples/examples/dynamic_queries.rs +++ b/examples/examples/dynamic_queries.rs @@ -13,12 +13,7 @@ // This example showcases working with dynamic values rather than those that are generated via the subxt proc macro. use sp_keyring::AccountKeyring; -use subxt::{ - dynamic::Value, - tx::PairSigner, - OnlineClient, - PolkadotConfig, -}; +use subxt::{dynamic::Value, tx::PairSigner, OnlineClient, PolkadotConfig}; #[tokio::main] async fn main() -> Result<(), Box> { diff --git a/examples/examples/fetch_all_accounts.rs b/examples/examples/fetch_all_accounts.rs index 2d8b18f9c9..a578f13cb3 100644 --- a/examples/examples/fetch_all_accounts.rs +++ b/examples/examples/fetch_all_accounts.rs @@ -10,10 +10,7 @@ //! polkadot --dev --tmp //! 
``` -use subxt::{ - OnlineClient, - PolkadotConfig, -}; +use subxt::{OnlineClient, PolkadotConfig}; #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] pub mod polkadot {} diff --git a/examples/examples/fetch_constants.rs b/examples/examples/fetch_constants.rs index f8c2a11797..817893c0da 100644 --- a/examples/examples/fetch_constants.rs +++ b/examples/examples/fetch_constants.rs @@ -10,10 +10,7 @@ //! polkadot --dev --tmp //! ``` -use subxt::{ - OnlineClient, - PolkadotConfig, -}; +use subxt::{OnlineClient, PolkadotConfig}; // Generate the API from a static metadata path. #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] diff --git a/examples/examples/fetch_staking_details.rs b/examples/examples/fetch_staking_details.rs index ce678cfa46..cd0af22b54 100644 --- a/examples/examples/fetch_staking_details.rs +++ b/examples/examples/fetch_staking_details.rs @@ -10,16 +10,9 @@ //! polkadot --dev --tmp //! ``` -use sp_core::{ - sr25519, - Pair, -}; +use sp_core::{sr25519, Pair}; use sp_keyring::AccountKeyring; -use subxt::{ - utils::AccountId32, - OnlineClient, - PolkadotConfig, -}; +use subxt::{utils::AccountId32, OnlineClient, PolkadotConfig}; #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] pub mod polkadot {} diff --git a/examples/examples/metadata_compatibility.rs b/examples/examples/metadata_compatibility.rs index 70f645dd9a..c961ed3133 100644 --- a/examples/examples/metadata_compatibility.rs +++ b/examples/examples/metadata_compatibility.rs @@ -10,10 +10,7 @@ //! polkadot --dev --tmp //! 
``` -use subxt::{ - OnlineClient, - PolkadotConfig, -}; +use subxt::{OnlineClient, PolkadotConfig}; #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] pub mod polkadot {} diff --git a/examples/examples/multisig.rs b/examples/examples/multisig.rs index 7daa281c0a..9cf246dabe 100644 --- a/examples/examples/multisig.rs +++ b/examples/examples/multisig.rs @@ -11,11 +11,7 @@ //! ``` use sp_keyring::AccountKeyring; -use subxt::{ - tx::PairSigner, - OnlineClient, - PolkadotConfig, -}; +use subxt::{tx::PairSigner, OnlineClient, PolkadotConfig}; #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] pub mod polkadot {} diff --git a/examples/examples/rpc_call.rs b/examples/examples/rpc_call.rs index beda078021..75766b7eaf 100644 --- a/examples/examples/rpc_call.rs +++ b/examples/examples/rpc_call.rs @@ -10,10 +10,7 @@ //! polkadot --dev --tmp //! ``` -use subxt::{ - OnlineClient, - PolkadotConfig, -}; +use subxt::{OnlineClient, PolkadotConfig}; #[tokio::main] async fn main() -> Result<(), Box> { diff --git a/examples/examples/rpc_call_subscribe_blocks.rs b/examples/examples/rpc_call_subscribe_blocks.rs index 7081adf3f1..2a816d26aa 100644 --- a/examples/examples/rpc_call_subscribe_blocks.rs +++ b/examples/examples/rpc_call_subscribe_blocks.rs @@ -10,18 +10,13 @@ //! polkadot --dev --tmp //! 
``` -use subxt::{ - config::Header, - OnlineClient, - PolkadotConfig, -}; +use subxt::{config::Header, OnlineClient, PolkadotConfig}; #[tokio::main] async fn main() -> Result<(), Box> { tracing_subscriber::fmt::init(); - let api = - OnlineClient::::from_url("wss://rpc.polkadot.io:443").await?; + let api = OnlineClient::::from_url("wss://rpc.polkadot.io:443").await?; // For non-finalised blocks use `.subscribe_blocks()` let mut blocks = api.rpc().subscribe_finalized_block_headers().await?; diff --git a/examples/examples/runtime_types_only.rs b/examples/examples/runtime_types_only.rs index 2599dd014d..e1270a4f81 100644 --- a/examples/examples/runtime_types_only.rs +++ b/examples/examples/runtime_types_only.rs @@ -11,11 +11,7 @@ use sp_core::H256; use sp_runtime::{ generic, - traits::{ - BlakeTwo256, - Block as _, - Header as _, - }, + traits::{BlakeTwo256, Block as _, Header as _}, Digest, }; use subxt::PolkadotConfig; diff --git a/examples/examples/storage_iterating.rs b/examples/examples/storage_iterating.rs index e3d6a1da56..8fc8f65584 100644 --- a/examples/examples/storage_iterating.rs +++ b/examples/examples/storage_iterating.rs @@ -10,14 +10,8 @@ //! polkadot --dev --tmp //! ``` -use codec::{ - Decode, - Encode, -}; -use subxt::{ - OnlineClient, - PolkadotConfig, -}; +use codec::{Decode, Encode}; +use subxt::{OnlineClient, PolkadotConfig}; #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] pub mod polkadot {} @@ -60,9 +54,7 @@ async fn main() -> Result<(), Box> { for key in keys.iter() { println!("Key: 0x{}", hex::encode(key)); - if let Some(storage_data) = - api.storage().at(None).await?.fetch_raw(&key.0).await? - { + if let Some(storage_data) = api.storage().at(None).await?.fetch_raw(&key.0).await? 
{ // We know the return value to be `QueryId` (`u64`) from inspecting either: // - polkadot code // - polkadot.rs generated file under `version_notifiers()` fn @@ -102,9 +94,7 @@ async fn main() -> Result<(), Box> { for key in keys.iter() { println!("Key: 0x{}", hex::encode(key)); - if let Some(storage_data) = - api.storage().at(None).await?.fetch_raw(&key.0).await? - { + if let Some(storage_data) = api.storage().at(None).await?.fetch_raw(&key.0).await? { // We know the return value to be `QueryId` (`u64`) from inspecting either: // - polkadot code // - polkadot.rs generated file under `version_notifiers()` fn diff --git a/examples/examples/submit_and_watch.rs b/examples/examples/submit_and_watch.rs index 6776146069..6e4b758ac2 100644 --- a/examples/examples/submit_and_watch.rs +++ b/examples/examples/submit_and_watch.rs @@ -12,11 +12,7 @@ use futures::StreamExt; use sp_keyring::AccountKeyring; -use subxt::{ - tx::PairSigner, - OnlineClient, - PolkadotConfig, -}; +use subxt::{tx::PairSigner, OnlineClient, PolkadotConfig}; #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] pub mod polkadot {} @@ -50,8 +46,7 @@ async fn simple_transfer() -> Result<(), Box> { .wait_for_finalized_success() .await?; - let transfer_event = - balance_transfer.find_first::()?; + let transfer_event = balance_transfer.find_first::()?; if let Some(event) = transfer_event { println!("Balance transfer success: {event:?}"); @@ -88,8 +83,7 @@ async fn simple_transfer_separate_events() -> Result<(), Box()?; + let failed_event = events.find_first::()?; if let Some(_ev) = failed_event { // We found a failed event; the transfer didn't succeed. 
@@ -97,8 +91,7 @@ async fn simple_transfer_separate_events() -> Result<(), Box()?; + let transfer_event = events.find_first::()?; if let Some(event) = transfer_event { println!("Balance transfer success: {event:?}"); } else { @@ -138,13 +131,10 @@ async fn handle_transfer_events() -> Result<(), Box> { ); let events = details.wait_for_success().await?; - let transfer_event = - events.find_first::()?; + let transfer_event = events.find_first::()?; if let Some(event) = transfer_event { - println!( - "Balance transfer is now in block (but not finalized): {event:?}" - ); + println!("Balance transfer is now in block (but not finalized): {event:?}"); } else { println!("Failed to find Balances::Transfer Event"); } @@ -158,8 +148,7 @@ async fn handle_transfer_events() -> Result<(), Box> { ); let events = details.wait_for_success().await?; - let transfer_event = - events.find_first::()?; + let transfer_event = events.find_first::()?; if let Some(event) = transfer_event { println!("Balance transfer success: {event:?}"); diff --git a/examples/examples/subscribe_block_events.rs b/examples/examples/subscribe_block_events.rs index b1c60580e5..1737fd1ff4 100644 --- a/examples/examples/subscribe_block_events.rs +++ b/examples/examples/subscribe_block_events.rs @@ -13,11 +13,7 @@ use futures::StreamExt; use sp_keyring::AccountKeyring; use std::time::Duration; -use subxt::{ - tx::PairSigner, - OnlineClient, - PolkadotConfig, -}; +use subxt::{tx::PairSigner, OnlineClient, PolkadotConfig}; #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] pub mod polkadot {} @@ -43,10 +39,9 @@ async fn main() -> Result<(), Box> { // Make small balance transfers from Alice to Bob in a loop: loop { - let transfer_tx = polkadot::tx().balances().transfer( - AccountKeyring::Bob.to_account_id().into(), - transfer_amount, - ); + let transfer_tx = polkadot::tx() + .balances() + .transfer(AccountKeyring::Bob.to_account_id().into(), transfer_amount); api.tx() 
.sign_and_submit_default(&transfer_tx, &signer) .await @@ -76,14 +71,11 @@ async fn main() -> Result<(), Box> { .is_some(); let pallet = event.pallet_name(); let variant = event.variant_name(); - println!( - " {pallet}::{variant} (is balance transfer? {is_balance_transfer})" - ); + println!(" {pallet}::{variant} (is balance transfer? {is_balance_transfer})"); } // Or we can find the first transfer event, ignoring any others: - let transfer_event = - events.find_first::()?; + let transfer_event = events.find_first::()?; if let Some(ev) = transfer_event { println!(" - Balance transfer success: value: {:?}", ev.amount); diff --git a/examples/examples/subscribe_blocks.rs b/examples/examples/subscribe_blocks.rs index 1838952a46..c9a277e32b 100644 --- a/examples/examples/subscribe_blocks.rs +++ b/examples/examples/subscribe_blocks.rs @@ -11,10 +11,7 @@ //! ``` use futures::StreamExt; -use subxt::{ - OnlineClient, - PolkadotConfig, -}; +use subxt::{OnlineClient, PolkadotConfig}; #[subxt::subxt(runtime_metadata_path = "../artifacts/polkadot_metadata.scale")] pub mod polkadot {} diff --git a/examples/examples/subscribe_runtime_updates.rs b/examples/examples/subscribe_runtime_updates.rs index 641f444944..6e097706ae 100644 --- a/examples/examples/subscribe_runtime_updates.rs +++ b/examples/examples/subscribe_runtime_updates.rs @@ -11,10 +11,7 @@ //! 
``` use std::time::Duration; -use subxt::{ - OnlineClient, - PolkadotConfig, -}; +use subxt::{OnlineClient, PolkadotConfig}; #[tokio::main] async fn main() -> Result<(), Box> { diff --git a/macro/src/lib.rs b/macro/src/lib.rs index ac404ce781..9d9fa68619 100644 --- a/macro/src/lib.rs +++ b/macro/src/lib.rs @@ -115,21 +115,9 @@ use std::str::FromStr; use darling::FromMeta; use proc_macro::TokenStream; -use proc_macro_error::{ - abort, - abort_call_site, - proc_macro_error, -}; -use subxt_codegen::{ - utils::Uri, - DerivesRegistry, - TypeSubstitutes, -}; -use syn::{ - parse_macro_input, - punctuated::Punctuated, - spanned::Spanned as _, -}; +use proc_macro_error::{abort, abort_call_site, proc_macro_error}; +use subxt_codegen::{utils::Uri, DerivesRegistry, TypeSubstitutes}; +use syn::{parse_macro_input, punctuated::Punctuated, spanned::Spanned as _}; #[derive(Debug, FromMeta)] struct RuntimeMetadataArgs { @@ -198,13 +186,11 @@ pub fn subxt(args: TokenStream, input: TokenStream) -> TokenStream { ( ty, with.try_into() - .unwrap_or_else(|(node, msg): (syn::Path, String)| { - abort!(node.span(), msg) - }), + .unwrap_or_else(|(node, msg): (syn::Path, String)| abort!(node.span(), msg)), ) }, )) { - return err.into_compile_error().into() + return err.into_compile_error().into(); } let should_gen_docs = args.generate_docs.is_present(); @@ -240,10 +226,14 @@ pub fn subxt(args: TokenStream, input: TokenStream) -> TokenStream { .map_or_else(|err| err.into_compile_error().into(), Into::into) } (None, None) => { - abort_call_site!("One of 'runtime_metadata_path' or 'runtime_metadata_url' must be provided") + abort_call_site!( + "One of 'runtime_metadata_path' or 'runtime_metadata_url' must be provided" + ) } (Some(_), Some(_)) => { - abort_call_site!("Only one of 'runtime_metadata_path' or 'runtime_metadata_url' can be provided") + abort_call_site!( + "Only one of 'runtime_metadata_path' or 'runtime_metadata_url' can be provided" + ) } } } diff --git a/metadata/benches/bench.rs 
b/metadata/benches/bench.rs index a0adb1ab44..6627e2c41f 100644 --- a/metadata/benches/bench.rs +++ b/metadata/benches/bench.rs @@ -4,26 +4,11 @@ use codec::Decode; use criterion::*; -use frame_metadata::{ - RuntimeMetadata::V14, - RuntimeMetadataPrefixed, - RuntimeMetadataV14, -}; -use scale_info::{ - form::PortableForm, - TypeDef, - TypeDefVariant, -}; -use std::{ - fs, - path::Path, -}; +use frame_metadata::{RuntimeMetadata::V14, RuntimeMetadataPrefixed, RuntimeMetadataV14}; +use scale_info::{form::PortableForm, TypeDef, TypeDefVariant}; +use std::{fs, path::Path}; use subxt_metadata::{ - get_call_hash, - get_constant_hash, - get_metadata_hash, - get_pallet_hash, - get_storage_hash, + get_call_hash, get_constant_hash, get_metadata_hash, get_pallet_hash, get_storage_hash, }; fn load_metadata() -> RuntimeMetadataV14 { diff --git a/metadata/src/lib.rs b/metadata/src/lib.rs index e9a5aeeb8b..9b197049fe 100644 --- a/metadata/src/lib.rs +++ b/metadata/src/lib.rs @@ -3,18 +3,9 @@ // see LICENSE for license details. use frame_metadata::{ - ExtrinsicMetadata, - RuntimeMetadataV14, - StorageEntryMetadata, - StorageEntryType, -}; -use scale_info::{ - form::PortableForm, - Field, - PortableRegistry, - TypeDef, - Variant, + ExtrinsicMetadata, RuntimeMetadataV14, StorageEntryMetadata, StorageEntryType, }; +use scale_info::{form::PortableForm, Field, PortableRegistry, TypeDef, Variant}; use std::collections::HashSet; /// Internal byte representation for various metadata types utilized for @@ -137,8 +128,7 @@ fn get_type_def_hash( TypeDef::Tuple(tuple) => { let mut bytes = hash(&[TypeBeingHashed::Tuple as u8]); for field in tuple.fields() { - bytes = - hash_hashes(bytes, get_type_hash(registry, field.id(), visited_ids)); + bytes = hash_hashes(bytes, get_type_hash(registry, field.id(), visited_ids)); } bytes } @@ -169,14 +159,10 @@ fn get_type_def_hash( } /// Obtain the hash representation of a `scale_info::Type` identified by id. 
-fn get_type_hash( - registry: &PortableRegistry, - id: u32, - visited_ids: &mut HashSet, -) -> [u8; 32] { +fn get_type_hash(registry: &PortableRegistry, id: u32, visited_ids: &mut HashSet) -> [u8; 32] { // Guard against recursive types and return a fixed arbitrary hash if !visited_ids.insert(id) { - return hash(&[123u8]) + return hash(&[123u8]); } let ty = registry.resolve(id).unwrap(); @@ -459,21 +445,11 @@ pub enum NotFound { #[cfg(test)] mod tests { use super::*; - use bitvec::{ - order::Lsb0, - vec::BitVec, - }; + use bitvec::{order::Lsb0, vec::BitVec}; use frame_metadata::{ - ExtrinsicMetadata, - PalletCallMetadata, - PalletConstantMetadata, - PalletErrorMetadata, - PalletEventMetadata, - PalletMetadata, - PalletStorageMetadata, - RuntimeMetadataV14, - StorageEntryMetadata, - StorageEntryModifier, + ExtrinsicMetadata, PalletCallMetadata, PalletConstantMetadata, PalletErrorMetadata, + PalletEventMetadata, PalletMetadata, PalletStorageMetadata, RuntimeMetadataV14, + StorageEntryMetadata, StorageEntryModifier, }; use scale_info::meta_type; @@ -722,9 +698,11 @@ mod tests { ); // Check hashing with all pallets. - let hash_second = - get_metadata_per_pallet_hash(&metadata_both, &["First", "Second"]); - assert_ne!(hash_second, hash, "hashing both pallets should produce a different result from hashing just one pallet"); + let hash_second = get_metadata_per_pallet_hash(&metadata_both, &["First", "Second"]); + assert_ne!( + hash_second, hash, + "hashing both pallets should produce a different result from hashing just one pallet" + ); } #[test] diff --git a/subxt/src/blocks/block_types.rs b/subxt/src/blocks/block_types.rs index 36b42d3569..068802d8de 100644 --- a/subxt/src/blocks/block_types.rs +++ b/subxt/src/blocks/block_types.rs @@ -3,19 +3,9 @@ // see LICENSE for license details. 
use crate::{ - client::{ - OfflineClientT, - OnlineClientT, - }, - config::{ - Config, - Hasher, - Header, - }, - error::{ - BlockError, - Error, - }, + client::{OfflineClientT, OnlineClientT}, + config::{Config, Hasher, Header}, + error::{BlockError, Error}, events, rpc::types::ChainBlockResponse, runtime_api::RuntimeApi, @@ -135,15 +125,13 @@ where .extrinsics .iter() .enumerate() - .map(|(idx, e)| { - Extrinsic { - index: idx as u32, - bytes: &e.0, - client: self.client.clone(), - block_hash: self.details.block.header.hash(), - cached_events: self.cached_events.clone(), - _marker: std::marker::PhantomData, - } + .map(|(idx, e)| Extrinsic { + index: idx as u32, + bytes: &e.0, + client: self.client.clone(), + block_hash: self.details.block.header.hash(), + cached_events: self.cached_events.clone(), + _marker: std::marker::PhantomData, }) } } @@ -181,8 +169,7 @@ where { /// The events associated with the extrinsic. pub async fn events(&self) -> Result, Error> { - let events = - get_events(&self.client, self.block_hash, &self.cached_events).await?; + let events = get_events(&self.client, self.block_hash, &self.cached_events).await?; let ext_hash = T::Hasher::hash_of(&self.bytes); Ok(ExtrinsicEvents::new(ext_hash, self.index, events)) } @@ -248,9 +235,7 @@ impl ExtrinsicEvents { /// /// This works in the same way that [`events::Events::find()`] does, with the /// exception that it filters out events not related to the submitted extrinsic. 
- pub fn find( - &self, - ) -> impl Iterator> + '_ { + pub fn find(&self) -> impl Iterator> + '_ { self.iter().filter_map(|ev| { ev.and_then(|ev| ev.as_event::().map_err(Into::into)) .transpose() diff --git a/subxt/src/blocks/blocks_client.rs b/subxt/src/blocks/blocks_client.rs index cd48db5360..5cccfb9904 100644 --- a/subxt/src/blocks/blocks_client.rs +++ b/subxt/src/blocks/blocks_client.rs @@ -5,27 +5,13 @@ use super::Block; use crate::{ client::OnlineClientT, - config::{ - Config, - Header, - }, - error::{ - BlockError, - Error, - }, + config::{Config, Header}, + error::{BlockError, Error}, utils::PhantomDataSendSync, }; use derivative::Derivative; -use futures::{ - future::Either, - stream, - Stream, - StreamExt, -}; -use std::{ - future::Future, - pin::Pin, -}; +use futures::{future::Either, stream, Stream, StreamExt}; +use std::{future::Future, pin::Pin}; type BlockStream = Pin> + Send>>; type BlockStreamRes = Result, Error>; @@ -71,13 +57,11 @@ where // for the latest block and use that. let block_hash = match block_hash { Some(hash) => hash, - None => { - client - .rpc() - .block_hash(None) - .await? - .expect("didn't pass a block number; qed") - } + None => client + .rpc() + .block_hash(None) + .await? + .expect("didn't pass a block number; qed"), }; let block_header = match client.rpc().header(Some(block_hash)).await? { @@ -145,12 +129,8 @@ where // Adjust the subscription stream to fill in any missing blocks. 
BlockStreamRes::Ok( - subscribe_to_block_headers_filling_in_gaps( - client, - last_finalized_block_num, - sub, - ) - .boxed(), + subscribe_to_block_headers_filling_in_gaps(client, last_finalized_block_num, sub) + .boxed(), ) }) } diff --git a/subxt/src/blocks/mod.rs b/subxt/src/blocks/mod.rs index ae7f212f48..1e51052fe8 100644 --- a/subxt/src/blocks/mod.rs +++ b/subxt/src/blocks/mod.rs @@ -7,12 +7,5 @@ mod block_types; mod blocks_client; -pub use block_types::{ - Block, - Extrinsic, - ExtrinsicEvents, -}; -pub use blocks_client::{ - subscribe_to_block_headers_filling_in_gaps, - BlocksClient, -}; +pub use block_types::{Block, Extrinsic, ExtrinsicEvents}; +pub use blocks_client::{subscribe_to_block_headers_filling_in_gaps, BlocksClient}; diff --git a/subxt/src/client/mod.rs b/subxt/src/client/mod.rs index f056abde9d..2948ade6b2 100644 --- a/subxt/src/client/mod.rs +++ b/subxt/src/client/mod.rs @@ -11,17 +11,9 @@ mod offline_client; mod online_client; -pub use offline_client::{ - OfflineClient, - OfflineClientT, -}; +pub use offline_client::{OfflineClient, OfflineClientT}; pub use online_client::{ - ClientRuntimeUpdater, - OnlineClient, - OnlineClientT, - RuntimeUpdaterStream, - Update, - UpgradeError, + ClientRuntimeUpdater, OnlineClient, OnlineClientT, RuntimeUpdaterStream, Update, UpgradeError, }; #[cfg(any( diff --git a/subxt/src/client/offline_client.rs b/subxt/src/client/offline_client.rs index c2681102f5..3f7cf273c0 100644 --- a/subxt/src/client/offline_client.rs +++ b/subxt/src/client/offline_client.rs @@ -3,15 +3,9 @@ // see LICENSE for license details. 
use crate::{ - blocks::BlocksClient, - constants::ConstantsClient, - events::EventsClient, - rpc::types::RuntimeVersion, - runtime_api::RuntimeApiClient, - storage::StorageClient, - tx::TxClient, - Config, - Metadata, + blocks::BlocksClient, constants::ConstantsClient, events::EventsClient, + rpc::types::RuntimeVersion, runtime_api::RuntimeApiClient, storage::StorageClient, + tx::TxClient, Config, Metadata, }; use derivative::Derivative; use std::sync::Arc; diff --git a/subxt/src/client/online_client.rs b/subxt/src/client/online_client.rs index 6babec8971..06542ac637 100644 --- a/subxt/src/client/online_client.rs +++ b/subxt/src/client/online_client.rs @@ -2,33 +2,22 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -use super::{ - OfflineClient, - OfflineClientT, -}; +use super::{OfflineClient, OfflineClientT}; use crate::{ blocks::BlocksClient, constants::ConstantsClient, error::Error, events::EventsClient, rpc::{ - types::{ - RuntimeVersion, - Subscription, - }, - Rpc, - RpcClientT, + types::{RuntimeVersion, Subscription}, + Rpc, RpcClientT, }, runtime_api::RuntimeApiClient, storage::StorageClient, tx::TxClient, - Config, - Metadata, -}; -use codec::{ - Compact, - Decode, + Config, Metadata, }; +use codec::{Compact, Decode}; use derivative::Derivative; use frame_metadata::RuntimeMetadataPrefixed; use futures::future; @@ -114,12 +103,7 @@ impl OnlineClient { ) .await; - OnlineClient::from_rpc_client_with( - genesis_hash?, - runtime_version?, - metadata?, - rpc_client, - ) + OnlineClient::from_rpc_client_with(genesis_hash?, runtime_version?, metadata?, rpc_client) } /// Construct a new [`OnlineClient`] by providing all of the underlying details needed @@ -341,7 +325,7 @@ impl ClientRuntimeUpdater { /// Tries to apply a new update. 
pub fn apply_update(&self, update: Update) -> Result<(), UpgradeError> { if !self.is_runtime_version_different(&update.runtime_version) { - return Err(UpgradeError::SameVersion) + return Err(UpgradeError::SameVersion); } self.do_update(update); @@ -442,18 +426,9 @@ impl Update { #[cfg(feature = "jsonrpsee-ws")] mod jsonrpsee_helpers { pub use jsonrpsee::{ - client_transport::ws::{ - InvalidUri, - Receiver, - Sender, - Uri, - WsTransportClientBuilder, - }, + client_transport::ws::{InvalidUri, Receiver, Sender, Uri, WsTransportClientBuilder}, core::{ - client::{ - Client, - ClientBuilder, - }, + client::{Client, ClientBuilder}, Error, }, }; @@ -483,10 +458,7 @@ mod jsonrpsee_helpers { pub use jsonrpsee::{ client_transport::web, core::{ - client::{ - Client, - ClientBuilder, - }, + client::{Client, ClientBuilder}, Error, }, }; diff --git a/subxt/src/config/extrinsic_params.rs b/subxt/src/config/extrinsic_params.rs index e9fae6e022..7175d1cada 100644 --- a/subxt/src/config/extrinsic_params.rs +++ b/subxt/src/config/extrinsic_params.rs @@ -7,21 +7,11 @@ //! implementation of the trait is provided ([`BaseExtrinsicParams`]) which is //! used by the provided Substrate and Polkadot configuration. -use crate::{ - utils::Encoded, - Config, -}; -use codec::{ - Compact, - Decode, - Encode, -}; +use crate::{utils::Encoded, Config}; +use codec::{Compact, Decode, Encode}; use core::fmt::Debug; use derivative::Derivative; -use serde::{ - Deserialize, - Serialize, -}; +use serde::{Deserialize, Serialize}; /// This trait allows you to configure the "signed extra" and /// "additional" parameters that are signed and used in transactions. 
@@ -148,9 +138,7 @@ impl ExtrinsicParams Self { BaseExtrinsicParams { era: other_params.era, - mortality_checkpoint: other_params - .mortality_checkpoint - .unwrap_or(genesis_hash), + mortality_checkpoint: other_params.mortality_checkpoint.unwrap_or(genesis_hash), tip: other_params.tip, nonce, spec_version, diff --git a/subxt/src/config/mod.rs b/subxt/src/config/mod.rs index ce75b4664e..04251306e7 100644 --- a/subxt/src/config/mod.rs +++ b/subxt/src/config/mod.rs @@ -12,15 +12,9 @@ pub mod extrinsic_params; pub mod polkadot; pub mod substrate; -use codec::{ - Decode, - Encode, -}; +use codec::{Decode, Encode}; use core::fmt::Debug; -use serde::{ - de::DeserializeOwned, - Serialize, -}; +use serde::{de::DeserializeOwned, Serialize}; pub use extrinsic_params::ExtrinsicParams; pub use polkadot::PolkadotConfig; @@ -110,10 +104,7 @@ pub trait Header: Sized + Encode { /// // This is how PolkadotConfig is implemented: /// type PolkadotConfig = WithExtrinsicParams>; /// ``` -pub struct WithExtrinsicParams< - T: Config, - E: extrinsic_params::ExtrinsicParams, -> { +pub struct WithExtrinsicParams> { _marker: std::marker::PhantomData<(T, E)>, } diff --git a/subxt/src/config/polkadot.rs b/subxt/src/config/polkadot.rs index b9a5e2263a..c26dc79008 100644 --- a/subxt/src/config/polkadot.rs +++ b/subxt/src/config/polkadot.rs @@ -6,10 +6,7 @@ use codec::Encode; -use super::extrinsic_params::{ - BaseExtrinsicParams, - BaseExtrinsicParamsBuilder, -}; +use super::extrinsic_params::{BaseExtrinsicParams, BaseExtrinsicParamsBuilder}; /// Default set of commonly used types by Polkadot nodes. pub type PolkadotConfig = super::WithExtrinsicParams< diff --git a/subxt/src/config/substrate.rs b/subxt/src/config/substrate.rs index 4b56315ef5..4b6530dd2f 100644 --- a/subxt/src/config/substrate.rs +++ b/subxt/src/config/substrate.rs @@ -5,32 +5,14 @@ //! 
Substrate specific configuration use super::{ - extrinsic_params::{ - BaseExtrinsicParams, - BaseExtrinsicParamsBuilder, - }, - Config, - Hasher, - Header, -}; -use codec::{ - Decode, - Encode, -}; -use serde::{ - Deserialize, - Serialize, + extrinsic_params::{BaseExtrinsicParams, BaseExtrinsicParamsBuilder}, + Config, Hasher, Header, }; +use codec::{Decode, Encode}; +use serde::{Deserialize, Serialize}; -pub use crate::utils::{ - AccountId32, - MultiAddress, - MultiSignature, -}; -pub use primitive_types::{ - H256, - U256, -}; +pub use crate::utils::{AccountId32, MultiAddress, MultiSignature}; +pub use primitive_types::{H256, U256}; /// Default set of commonly used types by Substrate runtimes. // Note: We only use this at the type level, so it should be impossible to @@ -137,9 +119,7 @@ where } /// Generic header digest. From `sp_runtime::generic::digest`. -#[derive( - Encode, Decode, Debug, PartialEq, Eq, Clone, Serialize, Deserialize, Default, -)] +#[derive(Encode, Decode, Debug, PartialEq, Eq, Clone, Serialize, Deserialize, Default)] pub struct Digest { /// A list of digest items. pub logs: Vec, @@ -240,9 +220,7 @@ impl Decode for DigestItem { Ok(Self::Seal(vals.0, vals.1)) } DigestItemType::Other => Ok(Self::Other(Decode::decode(input)?)), - DigestItemType::RuntimeEnvironmentUpdated => { - Ok(Self::RuntimeEnvironmentUpdated) - } + DigestItemType::RuntimeEnvironmentUpdated => Ok(Self::RuntimeEnvironmentUpdated), } } } diff --git a/subxt/src/constants/constant_address.rs b/subxt/src/constants/constant_address.rs index 18318ba369..7996a83040 100644 --- a/subxt/src/constants/constant_address.rs +++ b/subxt/src/constants/constant_address.rs @@ -2,10 +2,7 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -use crate::{ - dynamic::DecodedValueThunk, - metadata::DecodeWithMetadata, -}; +use crate::{dynamic::DecodedValueThunk, metadata::DecodeWithMetadata}; use std::borrow::Cow; /// This represents a constant address. 
Anything implementing this trait @@ -94,9 +91,6 @@ impl ConstantAddress for Address { } /// Construct a new dynamic constant lookup. -pub fn dynamic( - pallet_name: impl Into, - constant_name: impl Into, -) -> DynamicAddress { +pub fn dynamic(pallet_name: impl Into, constant_name: impl Into) -> DynamicAddress { DynamicAddress::new(pallet_name, constant_name) } diff --git a/subxt/src/constants/constants_client.rs b/subxt/src/constants/constants_client.rs index 0df7ffd6a6..30ef5067b3 100644 --- a/subxt/src/constants/constants_client.rs +++ b/subxt/src/constants/constants_client.rs @@ -6,10 +6,7 @@ use super::ConstantAddress; use crate::{ client::OfflineClientT, error::Error, - metadata::{ - DecodeWithMetadata, - MetadataError, - }, + metadata::{DecodeWithMetadata, MetadataError}, Config, }; use derivative::Derivative; @@ -37,10 +34,7 @@ impl> ConstantsClient { /// if the address is valid (or if it's not possible to check since the address has no validation hash). /// Return an error if the address was not valid or something went wrong trying to validate it (ie /// the pallet or constant in question do not exist at all). 
- pub fn validate( - &self, - address: &Address, - ) -> Result<(), Error> { + pub fn validate(&self, address: &Address) -> Result<(), Error> { if let Some(actual_hash) = address.validation_hash() { let expected_hash = self .client @@ -51,7 +45,7 @@ impl> ConstantsClient { address.pallet_name().into(), address.constant_name().into(), ) - .into()) + .into()); } } Ok(()) diff --git a/subxt/src/constants/mod.rs b/subxt/src/constants/mod.rs index cbfd3c2c8e..721c21ae50 100644 --- a/subxt/src/constants/mod.rs +++ b/subxt/src/constants/mod.rs @@ -7,10 +7,5 @@ mod constant_address; mod constants_client; -pub use constant_address::{ - dynamic, - Address, - ConstantAddress, - DynamicAddress, -}; +pub use constant_address::{dynamic, Address, ConstantAddress, DynamicAddress}; pub use constants_client::ConstantsClient; diff --git a/subxt/src/dynamic.rs b/subxt/src/dynamic.rs index 01a036b1fa..d548d14a33 100644 --- a/subxt/src/dynamic.rs +++ b/subxt/src/dynamic.rs @@ -7,10 +7,7 @@ use crate::{ error::Error, - metadata::{ - DecodeWithMetadata, - Metadata, - }, + metadata::{DecodeWithMetadata, Metadata}, }; use scale_decode::DecodeAsType; @@ -29,10 +26,7 @@ pub use crate::tx::dynamic as tx; pub use crate::constants::dynamic as constant; // Lookup storage values dynamically. -pub use crate::storage::{ - dynamic as storage, - dynamic_root as storage_root, -}; +pub use crate::storage::{dynamic as storage, dynamic_root as storage_root}; /// This is the result of making a dynamic request to a node. 
From this, /// we can return the raw SCALE bytes that we were handed back, or we can diff --git a/subxt/src/error.rs b/subxt/src/error.rs index 227c0835cd..d49c2ad9ad 100644 --- a/subxt/src/error.rs +++ b/subxt/src/error.rs @@ -11,10 +11,7 @@ use scale_info::TypeDef; use std::borrow::Cow; // Re-expose the errors we use from other crates here: -pub use crate::metadata::{ - InvalidMetadataError, - MetadataError, -}; +pub use crate::metadata::{InvalidMetadataError, MetadataError}; pub use scale_decode::Error as DecodeError; pub use scale_encode::Error as EncodeError; @@ -123,7 +120,7 @@ impl DispatchError { tracing::warn!( "Can't decode error: sp_runtime::DispatchError was not found in Metadata" ); - return DispatchError::Other(bytes.into_owned()) + return DispatchError::Other(bytes.into_owned()); } }; @@ -131,7 +128,7 @@ impl DispatchError { Some(ty) => ty, None => { tracing::warn!("Can't decode error: sp_runtime::DispatchError type ID doesn't resolve to a known type"); - return DispatchError::Other(bytes.into_owned()) + return DispatchError::Other(bytes.into_owned()); } }; @@ -141,7 +138,7 @@ impl DispatchError { tracing::warn!( "Can't decode error: sp_runtime::DispatchError type is not a Variant" ); - return DispatchError::Other(bytes.into_owned()) + return DispatchError::Other(bytes.into_owned()); } }; @@ -154,14 +151,14 @@ impl DispatchError { Some(idx) => idx, None => { tracing::warn!("Can't decode error: sp_runtime::DispatchError does not have a 'Module' variant"); - return DispatchError::Other(bytes.into_owned()) + return DispatchError::Other(bytes.into_owned()); } }; // If the error bytes don't correspond to a ModuleError, just return the bytes. // This is perfectly reasonable and expected, so no logging. 
if bytes[0] != module_variant_idx { - return DispatchError::Other(bytes.into_owned()) + return DispatchError::Other(bytes.into_owned()); } // The remaining bytes are the module error, all being well: @@ -189,7 +186,7 @@ impl DispatchError { Ok(err) => err, Err(_) => { tracing::warn!("Can't decode error: sp_runtime::DispatchError does not match known formats"); - return DispatchError::Other(bytes.to_vec()) + return DispatchError::Other(bytes.to_vec()); } }; CurrentModuleError { @@ -203,7 +200,7 @@ impl DispatchError { Ok(details) => details, Err(_) => { tracing::warn!("Can't decode error: sp_runtime::DispatchError::Module details do not match known information"); - return DispatchError::Other(bytes.to_vec()) + return DispatchError::Other(bytes.to_vec()); } }; @@ -223,9 +220,7 @@ impl DispatchError { #[derive(Clone, Debug, Eq, thiserror::Error, PartialEq)] pub enum BlockError { /// The block - #[error( - "Could not find a block with hash {0} (perhaps it was on a non-finalized fork?)" - )] + #[error("Could not find a block with hash {0} (perhaps it was on a non-finalized fork?)")] BlockHashNotFound(String), } @@ -299,14 +294,10 @@ pub enum StorageAddressError { expected: usize, }, /// Storage lookup requires a type that wasn't found in the metadata. - #[error( - "Storage lookup requires type {0} to exist in the metadata, but it was not found" - )] + #[error("Storage lookup requires type {0} to exist in the metadata, but it was not found")] TypeNotFound(u32), /// This storage entry in the metadata does not have the correct number of hashers to fields. - #[error( - "Storage entry in metadata does not have the correct number of hashers to fields" - )] + #[error("Storage entry in metadata does not have the correct number of hashers to fields")] WrongNumberOfHashers { /// The number of hashers in the metadata for this storage entry. 
hashers: usize, diff --git a/subxt/src/events/events_client.rs b/subxt/src/events/events_client.rs index c5ea1bab3b..256d50c7bf 100644 --- a/subxt/src/events/events_client.rs +++ b/subxt/src/events/events_client.rs @@ -2,13 +2,7 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -use crate::{ - client::OnlineClientT, - error::Error, - events::Events, - rpc::types::StorageKey, - Config, -}; +use crate::{client::OnlineClientT, error::Error, events::Events, rpc::types::StorageKey, Config}; use derivative::Derivative; use std::future::Future; @@ -54,13 +48,11 @@ where // for the latest block and use that. let block_hash = match block_hash { Some(hash) => hash, - None => { - client - .rpc() - .block_hash(None) - .await? - .expect("didn't pass a block number; qed") - } + None => client + .rpc() + .block_hash(None) + .await? + .expect("didn't pass a block number; qed"), }; let event_bytes = get_event_bytes(&client, Some(block_hash)).await?; diff --git a/subxt/src/events/events_type.rs b/subxt/src/events/events_type.rs index 88ba95503e..d14fad46f2 100644 --- a/subxt/src/events/events_type.rs +++ b/subxt/src/events/events_type.rs @@ -4,25 +4,15 @@ //! A representation of a block of events. -use super::{ - Phase, - StaticEvent, -}; +use super::{Phase, StaticEvent}; use crate::{ client::OnlineClientT, error::Error, events::events_client::get_event_bytes, - metadata::{ - DecodeWithMetadata, - EventMetadata, - }, - Config, - Metadata, -}; -use codec::{ - Compact, - Decode, + metadata::{DecodeWithMetadata, EventMetadata}, + Config, Metadata, }; +use codec::{Compact, Decode}; use derivative::Derivative; use std::sync::Arc; @@ -42,11 +32,7 @@ pub struct Events { } impl Events { - pub(crate) fn new( - metadata: Metadata, - block_hash: T::Hash, - event_bytes: Vec, - ) -> Self { + pub(crate) fn new(metadata: Metadata, block_hash: T::Hash, event_bytes: Vec) -> Self { // event_bytes is a SCALE encoded vector of events. 
So, pluck the // compact encoded length from the front, leaving the remaining bytes // for our iterating to decode. @@ -348,12 +334,11 @@ impl EventDetails { let event_metadata = self.event_metadata(); use scale_decode::DecodeAsFields; - let decoded = - >::decode_as_fields( - bytes, - event_metadata.fields(), - &self.metadata.runtime_metadata().types, - )?; + let decoded = >::decode_as_fields( + bytes, + event_metadata.fields(), + &self.metadata.runtime_metadata().types, + )?; Ok(decoded) } @@ -403,8 +388,7 @@ impl EventDetails { /// the pallet and event enum variants as well as the event fields). A compatible /// type for this is exposed via static codegen as a root level `Event` type. pub fn as_root_event(&self) -> Result { - let pallet_bytes = - &self.all_bytes[self.event_start_idx + 1..self.event_fields_end_idx]; + let pallet_bytes = &self.all_bytes[self.event_start_idx + 1..self.event_fields_end_idx]; let pallet = self.metadata.pallet(self.pallet_name())?; let pallet_event_ty = pallet.event_ty_id().ok_or_else(|| { Error::Metadata(crate::metadata::MetadataError::EventNotFound( @@ -443,24 +427,13 @@ pub trait RootEvent: Sized { #[cfg(test)] pub(crate) mod test_utils { use super::*; - use crate::{ - Config, - SubstrateConfig, - }; + use crate::{Config, SubstrateConfig}; use codec::Encode; use frame_metadata::{ - v14::{ - ExtrinsicMetadata, - PalletEventMetadata, - PalletMetadata, - RuntimeMetadataV14, - }, + v14::{ExtrinsicMetadata, PalletEventMetadata, PalletMetadata, RuntimeMetadataV14}, RuntimeMetadataPrefixed, }; - use scale_info::{ - meta_type, - TypeInfo, - }; + use scale_info::{meta_type, TypeInfo}; use std::convert::TryFrom; /// An "outer" events enum containing exactly one event. 
@@ -492,7 +465,7 @@ pub(crate) mod test_utils { &mut bytes, pallet_event_ty, metadata, - )?)) + )?)); } panic!("Asked for pallet name '{pallet_name}', which isn't in our test AllEvents type") } @@ -579,12 +552,7 @@ pub(crate) mod test_utils { #[cfg(test)] mod tests { use super::{ - test_utils::{ - event_record, - events, - events_raw, - AllEvents, - }, + test_utils::{event_record, events, events_raw, AllEvents}, *, }; use codec::Encode; @@ -627,13 +595,8 @@ mod tests { let mut actual_bytes = vec![]; for field in actual_fields.into_values() { - scale_value::scale::encode_as_type( - &field, - field.context, - types, - &mut actual_bytes, - ) - .expect("should be able to encode properly"); + scale_value::scale::encode_as_type(&field, field.context, types, &mut actual_bytes) + .expect("should be able to encode properly"); } assert_eq!(actual_bytes, actual.field_bytes()); @@ -656,9 +619,7 @@ mod tests { #[test] fn statically_decode_single_root_event() { - #[derive( - Clone, Debug, PartialEq, Decode, Encode, TypeInfo, scale_decode::DecodeAsType, - )] + #[derive(Clone, Debug, PartialEq, Decode, Encode, TypeInfo, scale_decode::DecodeAsType)] enum Event { A(u8, bool, Vec), } @@ -691,9 +652,7 @@ mod tests { #[test] fn statically_decode_single_pallet_event() { - #[derive( - Clone, Debug, PartialEq, Decode, Encode, TypeInfo, scale_decode::DecodeAsType, - )] + #[derive(Clone, Debug, PartialEq, Decode, Encode, TypeInfo, scale_decode::DecodeAsType)] enum Event { A(u8, bool, Vec), } @@ -847,8 +806,7 @@ mod tests { // Encode 2 events: let mut event_bytes = vec![]; event_record(Phase::Initialization, Event::A(1)).encode_to(&mut event_bytes); - event_record(Phase::ApplyExtrinsic(123), Event::B(true)) - .encode_to(&mut event_bytes); + event_record(Phase::ApplyExtrinsic(123), Event::B(true)).encode_to(&mut event_bytes); // Push a few naff bytes to the end (a broken third event): event_bytes.extend_from_slice(&[3, 127, 45, 0, 2]); diff --git a/subxt/src/events/mod.rs 
b/subxt/src/events/mod.rs index 4499d10751..ed41428de4 100644 --- a/subxt/src/events/mod.rs +++ b/subxt/src/events/mod.rs @@ -9,10 +9,7 @@ mod events_client; mod events_type; -use codec::{ - Decode, - Encode, -}; +use codec::{Decode, Encode}; pub use events_client::EventsClient; pub use events_type::{ EventDetails, diff --git a/subxt/src/lib.rs b/subxt/src/lib.rs index 108a368524..b442e3d7d5 100644 --- a/subxt/src/lib.rs +++ b/subxt/src/lib.rs @@ -144,7 +144,9 @@ pub use subxt_macro::subxt; pub use getrandom as _; #[cfg(all(feature = "jsonrpsee-ws", feature = "jsonrpsee-web"))] -std::compile_error!("Both the features `jsonrpsee-ws` and `jsonrpsee-web` are enabled which are mutually exclusive"); +std::compile_error!( + "Both the features `jsonrpsee-ws` and `jsonrpsee-web` are enabled which are mutually exclusive" +); pub mod blocks; pub mod client; @@ -163,15 +165,8 @@ pub mod utils; // Expose a few of the most common types at root, // but leave most types behind their respective modules. pub use crate::{ - client::{ - OfflineClient, - OnlineClient, - }, - config::{ - Config, - PolkadotConfig, - SubstrateConfig, - }, + client::{OfflineClient, OnlineClient}, + config::{Config, PolkadotConfig, SubstrateConfig}, error::Error, metadata::Metadata, }; diff --git a/subxt/src/metadata/hash_cache.rs b/subxt/src/metadata/hash_cache.rs index 0bee8c739f..16313deb36 100644 --- a/subxt/src/metadata/hash_cache.rs +++ b/subxt/src/metadata/hash_cache.rs @@ -3,10 +3,7 @@ // see LICENSE for license details. use parking_lot::RwLock; -use std::{ - borrow::Cow, - collections::HashMap, -}; +use std::{borrow::Cow, collections::HashMap}; /// A cache with the simple goal of storing 32 byte hashes against pallet+item keys #[derive(Default, Debug)] @@ -17,12 +14,7 @@ pub struct HashCache { impl HashCache { /// get a hash out of the cache by its pallet and item key. If the item doesn't exist, /// run the function provided to obtain a hash to insert (or bail with some error on failure). 
- pub fn get_or_insert( - &self, - pallet: &str, - item: &str, - f: F, - ) -> Result<[u8; 32], E> + pub fn get_or_insert(&self, pallet: &str, item: &str, f: F) -> Result<[u8; 32], E> where F: FnOnce() -> Result<[u8; 32], E>, { @@ -33,7 +25,7 @@ impl HashCache { .copied(); if let Some(hash) = maybe_hash { - return Ok(hash) + return Ok(hash); } let hash = f()?; diff --git a/subxt/src/metadata/metadata_type.rs b/subxt/src/metadata/metadata_type.rs index 2ae83f31d1..c73013483a 100644 --- a/subxt/src/metadata/metadata_type.rs +++ b/subxt/src/metadata/metadata_type.rs @@ -5,24 +5,12 @@ use super::hash_cache::HashCache; use codec::Error as CodecError; use frame_metadata::{ - PalletConstantMetadata, - RuntimeMetadata, - RuntimeMetadataPrefixed, - RuntimeMetadataV14, - StorageEntryMetadata, - META_RESERVED, + PalletConstantMetadata, RuntimeMetadata, RuntimeMetadataPrefixed, RuntimeMetadataV14, + StorageEntryMetadata, META_RESERVED, }; use parking_lot::RwLock; -use scale_info::{ - form::PortableForm, - PortableRegistry, - Type, -}; -use std::{ - collections::HashMap, - convert::TryFrom, - sync::Arc, -}; +use scale_info::{form::PortableForm, PortableRegistry, Type}; +use std::{collections::HashMap, convert::TryFrom, sync::Arc}; /// Metadata error originated from inspecting the internal representation of the runtime metadata. #[derive(Debug, thiserror::Error, PartialEq, Eq)] @@ -164,82 +152,54 @@ impl Metadata { } /// Obtain the unique hash for a specific storage entry. 
- pub fn storage_hash( - &self, - pallet: &str, - storage: &str, - ) -> Result<[u8; 32], MetadataError> { + pub fn storage_hash(&self, pallet: &str, storage: &str) -> Result<[u8; 32], MetadataError> { self.inner .cached_storage_hashes .get_or_insert(pallet, storage, || { - subxt_metadata::get_storage_hash(&self.inner.metadata, pallet, storage) - .map_err(|e| { - match e { - subxt_metadata::NotFound::Pallet => { - MetadataError::PalletNotFound - } - subxt_metadata::NotFound::Item => { - MetadataError::StorageNotFound - } - } - }) + subxt_metadata::get_storage_hash(&self.inner.metadata, pallet, storage).map_err( + |e| match e { + subxt_metadata::NotFound::Pallet => MetadataError::PalletNotFound, + subxt_metadata::NotFound::Item => MetadataError::StorageNotFound, + }, + ) }) } /// Obtain the unique hash for a constant. - pub fn constant_hash( - &self, - pallet: &str, - constant: &str, - ) -> Result<[u8; 32], MetadataError> { + pub fn constant_hash(&self, pallet: &str, constant: &str) -> Result<[u8; 32], MetadataError> { self.inner .cached_constant_hashes .get_or_insert(pallet, constant, || { - subxt_metadata::get_constant_hash(&self.inner.metadata, pallet, constant) - .map_err(|e| { - match e { - subxt_metadata::NotFound::Pallet => { - MetadataError::PalletNotFound - } - subxt_metadata::NotFound::Item => { - MetadataError::ConstantNotFound - } - } - }) + subxt_metadata::get_constant_hash(&self.inner.metadata, pallet, constant).map_err( + |e| match e { + subxt_metadata::NotFound::Pallet => MetadataError::PalletNotFound, + subxt_metadata::NotFound::Item => MetadataError::ConstantNotFound, + }, + ) }) } /// Obtain the unique hash for a call. 
- pub fn call_hash( - &self, - pallet: &str, - function: &str, - ) -> Result<[u8; 32], MetadataError> { + pub fn call_hash(&self, pallet: &str, function: &str) -> Result<[u8; 32], MetadataError> { self.inner .cached_call_hashes .get_or_insert(pallet, function, || { - subxt_metadata::get_call_hash(&self.inner.metadata, pallet, function) - .map_err(|e| { - match e { - subxt_metadata::NotFound::Pallet => { - MetadataError::PalletNotFound - } - subxt_metadata::NotFound::Item => MetadataError::CallNotFound, - } - }) + subxt_metadata::get_call_hash(&self.inner.metadata, pallet, function).map_err(|e| { + match e { + subxt_metadata::NotFound::Pallet => MetadataError::PalletNotFound, + subxt_metadata::NotFound::Item => MetadataError::CallNotFound, + } + }) }) } /// Obtain the unique hash for this metadata. pub fn metadata_hash>(&self, pallets: &[T]) -> [u8; 32] { if let Some(hash) = *self.inner.cached_metadata_hash.read() { - return hash + return hash; } - let hash = subxt_metadata::get_metadata_per_pallet_hash( - self.runtime_metadata(), - pallets, - ); + let hash = subxt_metadata::get_metadata_per_pallet_hash(self.runtime_metadata(), pallets); *self.inner.cached_metadata_hash.write() = Some(hash); hash @@ -292,10 +252,7 @@ impl PalletMetadata { } /// Return [`StorageEntryMetadata`] given some storage key. 
- pub fn storage( - &self, - key: &str, - ) -> Result<&StorageEntryMetadata, MetadataError> { + pub fn storage(&self, key: &str) -> Result<&StorageEntryMetadata, MetadataError> { self.storage.get(key).ok_or(MetadataError::StorageNotFound) } @@ -411,7 +368,7 @@ impl TryFrom for Metadata { fn try_from(metadata: RuntimeMetadataPrefixed) -> Result { if metadata.0 != META_RESERVED { - return Err(InvalidMetadataError::InvalidPrefix) + return Err(InvalidMetadataError::InvalidPrefix); } let metadata = match metadata.1 { RuntimeMetadata::V14(meta) => meta, @@ -436,24 +393,23 @@ impl TryFrom for Metadata { let call_ty_id = pallet.calls.as_ref().map(|c| c.ty.id()); let event_ty_id = pallet.event.as_ref().map(|e| e.ty.id()); - let call_metadata = - pallet.calls.as_ref().map_or(Ok(HashMap::new()), |call| { - let type_def_variant = get_type_def_variant(call.ty.id())?; - let call_indexes = type_def_variant - .variants() - .iter() - .map(|v| { - ( - v.name().clone(), - CallMetadata { - call_index: v.index(), - fields: v.fields().to_vec(), - }, - ) - }) - .collect(); - Ok(call_indexes) - })?; + let call_metadata = pallet.calls.as_ref().map_or(Ok(HashMap::new()), |call| { + let type_def_variant = get_type_def_variant(call.ty.id())?; + let call_indexes = type_def_variant + .variants() + .iter() + .map(|v| { + ( + v.name().clone(), + CallMetadata { + call_index: v.index(), + fields: v.fields().to_vec(), + }, + ) + }) + .collect(); + Ok(call_indexes) + })?; let storage = pallet.storage.as_ref().map_or(HashMap::new(), |storage| { storage @@ -548,15 +504,9 @@ impl TryFrom for Metadata { mod tests { use super::*; use frame_metadata::{ - ExtrinsicMetadata, - PalletStorageMetadata, - StorageEntryModifier, - StorageEntryType, - }; - use scale_info::{ - meta_type, - TypeInfo, + ExtrinsicMetadata, PalletStorageMetadata, StorageEntryModifier, StorageEntryType, }; + use scale_info::{meta_type, TypeInfo}; fn load_metadata() -> Metadata { #[allow(dead_code)] diff --git a/subxt/src/metadata/mod.rs 
b/subxt/src/metadata/mod.rs index 66490a258f..034c96a523 100644 --- a/subxt/src/metadata/mod.rs +++ b/subxt/src/metadata/mod.rs @@ -12,15 +12,7 @@ mod metadata_type; pub use metadata_location::MetadataLocation; pub use metadata_type::{ - ErrorMetadata, - EventMetadata, - InvalidMetadataError, - Metadata, - MetadataError, - PalletMetadata, + ErrorMetadata, EventMetadata, InvalidMetadataError, Metadata, MetadataError, PalletMetadata, }; -pub use decode_encode_traits::{ - DecodeWithMetadata, - EncodeWithMetadata, -}; +pub use decode_encode_traits::{DecodeWithMetadata, EncodeWithMetadata}; diff --git a/subxt/src/rpc/jsonrpsee_impl.rs b/subxt/src/rpc/jsonrpsee_impl.rs index e04a7828b8..3d8c0192cd 100644 --- a/subxt/src/rpc/jsonrpsee_impl.rs +++ b/subxt/src/rpc/jsonrpsee_impl.rs @@ -2,24 +2,12 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -use super::{ - RpcClientT, - RpcFuture, - RpcSubscription, -}; +use super::{RpcClientT, RpcFuture, RpcSubscription}; use crate::error::RpcError; -use futures::stream::{ - StreamExt, - TryStreamExt, -}; +use futures::stream::{StreamExt, TryStreamExt}; use jsonrpsee::{ core::{ - client::{ - Client, - ClientT, - SubscriptionClientT, - SubscriptionKind, - }, + client::{Client, ClientT, SubscriptionClientT, SubscriptionKind}, traits::ToRpcParams, Error as JsonRpseeError, }, diff --git a/subxt/src/rpc/mod.rs b/subxt/src/rpc/mod.rs index 843c99e3ba..5d91d611dd 100644 --- a/subxt/src/rpc/mod.rs +++ b/subxt/src/rpc/mod.rs @@ -58,17 +58,7 @@ pub mod types; pub use rpc::*; pub use rpc_client_t::{ - RawValue, - RpcClientT, - RpcFuture, - RpcSubscription, - RpcSubscriptionId, - RpcSubscriptionStream, + RawValue, RpcClientT, RpcFuture, RpcSubscription, RpcSubscriptionId, RpcSubscriptionStream, }; -pub use rpc_client::{ - rpc_params, - RpcClient, - RpcParams, - Subscription, -}; +pub use rpc_client::{rpc_params, RpcClient, RpcParams, Subscription}; diff --git a/subxt/src/rpc/rpc.rs 
b/subxt/src/rpc/rpc.rs index 09ee3a125c..9dc79c226f 100644 --- a/subxt/src/rpc/rpc.rs +++ b/subxt/src/rpc/rpc.rs @@ -33,25 +33,11 @@ use super::{ rpc_params, - types::{ - self, - ChainHeadEvent, - FollowEvent, - }, - RpcClient, - RpcClientT, - Subscription, -}; -use crate::{ - error::Error, - utils::PhantomDataSendSync, - Config, - Metadata, -}; -use codec::{ - Decode, - Encode, + types::{self, ChainHeadEvent, FollowEvent}, + RpcClient, RpcClientT, Subscription, }; +use crate::{error::Error, utils::PhantomDataSendSync, Config, Metadata}; +use codec::{Decode, Encode}; use frame_metadata::RuntimeMetadataPrefixed; use serde::Serialize; use std::sync::Arc; @@ -222,10 +208,7 @@ impl Rpc { } /// Get a header - pub async fn header( - &self, - hash: Option, - ) -> Result, Error> { + pub async fn header(&self, hash: Option) -> Result, Error> { let params = rpc_params![hash]; let header = self.client.request("chain_getHeader", params).await?; Ok(header) @@ -300,9 +283,7 @@ impl Rpc { } /// Subscribe to all new best block headers. - pub async fn subscribe_best_block_headers( - &self, - ) -> Result, Error> { + pub async fn subscribe_best_block_headers(&self) -> Result, Error> { let subscription = self .client .subscribe( @@ -319,9 +300,7 @@ impl Rpc { } /// Subscribe to all new block headers. - pub async fn subscribe_all_block_headers( - &self, - ) -> Result, Error> { + pub async fn subscribe_all_block_headers(&self) -> Result, Error> { let subscription = self .client .subscribe( @@ -374,10 +353,7 @@ impl Rpc { } /// Create and submit an extrinsic and return corresponding Hash if successful - pub async fn submit_extrinsic( - &self, - extrinsic: X, - ) -> Result { + pub async fn submit_extrinsic(&self, extrinsic: X) -> Result { let bytes: types::Bytes = extrinsic.encode().into(); let params = rpc_params![bytes]; let xt_hash = self @@ -448,10 +424,7 @@ impl Rpc { /// `session_keys` is the SCALE encoded session keys object from the runtime. 
/// /// Returns `true` iff all private keys could be found. - pub async fn has_session_keys( - &self, - session_keys: types::Bytes, - ) -> Result { + pub async fn has_session_keys(&self, session_keys: types::Bytes) -> Result { let params = rpc_params![session_keys]; self.client.request("author_hasSessionKeys", params).await } @@ -459,11 +432,7 @@ impl Rpc { /// Checks if the keystore has private keys for the given public key and key type. /// /// Returns `true` if a private key could be found. - pub async fn has_key( - &self, - public_key: types::Bytes, - key_type: String, - ) -> Result { + pub async fn has_key(&self, public_key: types::Bytes, key_type: String) -> Result { let params = rpc_params![public_key, key_type]; self.client.request("author_hasKey", params).await } @@ -477,8 +446,7 @@ impl Rpc { at: Option, ) -> Result { let params = rpc_params![to_hex(encoded_signed), at]; - let result_bytes: types::Bytes = - self.client.request("system_dryRun", params).await?; + let result_bytes: types::Bytes = self.client.request("system_dryRun", params).await?; Ok(types::decode_dry_run_result(&mut &*result_bytes.0)?) } diff --git a/subxt/src/rpc/rpc_client.rs b/subxt/src/rpc/rpc_client.rs index 2d49dc1926..a23483258c 100644 --- a/subxt/src/rpc/rpc_client.rs +++ b/subxt/src/rpc/rpc_client.rs @@ -2,26 +2,12 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. 
-use super::{ - RpcClientT, - RpcSubscription, - RpcSubscriptionId, -}; +use super::{RpcClientT, RpcSubscription, RpcSubscriptionId}; use crate::error::Error; -use futures::{ - Stream, - StreamExt, -}; -use serde::{ - de::DeserializeOwned, - Serialize, -}; +use futures::{Stream, StreamExt}; +use serde::{de::DeserializeOwned, Serialize}; use serde_json::value::RawValue; -use std::{ - pin::Pin, - sync::Arc, - task::Poll, -}; +use std::{pin::Pin, sync::Arc, task::Poll}; /// A concrete wrapper around an [`RpcClientT`] which exposes the udnerlying interface via some /// higher level methods that make it a little easier to work with. @@ -214,9 +200,8 @@ impl Stream for Subscription { // Decode the inner RawValue to the type we're expecting and map // any errors to the right shape: let res = res.map(|r| { - r.map_err(|e| e.into()).and_then(|raw_val| { - serde_json::from_str(raw_val.get()).map_err(|e| e.into()) - }) + r.map_err(|e| e.into()) + .and_then(|raw_val| serde_json::from_str(raw_val.get()).map_err(|e| e.into())) }); Poll::Ready(res) diff --git a/subxt/src/rpc/rpc_client_t.rs b/subxt/src/rpc/rpc_client_t.rs index 7e1ed0f665..e68515cf6e 100644 --- a/subxt/src/rpc/rpc_client_t.rs +++ b/subxt/src/rpc/rpc_client_t.rs @@ -4,10 +4,7 @@ use crate::error::RpcError; use futures::Stream; -use std::{ - future::Future, - pin::Pin, -}; +use std::{future::Future, pin::Pin}; // Re-exporting for simplicity since it's used a bunch in the trait definition. pub use serde_json::value::RawValue; @@ -56,8 +53,7 @@ pub trait RpcClientT: Send + Sync + 'static { } /// A boxed future that is returned from the [`RpcClientT`] methods. -pub type RpcFuture<'a, T> = - Pin> + Send + 'a>>; +pub type RpcFuture<'a, T> = Pin> + Send + 'a>>; /// The RPC subscription returned from [`RpcClientT`]'s `subscription` method. 
pub struct RpcSubscription { diff --git a/subxt/src/rpc/types.rs b/subxt/src/rpc/types.rs index a1a333f29b..a4ffacf352 100644 --- a/subxt/src/rpc/types.rs +++ b/subxt/src/rpc/types.rs @@ -5,15 +5,9 @@ //! Types sent to/from the Substrate RPC interface. use crate::Config; -use codec::{ - Decode, - Encode, -}; +use codec::{Decode, Encode}; use primitive_types::U256; -use serde::{ - Deserialize, - Serialize, -}; +use serde::{Deserialize, Serialize}; use std::collections::HashMap; // Subscription types are returned from some calls, so expose it with the rest of the returned types. @@ -334,17 +328,7 @@ pub struct BlockStats { /// Storage key. #[derive( - Serialize, - Deserialize, - Hash, - PartialOrd, - Ord, - PartialEq, - Eq, - Clone, - Encode, - Decode, - Debug, + Serialize, Deserialize, Hash, PartialOrd, Ord, PartialEq, Eq, Clone, Encode, Decode, Debug, )] pub struct StorageKey(#[serde(with = "impl_serde::serialize")] pub Vec); impl AsRef<[u8]> for StorageKey { @@ -355,17 +339,7 @@ impl AsRef<[u8]> for StorageKey { /// Storage data. 
#[derive( - Serialize, - Deserialize, - Hash, - PartialOrd, - Ord, - PartialEq, - Eq, - Clone, - Encode, - Decode, - Debug, + Serialize, Deserialize, Hash, PartialOrd, Ord, PartialEq, Eq, Clone, Encode, Decode, Debug, )] pub struct StorageData(#[serde(with = "impl_serde::serialize")] pub Vec); impl AsRef<[u8]> for StorageData { @@ -709,14 +683,10 @@ impl From> for TransactionEventIR { TransactionEventIR::NonBlock(TransactionEventNonBlockIR::Validated) } TransactionEvent::Broadcasted(event) => { - TransactionEventIR::NonBlock(TransactionEventNonBlockIR::Broadcasted( - event, - )) + TransactionEventIR::NonBlock(TransactionEventNonBlockIR::Broadcasted(event)) } TransactionEvent::BestChainBlockIncluded(event) => { - TransactionEventIR::Block( - TransactionEventBlockIR::BestChainBlockIncluded(event), - ) + TransactionEventIR::Block(TransactionEventBlockIR::BestChainBlockIncluded(event)) } TransactionEvent::Finalized(event) => { TransactionEventIR::Block(TransactionEventBlockIR::Finalized(event)) @@ -737,33 +707,21 @@ impl From> for TransactionEventIR { impl From> for TransactionEvent { fn from(value: TransactionEventIR) -> Self { match value { - TransactionEventIR::NonBlock(status) => { - match status { - TransactionEventNonBlockIR::Validated => TransactionEvent::Validated, - TransactionEventNonBlockIR::Broadcasted(event) => { - TransactionEvent::Broadcasted(event) - } - TransactionEventNonBlockIR::Error(event) => { - TransactionEvent::Error(event) - } - TransactionEventNonBlockIR::Invalid(event) => { - TransactionEvent::Invalid(event) - } - TransactionEventNonBlockIR::Dropped(event) => { - TransactionEvent::Dropped(event) - } + TransactionEventIR::NonBlock(status) => match status { + TransactionEventNonBlockIR::Validated => TransactionEvent::Validated, + TransactionEventNonBlockIR::Broadcasted(event) => { + TransactionEvent::Broadcasted(event) } - } - TransactionEventIR::Block(block) => { - match block { - TransactionEventBlockIR::Finalized(event) => { - 
TransactionEvent::Finalized(event) - } - TransactionEventBlockIR::BestChainBlockIncluded(event) => { - TransactionEvent::BestChainBlockIncluded(event) - } + TransactionEventNonBlockIR::Error(event) => TransactionEvent::Error(event), + TransactionEventNonBlockIR::Invalid(event) => TransactionEvent::Invalid(event), + TransactionEventNonBlockIR::Dropped(event) => TransactionEvent::Dropped(event), + }, + TransactionEventIR::Block(block) => match block { + TransactionEventBlockIR::Finalized(event) => TransactionEvent::Finalized(event), + TransactionEventBlockIR::BestChainBlockIncluded(event) => { + TransactionEvent::BestChainBlockIncluded(event) } - } + }, } } } @@ -773,9 +731,7 @@ mod as_string { use super::*; use serde::Deserializer; - pub fn deserialize<'de, D: Deserializer<'de>>( - deserializer: D, - ) -> Result { + pub fn deserialize<'de, D: Deserializer<'de>>(deserializer: D) -> Result { String::deserialize(deserializer)? .parse() .map_err(|e| serde::de::Error::custom(format!("Parsing failed: {e}"))) @@ -789,10 +745,7 @@ mod test { /// A util function to assert the result of serialization and deserialization is the same. pub fn assert_deser(s: &str, expected: T) where - T: std::fmt::Debug - + serde::ser::Serialize - + serde::de::DeserializeOwned - + PartialEq, + T: std::fmt::Debug + serde::ser::Serialize + serde::de::DeserializeOwned + PartialEq, { assert_eq!(serde_json::from_str::(s).unwrap(), expected); assert_eq!(serde_json::to_string(&expected).unwrap(), s); @@ -820,10 +773,8 @@ mod test { ..Default::default() }; - let json = serde_json::to_string(&substrate_runtime_version) - .expect("serializing failed"); - let val: RuntimeVersion = - serde_json::from_str(&json).expect("deserializing failed"); + let json = serde_json::to_string(&substrate_runtime_version).expect("serializing failed"); + let val: RuntimeVersion = serde_json::from_str(&json).expect("deserializing failed"); // We ignore any other properties. 
assert_eq!(val.spec_version, 123); @@ -873,8 +824,7 @@ mod test { InvalidTransaction as SpInvalidTransaction, TransactionValidityError as SpTransactionValidityError, }, - ApplyExtrinsicResult as SpApplyExtrinsicResult, - DispatchError as SpDispatchError, + ApplyExtrinsicResult as SpApplyExtrinsicResult, DispatchError as SpDispatchError, }; let pairs = vec![ @@ -915,8 +865,7 @@ mod test { #[test] fn storage_types_are_substrate_compatible() { use sp_core::storage::{ - StorageChangeSet as SpStorageChangeSet, - StorageData as SpStorageData, + StorageChangeSet as SpStorageChangeSet, StorageData as SpStorageData, StorageKey as SpStorageKey, }; diff --git a/subxt/src/runtime_api/runtime_client.rs b/subxt/src/runtime_api/runtime_client.rs index 745bd4f820..fa5949344a 100644 --- a/subxt/src/runtime_api/runtime_client.rs +++ b/subxt/src/runtime_api/runtime_client.rs @@ -4,16 +4,9 @@ use super::runtime_types::RuntimeApi; -use crate::{ - client::OnlineClientT, - error::Error, - Config, -}; +use crate::{client::OnlineClientT, error::Error, Config}; use derivative::Derivative; -use std::{ - future::Future, - marker::PhantomData, -}; +use std::{future::Future, marker::PhantomData}; /// Execute runtime API calls. #[derive(Derivative)] @@ -51,13 +44,9 @@ where // for the latest block and use that. let block_hash = match block_hash { Some(hash) => hash, - None => { - client - .rpc() - .block_hash(None) - .await? - .expect("substrate RPC returns the best block when no block number is provided; qed") - } + None => client.rpc().block_hash(None).await?.expect( + "substrate RPC returns the best block when no block number is provided; qed", + ), }; Ok(RuntimeApi::new(client, block_hash)) diff --git a/subxt/src/runtime_api/runtime_types.rs b/subxt/src/runtime_api/runtime_types.rs index a449c0e377..d9ddafc1d9 100644 --- a/subxt/src/runtime_api/runtime_types.rs +++ b/subxt/src/runtime_api/runtime_types.rs @@ -2,16 +2,9 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. 
// see LICENSE for license details. -use crate::{ - client::OnlineClientT, - error::Error, - Config, -}; +use crate::{client::OnlineClientT, error::Error, Config}; use derivative::Derivative; -use std::{ - future::Future, - marker::PhantomData, -}; +use std::{future::Future, marker::PhantomData}; /// Execute runtime API calls. #[derive(Derivative)] diff --git a/subxt/src/storage/mod.rs b/subxt/src/storage/mod.rs index b4b2c85424..a71013dd13 100644 --- a/subxt/src/storage/mod.rs +++ b/subxt/src/storage/mod.rs @@ -12,10 +12,7 @@ pub mod utils; pub use storage_client::StorageClient; -pub use storage_type::{ - KeyIter, - Storage, -}; +pub use storage_type::{KeyIter, Storage}; // Re-export as this is used in the public API in this module: pub use crate::rpc::types::StorageKey; @@ -24,22 +21,10 @@ pub use crate::rpc::types::StorageKey; /// entry lives and how to properly decode it. pub mod address { pub use super::storage_address::{ - dynamic, - dynamic_root, - Address, - DynamicAddress, - StaticStorageMapKey, - StorageAddress, - Yes, + dynamic, dynamic_root, Address, DynamicAddress, StaticStorageMapKey, StorageAddress, Yes, }; } // For consistency with other modules, also expose // the basic address stuff at the root of the module. -pub use storage_address::{ - dynamic, - dynamic_root, - Address, - DynamicAddress, - StorageAddress, -}; +pub use storage_address::{dynamic, dynamic_root, Address, DynamicAddress, StorageAddress}; diff --git a/subxt/src/storage/storage_address.rs b/subxt/src/storage/storage_address.rs index 3d69464832..ffb1c41927 100644 --- a/subxt/src/storage/storage_address.rs +++ b/subxt/src/storage/storage_address.rs @@ -3,24 +3,11 @@ // see LICENSE for license details. 
use crate::{ - dynamic::{ - DecodedValueThunk, - Value, - }, - error::{ - Error, - StorageAddressError, - }, - metadata::{ - DecodeWithMetadata, - EncodeWithMetadata, - Metadata, - }, -}; -use frame_metadata::{ - StorageEntryType, - StorageHasher, + dynamic::{DecodedValueThunk, Value}, + error::{Error, StorageAddressError}, + metadata::{DecodeWithMetadata, EncodeWithMetadata, Metadata}, }; +use frame_metadata::{StorageEntryType, StorageHasher}; use scale_info::TypeDef; use std::borrow::Cow; @@ -47,11 +34,7 @@ pub trait StorageAddress { /// Output the non-prefix bytes; that is, any additional bytes that need /// to be appended to the key to dig into maps. - fn append_entry_bytes( - &self, - metadata: &Metadata, - bytes: &mut Vec, - ) -> Result<(), Error>; + fn append_entry_bytes(&self, metadata: &Metadata, bytes: &mut Vec) -> Result<(), Error>; /// An optional hash which, if present, will be checked against /// the node metadata to confirm that the return type matches what @@ -77,8 +60,7 @@ pub struct Address { /// A typical storage address constructed at runtime rather than via the `subxt` macro; this /// has no restriction on what it can be used for (since we don't statically know). 
-pub type DynamicAddress = - Address; +pub type DynamicAddress = Address; impl Address @@ -154,11 +136,7 @@ where &self.entry_name } - fn append_entry_bytes( - &self, - metadata: &Metadata, - bytes: &mut Vec, - ) -> Result<(), Error> { + fn append_entry_bytes(&self, metadata: &Metadata, bytes: &mut Vec) -> Result<(), Error> { let pallet = metadata.pallet(&self.pallet_name)?; let storage = pallet.storage(&self.entry_name)?; @@ -193,7 +171,7 @@ where expected: type_ids.len(), actual: self.storage_entry_keys.len(), } - .into()) + .into()); } if hashers.len() == 1 { diff --git a/subxt/src/storage/storage_client.rs b/subxt/src/storage/storage_client.rs index 8a7b82185f..ee97bef1fb 100644 --- a/subxt/src/storage/storage_client.rs +++ b/subxt/src/storage/storage_client.rs @@ -3,27 +3,17 @@ // see LICENSE for license details. use super::{ - storage_type::{ - validate_storage_address, - Storage, - }, - utils, - StorageAddress, + storage_type::{validate_storage_address, Storage}, + utils, StorageAddress, }; use crate::{ - client::{ - OfflineClientT, - OnlineClientT, - }, + client::{OfflineClientT, OnlineClientT}, error::Error, Config, }; use derivative::Derivative; -use std::{ - future::Future, - marker::PhantomData, -}; +use std::{future::Future, marker::PhantomData}; /// Query the runtime storage. #[derive(Derivative)] @@ -52,19 +42,13 @@ where /// if the address is valid (or if it's not possible to check since the address has no validation hash). /// Return an error if the address was not valid or something went wrong trying to validate it (ie /// the pallet or storage entry in question do not exist at all). - pub fn validate( - &self, - address: &Address, - ) -> Result<(), Error> { + pub fn validate(&self, address: &Address) -> Result<(), Error> { validate_storage_address(address, &self.client.metadata()) } /// Convert some storage address into the raw bytes that would be submitted to the node in order /// to retrieve the entries at the root of the associated address. 
- pub fn address_root_bytes( - &self, - address: &Address, - ) -> Vec { + pub fn address_root_bytes(&self, address: &Address) -> Vec { utils::storage_address_root_bytes(address) } @@ -99,13 +83,11 @@ where // for the latest block and use that. let block_hash = match block_hash { Some(hash) => hash, - None => { - client - .rpc() - .block_hash(None) - .await? - .expect("didn't pass a block number; qed") - } + None => client + .rpc() + .block_hash(None) + .await? + .expect("didn't pass a block number; qed"), }; Ok(Storage::new(client, block_hash)) diff --git a/subxt/src/storage/storage_type.rs b/subxt/src/storage/storage_type.rs index 201f754976..108a95e730 100644 --- a/subxt/src/storage/storage_type.rs +++ b/subxt/src/storage/storage_type.rs @@ -2,30 +2,18 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -use super::storage_address::{ - StorageAddress, - Yes, -}; +use super::storage_address::{StorageAddress, Yes}; use crate::{ client::OnlineClientT, error::Error, - metadata::{ - DecodeWithMetadata, - Metadata, - }, - rpc::types::{ - StorageData, - StorageKey, - }, + metadata::{DecodeWithMetadata, Metadata}, + rpc::types::{StorageData, StorageKey}, Config, }; use derivative::Derivative; use frame_metadata::StorageEntryType; use scale_info::form::PortableForm; -use std::{ - future::Future, - marker::PhantomData, -}; +use std::{future::Future, marker::PhantomData}; /// Query the runtime storage. #[derive(Derivative)] @@ -150,15 +138,10 @@ where // We have to dig into metadata already, so no point using the optimised `decode_storage_with_metadata` call. 
let pallet_metadata = metadata.pallet(pallet_name)?; let storage_metadata = pallet_metadata.storage(storage_name)?; - let return_ty_id = - return_type_from_storage_entry_type(&storage_metadata.ty); + let return_ty_id = return_type_from_storage_entry_type(&storage_metadata.ty); let bytes = &mut &storage_metadata.default[..]; - let val = Address::Target::decode_with_metadata( - bytes, - return_ty_id, - &metadata, - )?; + let val = Address::Target::decode_with_metadata(bytes, return_ty_id, &metadata)?; Ok(val) } } @@ -237,11 +220,8 @@ where // Look up the return type for flexible decoding. Do this once here to avoid // potentially doing it every iteration if we used `decode_storage_with_metadata` // in the iterator. - let return_type_id = lookup_storage_return_type( - &metadata, - address.pallet_name(), - address.entry_name(), - )?; + let return_type_id = + lookup_storage_return_type(&metadata, address.pallet_name(), address.entry_name())?; // The root pallet/entry bytes for this storage entry: let address_root_bytes = super::utils::storage_address_root_bytes(&address); @@ -289,7 +269,7 @@ where self.return_type_id, &self.metadata, )?; - return Ok(Some((k, val))) + return Ok(Some((k, val))); } else { let start_key = self.start_key.take(); let keys = self @@ -302,7 +282,7 @@ where .await?; if keys.is_empty() { - return Ok(None) + return Ok(None); } self.start_key = keys.last().cloned(); @@ -350,13 +330,11 @@ fn validate_storage( }; match expected_hash == hash { true => Ok(()), - false => { - Err(crate::error::MetadataError::IncompatibleStorageMetadata( - pallet_name.into(), - storage_name.into(), - ) - .into()) - } + false => Err(crate::error::MetadataError::IncompatibleStorageMetadata( + pallet_name.into(), + storage_name.into(), + ) + .into()), } } diff --git a/subxt/src/storage/utils.rs b/subxt/src/storage/utils.rs index a776aa65f4..bcc290e4b6 100644 --- a/subxt/src/storage/utils.rs +++ b/subxt/src/storage/utils.rs @@ -7,10 +7,7 @@ //! the trait itself. 
use super::StorageAddress; -use crate::{ - error::Error, - metadata::Metadata, -}; +use crate::{error::Error, metadata::Metadata}; /// Return the root of a given [`StorageAddress`]: hash the pallet name and entry name /// and append those bytes to the output. @@ -35,9 +32,7 @@ pub(crate) fn storage_address_bytes( } /// Outputs a vector containing the bytes written by [`write_storage_address_root_bytes`]. -pub(crate) fn storage_address_root_bytes( - addr: &Address, -) -> Vec { +pub(crate) fn storage_address_root_bytes(addr: &Address) -> Vec { let mut bytes = Vec::new(); write_storage_address_root_bytes(addr, &mut bytes); bytes diff --git a/subxt/src/tx/mod.rs b/subxt/src/tx/mod.rs index 6ded187f09..037708f461 100644 --- a/subxt/src/tx/mod.rs +++ b/subxt/src/tx/mod.rs @@ -21,20 +21,7 @@ pub use self::signer::PairSigner; pub use self::{ signer::Signer, - tx_client::{ - SubmittableExtrinsic, - TxClient, - }, - tx_payload::{ - dynamic, - BoxedPayload, - DynamicPayload, - Payload, - TxPayload, - }, - tx_progress::{ - TxInBlock, - TxProgress, - TxStatus, - }, + tx_client::{SubmittableExtrinsic, TxClient}, + tx_payload::{dynamic, BoxedPayload, DynamicPayload, Payload, TxPayload}, + tx_progress::{TxInBlock, TxProgress, TxStatus}, }; diff --git a/subxt/src/tx/signer.rs b/subxt/src/tx/signer.rs index 8a9583ad44..21cffbd2c7 100644 --- a/subxt/src/tx/signer.rs +++ b/subxt/src/tx/signer.rs @@ -35,12 +35,8 @@ mod pair_signer { use crate::Config; use sp_core::Pair as PairT; use sp_runtime::{ - traits::{ - IdentifyAccount, - Verify, - }, - AccountId32 as SpAccountId32, - MultiSignature as SpMultiSignature, + traits::{IdentifyAccount, Verify}, + AccountId32 as SpAccountId32, MultiSignature as SpMultiSignature, }; /// A [`Signer`] implementation that can be constructed from an [`sp_core::Pair`]. @@ -62,8 +58,8 @@ mod pair_signer { { /// Creates a new [`Signer`] from an [`sp_core::Pair`]. 
pub fn new(signer: Pair) -> Self { - let account_id = ::Signer::from(signer.public()) - .into_account(); + let account_id = + ::Signer::from(signer.public()).into_account(); Self { account_id: account_id.into(), signer, diff --git a/subxt/src/tx/tx_client.rs b/subxt/src/tx/tx_client.rs index edcc6b1eb4..de3ab4e406 100644 --- a/subxt/src/tx/tx_client.rs +++ b/subxt/src/tx/tx_client.rs @@ -4,29 +4,13 @@ use super::TxPayload; use crate::{ - client::{ - OfflineClientT, - OnlineClientT, - }, - config::{ - Config, - ExtrinsicParams, - Hasher, - }, + client::{OfflineClientT, OnlineClientT}, + config::{Config, ExtrinsicParams, Hasher}, error::Error, - tx::{ - Signer as SignerT, - TxProgress, - }, - utils::{ - Encoded, - PhantomDataSendSync, - }, -}; -use codec::{ - Compact, - Encode, + tx::{Signer as SignerT, TxProgress}, + utils::{Encoded, PhantomDataSendSync}, }; +use codec::{Compact, Encode}; use derivative::Derivative; use std::borrow::Cow; @@ -62,14 +46,13 @@ impl> TxClient { { if let Some(details) = call.validation_details() { let metadata = self.client.metadata(); - let expected_hash = - metadata.call_hash(details.pallet_name, details.call_name)?; + let expected_hash = metadata.call_hash(details.pallet_name, details.call_name)?; if details.hash != expected_hash { return Err(crate::metadata::MetadataError::IncompatibleCallMetadata( details.pallet_name.into(), details.call_name.into(), ) - .into()) + .into()); } } Ok(()) @@ -87,10 +70,7 @@ impl> TxClient { } /// Creates an unsigned extrinsic without submitting it. 
- pub fn create_unsigned( - &self, - call: &Call, - ) -> Result, Error> + pub fn create_unsigned(&self, call: &Call) -> Result, Error> where Call: TxPayload, { @@ -107,8 +87,7 @@ impl> TxClient { call.encode_call_data_to(&self.client.metadata(), &mut encoded_inner)?; // now, prefix byte length: let len = Compact( - u32::try_from(encoded_inner.len()) - .expect("extrinsic size expected to be <4GB"), + u32::try_from(encoded_inner.len()).expect("extrinsic size expected to be <4GB"), ); let mut encoded = Vec::new(); len.encode_to(&mut encoded); @@ -193,10 +172,7 @@ where C: OnlineClientT, { // Get the next account nonce to use. - async fn next_account_nonce( - &self, - account_id: &T::AccountId, - ) -> Result { + async fn next_account_nonce(&self, account_id: &T::AccountId) -> Result { self.client .rpc() .system_account_next_index(account_id) @@ -399,8 +375,7 @@ where encoded_inner.extend(&self.call_data); // now, prefix byte length: let len = Compact( - u32::try_from(encoded_inner.len()) - .expect("extrinsic size expected to be <4GB"), + u32::try_from(encoded_inner.len()).expect("extrinsic size expected to be <4GB"), ); let mut encoded = Vec::new(); len.encode_to(&mut encoded); diff --git a/subxt/src/tx/tx_payload.rs b/subxt/src/tx/tx_payload.rs index 79d4ef83fe..c66aa76e0d 100644 --- a/subxt/src/tx/tx_payload.rs +++ b/subxt/src/tx/tx_payload.rs @@ -5,32 +5,17 @@ //! This module contains the trait and types used to represent //! transactions that can be submitted. -use crate::{ - dynamic::Value, - error::Error, - metadata::Metadata, -}; +use crate::{dynamic::Value, error::Error, metadata::Metadata}; use codec::Encode; use scale_encode::EncodeAsFields; -use scale_value::{ - Composite, - ValueDef, - Variant, -}; -use std::{ - borrow::Cow, - sync::Arc, -}; +use scale_value::{Composite, ValueDef, Variant}; +use std::{borrow::Cow, sync::Arc}; /// This represents a transaction payload that can be submitted /// to a node. 
pub trait TxPayload { /// Encode call data to the provided output. - fn encode_call_data_to( - &self, - metadata: &Metadata, - out: &mut Vec, - ) -> Result<(), Error>; + fn encode_call_data_to(&self, metadata: &Metadata, out: &mut Vec) -> Result<(), Error>; /// Encode call data and return the output. This is a convenience /// wrapper around [`TxPayload::encode_call_data_to`]. @@ -151,11 +136,7 @@ impl Payload> { } impl TxPayload for Payload { - fn encode_call_data_to( - &self, - metadata: &Metadata, - out: &mut Vec, - ) -> Result<(), Error> { + fn encode_call_data_to(&self, metadata: &Metadata, out: &mut Vec) -> Result<(), Error> { let pallet = metadata.pallet(&self.pallet_name)?; let call = pallet.call(&self.call_name)?; @@ -171,12 +152,10 @@ impl TxPayload for Payload { } fn validation_details(&self) -> Option> { - self.validation_hash.map(|hash| { - ValidationDetails { - pallet_name: &self.pallet_name, - call_name: &self.call_name, - hash, - } + self.validation_hash.map(|hash| ValidationDetails { + pallet_name: &self.pallet_name, + call_name: &self.call_name, + hash, }) } } diff --git a/subxt/src/tx/tx_progress.rs b/subxt/src/tx/tx_progress.rs index c6b0179f1d..2fa1af119d 100644 --- a/subxt/src/tx/tx_progress.rs +++ b/subxt/src/tx/tx_progress.rs @@ -8,24 +8,13 @@ use std::task::Poll; use crate::{ client::OnlineClientT, - error::{ - DispatchError, - Error, - RpcError, - TransactionError, - }, + error::{DispatchError, Error, RpcError, TransactionError}, events::EventsClient, - rpc::types::{ - Subscription, - SubstrateTxStatus, - }, + rpc::types::{Subscription, SubstrateTxStatus}, Config, }; use derivative::Derivative; -use futures::{ - Stream, - StreamExt, -}; +use futures::{Stream, StreamExt}; /// This struct represents a subscription to the progress of some transaction. 
#[derive(Derivative)] @@ -163,11 +152,7 @@ impl> Stream for TxProgress { SubstrateTxStatus::Ready => TxStatus::Ready, SubstrateTxStatus::Broadcast(peers) => TxStatus::Broadcast(peers), SubstrateTxStatus::InBlock(hash) => { - TxStatus::InBlock(TxInBlock::new( - hash, - self.ext_hash, - self.client.clone(), - )) + TxStatus::InBlock(TxInBlock::new(hash, self.ext_hash, self.client.clone())) } SubstrateTxStatus::Retracted(hash) => TxStatus::Retracted(hash), SubstrateTxStatus::Usurped(hash) => TxStatus::Usurped(hash), @@ -188,11 +173,7 @@ impl> Stream for TxProgress { } SubstrateTxStatus::Finalized(hash) => { self.sub = None; - TxStatus::Finalized(TxInBlock::new( - hash, - self.ext_hash, - self.client.clone(), - )) + TxStatus::Finalized(TxInBlock::new(hash, self.ext_hash, self.client.clone())) } } }) @@ -335,9 +316,7 @@ impl> TxInBlock { /// /// **Note:** This has to download block details from the node and decode events /// from them. - pub async fn wait_for_success( - &self, - ) -> Result, Error> { + pub async fn wait_for_success(&self) -> Result, Error> { let events = self.fetch_events().await?; // Try to find any errors; return the first one we encounter. @@ -346,7 +325,7 @@ impl> TxInBlock { if ev.pallet_name() == "System" && ev.variant_name() == "ExtrinsicFailed" { let dispatch_error = DispatchError::decode_from(ev.field_bytes(), &self.client.metadata()); - return Err(dispatch_error.into()) + return Err(dispatch_error.into()); } } @@ -367,7 +346,9 @@ impl> TxInBlock { .await? .ok_or(Error::Transaction(TransactionError::BlockHashNotFound))?; - let extrinsic_idx = block.block.extrinsics + let extrinsic_idx = block + .block + .extrinsics .iter() .position(|ext| { use crate::config::Hasher; diff --git a/subxt/src/utils/account_id.rs b/subxt/src/utils/account_id.rs index 2d8423937d..3ecd3ca7ff 100644 --- a/subxt/src/utils/account_id.rs +++ b/subxt/src/utils/account_id.rs @@ -6,14 +6,8 @@ //! 
This doesn't contain much functionality itself, but is easy to convert to/from an `sp_core::AccountId32` //! for instance, to gain functionality without forcing a dependency on Substrate crates here. -use codec::{ - Decode, - Encode, -}; -use serde::{ - Deserialize, - Serialize, -}; +use codec::{Decode, Encode}; +use serde::{Deserialize, Serialize}; /// A 32-byte cryptographic identifier. This is a simplified version of Substrate's /// `sp_core::crypto::AccountId32`. To obtain more functionality, convert this into @@ -80,7 +74,7 @@ impl AccountId32 { use base58::FromBase58; let data = s.from_base58().map_err(|_| FromSs58Error::BadBase58)?; if data.len() < 2 { - return Err(FromSs58Error::BadLength) + return Err(FromSs58Error::BadLength); } let prefix_len = match data[0] { 0..=63 => 1, @@ -88,14 +82,13 @@ impl AccountId32 { _ => return Err(FromSs58Error::InvalidPrefix), }; if data.len() != prefix_len + body_len + CHECKSUM_LEN { - return Err(FromSs58Error::BadLength) + return Err(FromSs58Error::BadLength); } let hash = ss58hash(&data[0..body_len + prefix_len]); let checksum = &hash[0..CHECKSUM_LEN]; - if data[body_len + prefix_len..body_len + prefix_len + CHECKSUM_LEN] != *checksum - { + if data[body_len + prefix_len..body_len + prefix_len + CHECKSUM_LEN] != *checksum { // Invalid checksum. - return Err(FromSs58Error::InvalidChecksum) + return Err(FromSs58Error::InvalidChecksum); } let result = data[prefix_len..body_len + prefix_len] @@ -121,10 +114,7 @@ pub enum FromSs58Error { // We do this just to get a checksum to help verify the validity of the address in to_ss58check fn ss58hash(data: &[u8]) -> Vec { - use blake2::{ - Blake2b512, - Digest, - }; + use blake2::{Blake2b512, Digest}; const PREFIX: &[u8] = b"SS58PRE"; let mut ctx = Blake2b512::new(); ctx.update(PREFIX); diff --git a/subxt/src/utils/bits.rs b/subxt/src/utils/bits.rs index 60e63c56fd..fc7ccbe93b 100644 --- a/subxt/src/utils/bits.rs +++ b/subxt/src/utils/bits.rs @@ -4,16 +4,9 @@ //! 
Generic `scale_bits` over `bitvec`-like `BitOrder` and `BitFormat` types. -use codec::{ - Compact, - Input, -}; +use codec::{Compact, Input}; use scale_bits::{ - scale::format::{ - Format, - OrderFormat, - StoreFormat, - }, + scale::format::{Format, OrderFormat, StoreFormat}, Bits, }; use scale_decode::IntoVisitor; @@ -109,7 +102,7 @@ impl codec::Decode for DecodedBits>::decode(input)?; // Otherwise it is impossible to store it on 32bit machine. if bits > ARCH32BIT_BITSLICE_MAX_BITS { - return Err("Attempt to decode a BitVec with too many bits".into()) + return Err("Attempt to decode a BitVec with too many bits".into()); } // NOTE: Replace with `bits.div_ceil(Store::BITS)` if `int_roundings` is stabilised let elements = (bits / Store::BITS) + u32::from(bits % Store::BITS != 0); @@ -125,8 +118,7 @@ impl codec::Decode for DecodedBits())?; + let decoder = scale_bits::decode_using_format_from(&storage, bit_format::())?; let bits = decoder.collect::, _>>()?; let bits = Bits::from_iter(bits); @@ -172,11 +164,9 @@ impl scale_decode::Visitor for DecodedBitsVisitor { types, Bits::into_visitor(), ) - .map(|bits| { - DecodedBits { - bits, - _marker: PhantomData, - } + .map(|bits| DecodedBits { + bits, + _marker: PhantomData, }); scale_decode::visitor::DecodeAsTypeResult::Decoded(res) } diff --git a/subxt/src/utils/mod.rs b/subxt/src/utils/mod.rs index 8a4bd3823a..6dfbe25121 100644 --- a/subxt/src/utils/mod.rs +++ b/subxt/src/utils/mod.rs @@ -10,10 +10,7 @@ mod multi_address; mod multi_signature; mod wrapper_opaque; -use codec::{ - Decode, - Encode, -}; +use codec::{Decode, Encode}; use derivative::Derivative; pub use account_id::AccountId32; @@ -23,11 +20,7 @@ pub use wrapper_opaque::WrapperKeepOpaque; // Used in codegen #[doc(hidden)] -pub use primitive_types::{ - H160, - H256, - H512, -}; +pub use primitive_types::{H160, H256, H512}; /// Wraps an already encoded byte vector, prevents being encoded as a raw byte vector as part of /// the transaction payload diff --git 
a/subxt/src/utils/multi_address.rs b/subxt/src/utils/multi_address.rs index c1975b2e85..802183a174 100644 --- a/subxt/src/utils/multi_address.rs +++ b/subxt/src/utils/multi_address.rs @@ -6,10 +6,7 @@ //! This doesn't contain much functionality itself, but is easy to convert to/from an `sp_runtime::MultiAddress` //! for instance, to gain functionality without forcing a dependency on Substrate crates here. -use codec::{ - Decode, - Encode, -}; +use codec::{Decode, Encode}; /// A multi-format address wrapper for on-chain accounts. This is a simplified version of Substrate's /// `sp_runtime::MultiAddress`. To obtain more functionality, convert this into that type (this conversion @@ -48,10 +45,7 @@ impl From for MultiAddress From for MultiAddress { fn from(value: sp_runtime::AccountId32) -> Self { diff --git a/subxt/src/utils/multi_signature.rs b/subxt/src/utils/multi_signature.rs index 482bcc1f6a..8b663bf277 100644 --- a/subxt/src/utils/multi_signature.rs +++ b/subxt/src/utils/multi_signature.rs @@ -6,10 +6,7 @@ //! This doesn't contain much functionality itself, but is easy to convert to/from an `sp_runtime::MultiSignature` //! for instance, to gain functionality without forcing a dependency on Substrate crates here. -use codec::{ - Decode, - Encode, -}; +use codec::{Decode, Encode}; /// Signature container that can store known signature types. This is a simplified version of /// `sp_runtime::MultiSignature`. To obtain more functionality, convert this into that type. diff --git a/subxt/src/utils/wrapper_opaque.rs b/subxt/src/utils/wrapper_opaque.rs index 2bf81954ef..7a1efa1cde 100644 --- a/subxt/src/utils/wrapper_opaque.rs +++ b/subxt/src/utils/wrapper_opaque.rs @@ -3,17 +3,9 @@ // see LICENSE for license details. 
use super::PhantomDataSendSync; -use codec::{ - Compact, - Decode, - DecodeAll, - Encode, -}; +use codec::{Compact, Decode, DecodeAll, Encode}; use derivative::Derivative; -use scale_decode::{ - IntoVisitor, - Visitor, -}; +use scale_decode::{IntoVisitor, Visitor}; use scale_encode::EncodeAsType; /// A wrapper for any type `T` which implement encode/decode in a way compatible with `Vec`. @@ -88,11 +80,7 @@ impl EncodeAsType for WrapperKeepOpaque { types: &scale_info::PortableRegistry, out: &mut Vec, ) -> Result<(), scale_encode::Error> { - use scale_encode::error::{ - Error, - ErrorKind, - Kind, - }; + use scale_encode::error::{Error, ErrorKind, Kind}; let Some(ty) = types.resolve(type_id) else { return Err(Error::new(ErrorKind::TypeNotFound(type_id))) @@ -111,7 +99,7 @@ impl EncodeAsType for WrapperKeepOpaque { return Err(Error::new(ErrorKind::WrongShape { actual: Kind::Struct, expected: type_id, - })) + })); } // Just blat the bytes out. @@ -130,21 +118,18 @@ impl Visitor for WrapperKeepOpaqueVisitor { value: &mut scale_decode::visitor::types::Composite<'scale, 'info>, _type_id: scale_decode::visitor::TypeId, ) -> Result, Self::Error> { - use scale_decode::error::{ - Error, - ErrorKind, - }; + use scale_decode::error::{Error, ErrorKind}; if value.path().ident().as_deref() != Some("WrapperKeepOpaque") { return Err(Error::new(ErrorKind::Custom( "Type to decode is not 'WrapperTypeKeepOpaque'".into(), - ))) + ))); } if value.remaining() != 2 { return Err(Error::new(ErrorKind::WrongLength { actual_len: value.remaining(), expected_len: 2, - })) + })); } // The field to decode is a compact len followed by bytes. Decode the length, then grab the bytes. 
@@ -184,13 +169,7 @@ mod test { impl scale_info::TypeInfo for WrapperKeepOpaque { type Identity = Self; fn type_info() -> scale_info::Type { - use scale_info::{ - build::Fields, - meta_type, - Path, - Type, - TypeParameter, - }; + use scale_info::{build::Fields, meta_type, Path, Type, TypeParameter}; Type::builder() .path(Path::new("WrapperKeepOpaque", module_path!())) @@ -204,8 +183,7 @@ mod test { } /// Given a type definition, return type ID and registry representing it. - fn make_type( - ) -> (u32, scale_info::PortableRegistry) { + fn make_type() -> (u32, scale_info::PortableRegistry) { let m = scale_info::MetaType::new::(); let mut types = scale_info::Registry::new(); let id = types.register_type(&m); @@ -239,8 +217,7 @@ mod test { .expect("decode-as-type decodes"); let decode_scale_codec_bytes = &mut &*scale_codec_encoded; - let decoded_scale_codec = - T::decode(decode_scale_codec_bytes).expect("scale-codec decodes"); + let decoded_scale_codec = T::decode(decode_scale_codec_bytes).expect("scale-codec decodes"); assert!( decode_as_type_bytes.is_empty(), diff --git a/testing/integration-tests/src/blocks/mod.rs b/testing/integration-tests/src/blocks/mod.rs index a3730752fd..ae25594d76 100644 --- a/testing/integration-tests/src/blocks/mod.rs +++ b/testing/integration-tests/src/blocks/mod.rs @@ -3,10 +3,7 @@ // see LICENSE for license details. use crate::test_context; -use codec::{ - Compact, - Decode, -}; +use codec::{Compact, Decode}; use frame_metadata::RuntimeMetadataPrefixed; use futures::StreamExt; diff --git a/testing/integration-tests/src/client/mod.rs b/testing/integration-tests/src/client/mod.rs index 925f909674..7076967481 100644 --- a/testing/integration-tests/src/client/mod.rs +++ b/testing/integration-tests/src/client/mod.rs @@ -3,31 +3,16 @@ // see LICENSE for license details. 
use crate::{ - pair_signer, - test_context, - test_context_with, - utils::{ - node_runtime, - wait_for_blocks, - }, + pair_signer, test_context, test_context_with, + utils::{node_runtime, wait_for_blocks}, }; use assert_matches::assert_matches; -use codec::{ - Compact, - Decode, - Encode, -}; +use codec::{Compact, Decode, Encode}; use frame_metadata::RuntimeMetadataPrefixed; use sp_core::storage::well_known_keys; use sp_keyring::AccountKeyring; use subxt::{ - rpc::types::{ - ChainHeadEvent, - FollowEvent, - Initialized, - RuntimeEvent, - RuntimeVersionEvent, - }, + rpc::types::{ChainHeadEvent, FollowEvent, Initialized, RuntimeEvent, RuntimeVersionEvent}, tx::Signer, utils::AccountId32, }; @@ -260,8 +245,7 @@ async fn external_signing() { // Sign it (possibly externally). let signature = alice.sign(&signer_payload); // Use this to build a signed extrinsic. - let extrinsic = - partial_extrinsic.sign_with_address_and_signature(&alice.address(), &signature); + let extrinsic = partial_extrinsic.sign_with_address_and_signature(&alice.address(), &signature); // And now submit it. extrinsic @@ -417,8 +401,7 @@ async fn chainhead_unstable_body() { // Expected block's extrinsics scale encoded and hex encoded. let body = api.rpc().block(Some(hash)).await.unwrap().unwrap(); - let extrinsics: Vec> = - body.block.extrinsics.into_iter().map(|ext| ext.0).collect(); + let extrinsics: Vec> = body.block.extrinsics.into_iter().map(|ext| ext.0).collect(); let expected = format!("0x{}", hex::encode(extrinsics.encode())); assert_matches!(event, diff --git a/testing/integration-tests/src/codegen/codegen_documentation.rs b/testing/integration-tests/src/codegen/codegen_documentation.rs index 4080dc9edf..0fb20a55a7 100644 --- a/testing/integration-tests/src/codegen/codegen_documentation.rs +++ b/testing/integration-tests/src/codegen/codegen_documentation.rs @@ -3,12 +3,7 @@ // see LICENSE for license details. 
use regex::Regex; -use subxt_codegen::{ - CratePath, - DerivesRegistry, - RuntimeGenerator, - TypeSubstitutes, -}; +use subxt_codegen::{CratePath, DerivesRegistry, RuntimeGenerator, TypeSubstitutes}; fn load_test_metadata() -> frame_metadata::RuntimeMetadataPrefixed { let bytes = test_runtime::METADATA; @@ -162,7 +157,8 @@ fn check_root_attrs_preserved() { let doc_str_loc = generated_code .find("Some root level documentation") .expect("root docs should be preserved"); - let attr_loc = generated_code.find("some_root_attribute") // '#' is space separated in generated output. + let attr_loc = generated_code + .find("some_root_attribute") // '#' is space separated in generated output. .expect("root attr should be preserved"); let mod_start = generated_code .find("pub mod api") diff --git a/testing/integration-tests/src/frame/balances.rs b/testing/integration-tests/src/frame/balances.rs index be3f6c6235..b1877283be 100644 --- a/testing/integration-tests/src/frame/balances.rs +++ b/testing/integration-tests/src/frame/balances.rs @@ -3,21 +3,12 @@ // see LICENSE for license details. 
use crate::{ - node_runtime::{ - self, - balances, - runtime_types, - system, - }, - pair_signer, - test_context, + node_runtime::{self, balances, runtime_types, system}, + pair_signer, test_context, }; use codec::Decode; use sp_keyring::AccountKeyring; -use subxt::utils::{ - AccountId32, - MultiAddress, -}; +use subxt::utils::{AccountId32, MultiAddress}; #[tokio::test] async fn tx_basic_transfer() -> Result<(), subxt::Error> { @@ -87,11 +78,7 @@ async fn tx_basic_transfer() -> Result<(), subxt::Error> { #[tokio::test] async fn tx_dynamic_transfer() -> Result<(), subxt::Error> { - use subxt::ext::scale_value::{ - At, - Composite, - Value, - }; + use subxt::ext::scale_value::{At, Composite, Value}; let alice = pair_signer(AccountKeyring::Alice.pair()); let bob = pair_signer(AccountKeyring::Bob.pair()); @@ -229,8 +216,7 @@ async fn multiple_transfers_work_nonce_incremented() -> Result<(), subxt::Error> .balances() .transfer(bob_address.clone(), 10_000); for _ in 0..3 { - api - .tx() + api.tx() .sign_and_submit_then_watch_default(&tx, &alice) .await? .wait_for_in_block() // Don't need to wait for finalization; this is quicker. 
diff --git a/testing/integration-tests/src/frame/contracts.rs b/testing/integration-tests/src/frame/contracts.rs index ae7bd1b079..1a6c3a6e33 100644 --- a/testing/integration-tests/src/frame/contracts.rs +++ b/testing/integration-tests/src/frame/contracts.rs @@ -8,26 +8,16 @@ use crate::{ node_runtime::{ self, contracts::events, - runtime_types::{ - pallet_contracts::wasm::Determinism, - sp_weights::weight_v2::Weight, - }, + runtime_types::{pallet_contracts::wasm::Determinism, sp_weights::weight_v2::Weight}, system, }, - test_context, - TestContext, + test_context, TestContext, }; use sp_core::sr25519::Pair; use subxt::{ - tx::{ - PairSigner, - TxProgress, - }, + tx::{PairSigner, TxProgress}, utils::MultiAddress, - Config, - Error, - OnlineClient, - SubstrateConfig, + Config, Error, OnlineClient, SubstrateConfig, }; struct ContractsTestContext { @@ -113,9 +103,7 @@ impl ContractsTestContext { .ok_or_else(|| Error::Other("Failed to find a Instantiated event".into()))?; let _extrinsic_success = events .find_first::<system::events::ExtrinsicSuccess>()?
- .ok_or_else(|| { - Error::Other("Failed to find a ExtrinsicSuccess event".into()) - })?; + .ok_or_else(|| Error::Other("Failed to find a ExtrinsicSuccess event".into()))?; tracing::info!(" Block hash: {:?}", events.block_hash()); tracing::info!(" Code hash: {:?}", code_stored.code_hash); diff --git a/testing/integration-tests/src/frame/staking.rs b/testing/integration-tests/src/frame/staking.rs index a85857213c..b722f2733a 100644 --- a/testing/integration-tests/src/frame/staking.rs +++ b/testing/integration-tests/src/frame/staking.rs @@ -6,32 +6,21 @@ use crate::{ node_runtime::{ self, runtime_types::{ - pallet_staking::{ - RewardDestination, - ValidatorPrefs, - }, + pallet_staking::{RewardDestination, ValidatorPrefs}, sp_arithmetic::per_things::Perbill, }, staking, }, - pair_signer, - test_context, + pair_signer, test_context, }; use assert_matches::assert_matches; -use sp_core::{ - sr25519, - Pair, -}; +use sp_core::{sr25519, Pair}; use sp_keyring::AccountKeyring; -use subxt::error::{ - DispatchError, - Error, -}; +use subxt::error::{DispatchError, Error}; /// Helper function to generate a crypto pair from seed fn get_from_seed(seed: &str) -> sr25519::Pair { - sr25519::Pair::from_string(&format!("//{seed}"), None) - .expect("static values are valid; qed") + sr25519::Pair::from_string(&format!("//{seed}"), None).expect("static values are valid; qed") } fn default_validator_prefs() -> ValidatorPrefs { diff --git a/testing/integration-tests/src/frame/sudo.rs b/testing/integration-tests/src/frame/sudo.rs index 9047f56075..257ec46266 100644 --- a/testing/integration-tests/src/frame/sudo.rs +++ b/testing/integration-tests/src/frame/sudo.rs @@ -5,14 +5,10 @@ use crate::{ node_runtime::{ self, - runtime_types::{ - self, - sp_weights::weight_v2::Weight, - }, + runtime_types::{self, sp_weights::weight_v2::Weight}, sudo, }, - pair_signer, - test_context, + pair_signer, test_context, }; use sp_keyring::AccountKeyring; diff --git 
a/testing/integration-tests/src/frame/system.rs b/testing/integration-tests/src/frame/system.rs index d5d9b5ac7d..9c149c5557 100644 --- a/testing/integration-tests/src/frame/system.rs +++ b/testing/integration-tests/src/frame/system.rs @@ -3,12 +3,8 @@ // see LICENSE for license details. use crate::{ - node_runtime::{ - self, - system, - }, - pair_signer, - test_context, + node_runtime::{self, system}, + pair_signer, test_context, }; use assert_matches::assert_matches; use sp_keyring::AccountKeyring; diff --git a/testing/integration-tests/src/frame/timestamp.rs b/testing/integration-tests/src/frame/timestamp.rs index a365ded117..115d9f620f 100644 --- a/testing/integration-tests/src/frame/timestamp.rs +++ b/testing/integration-tests/src/frame/timestamp.rs @@ -2,10 +2,7 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -use crate::{ - node_runtime, - test_context, -}; +use crate::{node_runtime, test_context}; #[tokio::test] async fn storage_get_current_timestamp() { diff --git a/testing/integration-tests/src/metadata/validation.rs b/testing/integration-tests/src/metadata/validation.rs index 5d8efcb4ea..1e29f5c74d 100644 --- a/testing/integration-tests/src/metadata/validation.rs +++ b/testing/integration-tests/src/metadata/validation.rs @@ -2,37 +2,17 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. 
-use crate::{ - node_runtime, - test_context, - TestContext, -}; +use crate::{node_runtime, test_context, TestContext}; use frame_metadata::{ - ExtrinsicMetadata, - PalletCallMetadata, - PalletMetadata, - PalletStorageMetadata, - RuntimeMetadataPrefixed, - RuntimeMetadataV14, - StorageEntryMetadata, - StorageEntryModifier, + ExtrinsicMetadata, PalletCallMetadata, PalletMetadata, PalletStorageMetadata, + RuntimeMetadataPrefixed, RuntimeMetadataV14, StorageEntryMetadata, StorageEntryModifier, StorageEntryType, }; use scale_info::{ - build::{ - Fields, - Variants, - }, - meta_type, - Path, - Type, - TypeInfo, -}; -use subxt::{ - Metadata, - OfflineClient, - SubstrateConfig, + build::{Fields, Variants}, + meta_type, Path, Type, TypeInfo, }; +use subxt::{Metadata, OfflineClient, SubstrateConfig}; async fn metadata_to_api( metadata: RuntimeMetadataV14, @@ -147,9 +127,7 @@ async fn calls_check() { Variants::new() .variant("unbond", |v| { v.index(0).fields(Fields::named().field(|f| { - f.compact::<u128>() - .name("value") - .type_name("BalanceOf") + f.compact::<u128>().name("value").type_name("BalanceOf") })) }) .variant("withdraw_unbonded", |v| { diff --git a/testing/integration-tests/src/storage/mod.rs b/testing/integration-tests/src/storage/mod.rs index 508da662f2..0c6a5939a8 100644 --- a/testing/integration-tests/src/storage/mod.rs +++ b/testing/integration-tests/src/storage/mod.rs @@ -2,12 +2,7 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details.
-use crate::{ - node_runtime, - pair_signer, - test_context, - utils::wait_for_blocks, -}; +use crate::{node_runtime, pair_signer, test_context, utils::wait_for_blocks}; use sp_keyring::AccountKeyring; use subxt::utils::AccountId32; diff --git a/testing/integration-tests/src/utils/context.rs b/testing/integration-tests/src/utils/context.rs index 1f39aaa56a..980295269f 100644 --- a/testing/integration-tests/src/utils/context.rs +++ b/testing/integration-tests/src/utils/context.rs @@ -2,17 +2,11 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -pub(crate) use crate::{ - node_runtime, - TestNodeProcess, -}; +pub(crate) use crate::{node_runtime, TestNodeProcess}; use sp_core::sr25519::Pair; use sp_keyring::AccountKeyring; -use subxt::{ - tx::PairSigner, - SubstrateConfig, -}; +use subxt::{tx::PairSigner, SubstrateConfig}; /// substrate node should be installed on the $PATH const SUBSTRATE_NODE_PATH: &str = "substrate"; @@ -20,8 +14,10 @@ const SUBSTRATE_NODE_PATH: &str = "substrate"; pub async fn test_context_with(key: AccountKeyring) -> TestContext { let path = std::env::var("SUBSTRATE_NODE_PATH").unwrap_or_else(|_| { if which::which(SUBSTRATE_NODE_PATH).is_err() { - panic!("A substrate binary should be installed on your path for integration tests. \ - See https://github.com/paritytech/subxt/tree/master#integration-testing") + panic!( + "A substrate binary should be installed on your path for integration tests. 
\ + See https://github.com/paritytech/subxt/tree/master#integration-testing" + ) } SUBSTRATE_NODE_PATH.to_string() }); diff --git a/testing/integration-tests/src/utils/node_proc.rs b/testing/integration-tests/src/utils/node_proc.rs index 824950feae..717e99aefd 100644 --- a/testing/integration-tests/src/utils/node_proc.rs +++ b/testing/integration-tests/src/utils/node_proc.rs @@ -4,21 +4,11 @@ use sp_keyring::AccountKeyring; use std::{ - ffi::{ - OsStr, - OsString, - }, - io::{ - BufRead, - BufReader, - Read, - }, + ffi::{OsStr, OsString}, + io::{BufRead, BufReader, Read}, process, }; -use subxt::{ - Config, - OnlineClient, -}; +use subxt::{Config, OnlineClient}; /// Spawn a local substrate node for testing subxt. pub struct TestNodeProcess<R: Config> { @@ -53,7 +43,7 @@ where if let Err(err) = self.proc.kill() { let err = format!("Error killing node process {}: {}", self.proc.id(), err); tracing::error!("{}", err); - return Err(err) + return Err(err); } Ok(()) } @@ -143,24 +133,21 @@ fn find_substrate_port_from_output(r: impl Read + Send + 'static) -> u16 { BufReader::new(r) .lines() .find_map(|line| { - let line = - line.expect("failed to obtain next line from stdout for port discovery"); + let line = line.expect("failed to obtain next line from stdout for port discovery"); // does the line contain our port (we expect this specific output from substrate). let line_end = line .rsplit_once("Listening for new connections on 127.0.0.1:") - .or_else(|| { - line.rsplit_once("Running JSON-RPC WS server: addr=127.0.0.1:") - }) + .or_else(|| line.rsplit_once("Running JSON-RPC WS server: addr=127.0.0.1:")) .map(|(_, port_str)| port_str)?; // trim non-numeric chars from the end of the port part of the line. let port_str = line_end.trim_end_matches(|b: char| !b.is_ascii_digit()); // expect to have a number here (the chars after '127.0.0.1:') and parse them into a u16.
- let port_num = port_str.parse().unwrap_or_else(|_| { - panic!("valid port expected for log line, got '{port_str}'") - }); + let port_num = port_str + .parse() + .unwrap_or_else(|_| panic!("valid port expected for log line, got '{port_str}'")); Some(port_num) }) diff --git a/testing/integration-tests/src/utils/wait_for_blocks.rs b/testing/integration-tests/src/utils/wait_for_blocks.rs index d17ae33076..f827c4f06a 100644 --- a/testing/integration-tests/src/utils/wait_for_blocks.rs +++ b/testing/integration-tests/src/utils/wait_for_blocks.rs @@ -2,10 +2,7 @@ // This file is dual-licensed as Apache-2.0 or GPL-3.0. // see LICENSE for license details. -use subxt::{ - client::OnlineClientT, - Config, -}; +use subxt::{client::OnlineClientT, Config}; /// Wait for blocks to be produced before running tests. Waiting for two blocks /// (the genesis block and another one) seems to be enough to allow tests diff --git a/testing/test-runtime/build.rs b/testing/test-runtime/build.rs index f5048b02e1..c86bc0e86a 100644 --- a/testing/test-runtime/build.rs +++ b/testing/test-runtime/build.rs @@ -3,17 +3,12 @@ // see LICENSE for license details. use std::{ - env, - fs, + env, fs, net::TcpListener, - ops::{ - Deref, - DerefMut, - }, + ops::{Deref, DerefMut}, path::Path, process::Command, - thread, - time, + thread, time, }; static SUBSTRATE_BIN_ENV_VAR: &str = "SUBSTRATE_NODE_PATH"; @@ -25,8 +20,7 @@ async fn main() { async fn run() { // Select substrate binary to run based on env var. - let substrate_bin = - env::var(SUBSTRATE_BIN_ENV_VAR).unwrap_or_else(|_| "substrate".to_owned()); + let substrate_bin = env::var(SUBSTRATE_BIN_ENV_VAR).unwrap_or_else(|_| "substrate".to_owned()); // Run binary. 
let port = next_open_port().expect("Cannot spawn substrate: no available ports"); @@ -38,8 +32,10 @@ async fn run() { let mut cmd = match cmd { Ok(cmd) => KillOnDrop(cmd), Err(ref e) if e.kind() == std::io::ErrorKind::NotFound => { - panic!("A substrate binary should be installed on your path for testing purposes. \ - See https://github.com/paritytech/subxt/tree/master#integration-testing") + panic!( + "A substrate binary should be installed on your path for testing purposes. \ + See https://github.com/paritytech/subxt/tree/master#integration-testing" + ) } Err(e) => { panic!("Cannot spawn substrate command '{substrate_bin}': {e}") @@ -67,7 +63,7 @@ async fn run() { match res { Ok(res) => { let _ = cmd.kill(); - break res + break res; } _ => { thread::sleep(time::Duration::from_secs(1 << retries)); @@ -98,8 +94,7 @@ async fn run() { .expect("Path to metadata should be stringifiable") ); let runtime_path = Path::new(&out_dir).join("runtime.rs"); - fs::write(runtime_path, runtime_api_contents) - .expect("Couldn't write runtime rust output"); + fs::write(runtime_path, runtime_api_contents).expect("Couldn't write runtime rust output"); let substrate_path = which::which(substrate_bin).expect("Cannot resolve path to substrate binary"); @@ -155,26 +150,14 @@ impl Drop for KillOnDrop { // Use jsonrpsee to obtain metadata from the node. 
mod client { pub use jsonrpsee::{ - client_transport::ws::{ - InvalidUri, - Receiver, - Sender, - Uri, - WsTransportClientBuilder, - }, + client_transport::ws::{InvalidUri, Receiver, Sender, Uri, WsTransportClientBuilder}, core::{ - client::{ - Client, - ClientBuilder, - }, + client::{Client, ClientBuilder}, Error, }, }; - pub use jsonrpsee::core::{ - client::ClientT, - rpc_params, - }; + pub use jsonrpsee::core::{client::ClientT, rpc_params}; /// Build WS RPC client from URL pub async fn build(url: &str) -> Result<Client, Error> { diff --git a/testing/ui-tests/src/dispatch_errors.rs b/testing/ui-tests/src/dispatch_errors.rs index 4492d128b6..e8035c89db 100644 --- a/testing/ui-tests/src/dispatch_errors.rs +++ b/testing/ui-tests/src/dispatch_errors.rs @@ -3,11 +3,7 @@ // see LICENSE for license details. use crate::utils::{ - dispatch_error::{ - ArrayDispatchError, - LegacyDispatchError, - NamedFieldDispatchError, - }, + dispatch_error::{ArrayDispatchError, LegacyDispatchError, NamedFieldDispatchError}, generate_metadata_from_pallets_custom_dispatch_error, }; use frame_metadata::RuntimeMetadataPrefixed; @@ -21,7 +17,5 @@ pub fn metadata_legacy_dispatch_error() -> RuntimeMetadataPrefixed { } pub fn metadata_named_field_dispatch_error() -> RuntimeMetadataPrefixed { - generate_metadata_from_pallets_custom_dispatch_error::<NamedFieldDispatchError>( - vec![], - ) + generate_metadata_from_pallets_custom_dispatch_error::<NamedFieldDispatchError>(vec![]) } diff --git a/testing/ui-tests/src/storage.rs b/testing/ui-tests/src/storage.rs index 639d64db25..b6586ffd21 100644 --- a/testing/ui-tests/src/storage.rs +++ b/testing/ui-tests/src/storage.rs @@ -3,10 +3,7 @@ // see LICENSE for license details.
use frame_metadata::{ - RuntimeMetadataPrefixed, - StorageEntryMetadata, - StorageEntryModifier, - StorageEntryType, + RuntimeMetadataPrefixed, StorageEntryMetadata, StorageEntryModifier, StorageEntryType, }; use scale_info::meta_type; diff --git a/testing/ui-tests/src/utils/dispatch_error.rs b/testing/ui-tests/src/utils/dispatch_error.rs index 7e9cdb6fcd..72b9af0aad 100644 --- a/testing/ui-tests/src/utils/dispatch_error.rs +++ b/testing/ui-tests/src/utils/dispatch_error.rs @@ -3,13 +3,8 @@ // see LICENSE for license details. use scale_info::{ - build::{ - Fields, - Variants, - }, - Path, - Type, - TypeInfo, + build::{Fields, Variants}, + Path, Type, TypeInfo, }; /// See the `ModuleErrorType` in `subxt_codegen` for more info on the different DispatchError diff --git a/testing/ui-tests/src/utils/mod.rs b/testing/ui-tests/src/utils/mod.rs index 79d42c647b..7b4a145192 100644 --- a/testing/ui-tests/src/utils/mod.rs +++ b/testing/ui-tests/src/utils/mod.rs @@ -6,27 +6,17 @@ pub mod dispatch_error; mod metadata_test_runner; use frame_metadata::{ - v14::RuntimeMetadataV14, - ExtrinsicMetadata, - PalletMetadata, - PalletStorageMetadata, - RuntimeMetadataPrefixed, - StorageEntryMetadata, -}; -use scale_info::{ - meta_type, - IntoPortable, - TypeInfo, + v14::RuntimeMetadataV14, ExtrinsicMetadata, PalletMetadata, PalletStorageMetadata, + RuntimeMetadataPrefixed, StorageEntryMetadata, }; +use scale_info::{meta_type, IntoPortable, TypeInfo}; pub use metadata_test_runner::MetadataTestRunner; /// Given some pallet metadata, generate a [`RuntimeMetadataPrefixed`] struct. /// We default to a useless extrinsic type, and register a fake `DispatchError` /// type matching the generic type param provided. -pub fn generate_metadata_from_pallets_custom_dispatch_error< - DispatchError: TypeInfo + 'static, ->( +pub fn generate_metadata_from_pallets_custom_dispatch_error<DispatchError: TypeInfo + 'static>( pallets: Vec<PalletMetadata>, ) -> RuntimeMetadataPrefixed { // We don't care about the extrinsic type.
@@ -69,12 +59,10 @@ generate_metadata_from_pallets_custom_dispatch_error< /// Given some pallet metadata, generate a [`RuntimeMetadataPrefixed`] struct. /// We default to a useless extrinsic type, and register a fake `DispatchError` /// type so that codegen is happy with the metadata generated. -pub fn generate_metadata_from_pallets( - pallets: Vec<PalletMetadata>, -) -> RuntimeMetadataPrefixed { - generate_metadata_from_pallets_custom_dispatch_error::< - dispatch_error::ArrayDispatchError, - >(pallets) +pub fn generate_metadata_from_pallets(pallets: Vec<PalletMetadata>) -> RuntimeMetadataPrefixed { + generate_metadata_from_pallets_custom_dispatch_error::<dispatch_error::ArrayDispatchError>( + pallets, + ) } /// Given some storage entries, generate a [`RuntimeMetadataPrefixed`] struct.