Skip to content
This repository has been archived by the owner on Oct 19, 2024. It is now read-only.

Commit

Permalink
fix: always treat abi type structs as tuples (#417)
Browse files Browse the repository at this point in the history
* fix: always treat abi type structs as tuples

* fix: keep decoding for single field structs

* feat: unify event and struct encoding
mattsse authored Sep 2, 2021
1 parent 664ccfe commit 8b5f4bf
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 23 deletions.
25 changes: 2 additions & 23 deletions ethers-contract/ethers-contract-derive/src/lib.rs
Original file line number Diff line number Diff line change
@@ -414,7 +414,7 @@ fn derive_decode_from_log_impl(
.filter(|f| !f.is_indexed())
.map(|f| param_type_quote(&f.param.kind));

let data_types_init = quote! {let data_types = ::std::vec![#( #data_types ),*];};
let data_types_init = quote! {let data_types = [#( #data_types ),*];};

// decode
let (signature_check, flat_topics_init, topic_tokens_len_check) = if event.anonymous {
@@ -478,7 +478,6 @@ fn derive_decode_from_log_impl(
#( tokens.push(#swap_tokens); )*
}
};

Ok(quote! {

let #core_crate::abi::RawLog {data, topics} = log;
@@ -492,7 +491,7 @@

#tokens_init

#core_crate::abi::Detokenize::from_tokens(tokens).map_err(|_|#core_crate::abi::Error::InvalidData)
#core_crate::abi::Tokenizable::from_token(#core_crate::abi::Token::Tuple(tokens)).map_err(|_|#core_crate::abi::Error::InvalidData)
})
}

@@ -808,26 +807,6 @@ fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream {
#core_crate::abi::Token::Tuple(Vec::new())
},
),
1 => {
// This is a hacky solution in order to keep the same tokenstream as for tuples
let from_token = quote! {
let mut iter = Some(token).into_iter();
Ok(#init_struct_impl)
};

// This is a hack to get rid of the trailing comma introduced in the macro that concatenates all the fields
if let Ok(into_token) = into_token_impl
.to_string()
.as_str()
.trim_end_matches(',')
.parse()
{
(from_token, into_token)
} else {
return Error::new(input.span(), "Failed to derive Tokenizeable implementation")
.to_compile_error();
}
}
_ => {
let from_token = quote! {
if let #core_crate::abi::Token::Tuple(tokens) = token {
25 changes: 25 additions & 0 deletions ethers-contract/tests/common/derive.rs
Original file line number Diff line number Diff line change
@@ -24,6 +24,11 @@ struct ValueChangedTuple(Address, Address, String, String);
#[derive(Debug, Clone, PartialEq, EthAbiType)]
struct ValueChangedTupleWrapper(ValueChangedTuple, String);

#[derive(Debug, Clone, PartialEq, EthAbiType)]
struct ValueChangedVecWrapper {
inner: Vec<ValueChanged>,
}

#[test]
fn can_detokenize_struct() {
let value = ValueChanged {
@@ -82,6 +87,26 @@ fn can_detokenize_nested_tuple_struct() {
assert_eq!(value, ValueChangedTupleWrapper::from_token(token).unwrap());
}

#[test]
fn can_detokenize_single_field() {
let value = ValueChangedVecWrapper { inner: vec![] };

let token = value.clone().into_token();
assert_eq!(value, ValueChangedVecWrapper::from_token(token).unwrap());

let value = ValueChangedVecWrapper {
inner: vec![ValueChanged {
old_author: "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee".parse().unwrap(),
new_author: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".parse().unwrap(),
old_value: "50".to_string(),
new_value: "100".to_string(),
}],
};

let token = value.clone().into_token();
assert_eq!(value, ValueChangedVecWrapper::from_token(token).unwrap());
}

#[test]
fn can_derive_eth_event() {
#[derive(Debug, Clone, PartialEq, EthEvent)]

0 comments on commit 8b5f4bf

Please sign in to comment.