From b0f90d1fc2448399cdd6dc72c5b4cbfdba40d5c8 Mon Sep 17 00:00:00 2001
From: Niklas Adolfsson
Date: Mon, 30 Sep 2019 15:28:32 +0200
Subject: [PATCH 1/5] fix(deprecation warning): replace `try!` with `?`

---
 ethabi/src/decoder.rs           | 46 ++++++++++++++++-----------------
 ethabi/src/event.rs             |  4 +--
 ethabi/src/operation.rs         |  7 +++--
 ethabi/src/param_type/reader.rs | 12 ++++-----
 ethabi/src/token/lenient.rs     |  4 +--
 ethabi/src/token/mod.rs         |  6 ++---
 ethabi/src/token/strict.rs      | 11 ++++----
 7 files changed, 44 insertions(+), 46 deletions(-)

diff --git a/ethabi/src/decoder.rs b/ethabi/src/decoder.rs
index 41d499960..7ddee56d2 100644
--- a/ethabi/src/decoder.rs
+++ b/ethabi/src/decoder.rs
@@ -60,7 +60,7 @@ fn take_bytes(slices: &[Word], position: usize, len: usize) -> Result<BytesTaken, Error> {
 
 	let mut bytes_slices = Vec::with_capacity(slices_len);
 	for i in 0..slices_len {
-		let slice = try!(peek(slices, position + i));
+		let slice = peek(slices, position + i)?;
 		bytes_slices.push(slice);
 	}
@@ -77,7 +77,7 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<DecodeResult, Error> {
 	match *param {
 		ParamType::Address => {
-			let slice = try!(peek(slices, offset));
+			let slice = peek(slices, offset)?;
 
 			let mut address = [0u8; 20];
 			address.copy_from_slice(&slice[12..]);
@@ -92,7 +92,7 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<DecodeResult, Error> {
 			Ok(result)
 		},
 		ParamType::Int(_) => {
-			let slice = try!(peek(slices, offset));
+			let slice = peek(slices, offset)?;
 
 			let result = DecodeResult {
 				token: Token::Int(slice.clone().into()),
@@ -102,7 +102,7 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<DecodeResult, Error> {
 			Ok(result)
 		},
 		ParamType::Uint(_) => {
-			let slice = try!(peek(slices, offset));
+			let slice = peek(slices, offset)?;
 
 			let result = DecodeResult {
 				token: Token::Uint(slice.clone().into()),
@@ -112,9 +112,9 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<DecodeResult, Error> {
 			Ok(result)
 		},
 		ParamType::Bool => {
-			let slice = try!(peek(slices, offset));
+			let slice = peek(slices, offset)?;
 
-			let b = try!(as_bool(slice));
+			let b = as_bool(slice)?;
 
 			let result = DecodeResult {
 				token: Token::Bool(b),
@@ -124,7 +124,7 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<DecodeResult, Error> {
 			Ok(result)
 		},
 		ParamType::FixedBytes(len) => {
-			let taken = try!(take_bytes(slices, offset, len));
+			let taken = take_bytes(slices, offset, len)?;
 
 			let result = DecodeResult {
 				token: Token::FixedBytes(taken.bytes),
@@ -134,13 +134,13 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<DecodeResult, Error> {
 			Ok(result)
 		},
 		ParamType::Bytes => {
-			let offset_slice = try!(peek(slices, offset));
-			let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
+			let offset_slice = peek(slices, offset)?;
+			let len_offset = (as_u32(offset_slice)? / 32) as usize;
 
-			let len_slice = try!(peek(slices, len_offset));
-			let len = try!(as_u32(len_slice)) as usize;
+			let len_slice = peek(slices, len_offset)?;
+			let len = as_u32(len_slice)? as usize;
 
-			let taken = try!(take_bytes(slices, len_offset + 1, len));
+			let taken = take_bytes(slices, len_offset + 1, len)?;
 
 			let result = DecodeResult {
 				token: Token::Bytes(taken.bytes),
@@ -150,33 +150,33 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<DecodeResult, Error> {
 			Ok(result)
 		},
 		ParamType::String => {
-			let offset_slice = try!(peek(slices, offset));
-			let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
+			let offset_slice = peek(slices, offset)?;
+			let len_offset = (as_u32(offset_slice)? / 32) as usize;
 
-			let len_slice = try!(peek(slices, len_offset));
-			let len = try!(as_u32(len_slice)) as usize;
+			let len_slice = peek(slices, len_offset)?;
+			let len = as_u32(len_slice)? as usize;
 
-			let taken = try!(take_bytes(slices, len_offset + 1, len));
+			let taken = take_bytes(slices, len_offset + 1, len)?;
 
 			let result = DecodeResult {
-				token: Token::String(try!(String::from_utf8(taken.bytes))),
+				token: Token::String(String::from_utf8(taken.bytes)?),
 				new_offset: offset + 1,
 			};
 
 			Ok(result)
 		},
 		ParamType::Array(ref t) => {
-			let offset_slice = try!(peek(slices, offset));
-			let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
+			let offset_slice = peek(slices, offset)?;
+			let len_offset = (as_u32(offset_slice)? / 32) as usize;
 
-			let len_slice = try!(peek(slices, len_offset));
-			let len = try!(as_u32(len_slice)) as usize;
+			let len_slice = peek(slices, len_offset)?;
+			let len = as_u32(len_slice)? as usize;
 
 			let sub_slices = &slices[len_offset + 1..];
 			let mut tokens = Vec::with_capacity(len);
 			let mut new_offset = 0;
 
 			for _ in 0..len {
-				let res = try!(decode_param(t, &sub_slices, new_offset));
+				let res = decode_param(t, &sub_slices, new_offset)?;
 				new_offset = res.new_offset;
 				tokens.push(res.token);
 			}
diff --git a/ethabi/src/event.rs b/ethabi/src/event.rs
index 4d1109e43..e6f92b4ff 100644
--- a/ethabi/src/event.rs
+++ b/ethabi/src/event.rs
@@ -146,7 +146,7 @@ impl Event {
 			.flat_map(|t| t.as_ref().to_vec())
 			.collect::<Vec<u8>>();
 
-		let topic_tokens = try!(decode(&topic_types, &flat_topics));
+		let topic_tokens = decode(&topic_types, &flat_topics)?;
 
 		// topic may be only a 32 bytes encoded token
 		if topic_tokens.len() != topics_len - to_skip {
@@ -161,7 +161,7 @@ impl Event {
 			.map(|p| p.kind.clone())
 			.collect::<Vec<ParamType>>();
 
-		let data_tokens = try!(decode(&data_types, &data));
+		let data_tokens = decode(&data_types, &data)?;
 
 		let data_named_tokens = data_params.into_iter()
 			.map(|p| p.name)
diff --git a/ethabi/src/operation.rs b/ethabi/src/operation.rs
index a73d72b23..c1b438c7a 100644
--- a/ethabi/src/operation.rs
+++ b/ethabi/src/operation.rs
@@ -21,10 +21,9 @@ pub enum Operation {
 
 impl<'a> Deserialize<'a> for Operation {
 	fn deserialize<D>(deserializer: D) -> Result<Operation, D::Error> where D: Deserializer<'a> {
-		let v: Value = try!(Deserialize::deserialize(deserializer));
-		let cloned = v.clone();
-		let map = try!(cloned.as_object().ok_or_else(|| SerdeError::custom("Invalid operation")));
-		let s = try!(map.get("type").and_then(Value::as_str).ok_or_else(|| SerdeError::custom("Invalid operation type")));
+		let v: Value = Deserialize::deserialize(deserializer)?;
+		let map = v.as_object().ok_or_else(|| SerdeError::custom("Invalid operation"))?;
+		let s = map.get("type").and_then(Value::as_str).ok_or_else(|| SerdeError::custom("Invalid operation type"))?;
 
 		// This is a workaround to support non-spec compliant function and event names,
 		// see: https://github.com/paritytech/parity/issues/4122
diff --git a/ethabi/src/param_type/reader.rs b/ethabi/src/param_type/reader.rs
index 00ca6afb3..f320b6a95 100644
--- a/ethabi/src/param_type/reader.rs
+++ b/ethabi/src/param_type/reader.rs
@@ -21,12 +21,12 @@ impl Reader {
 			let count = name.chars().count();
 			if num.is_empty() {
 				// we already know it's a dynamic array!
-				let subtype = try!(Reader::read(&name[..count - 2]));
+				let subtype = Reader::read(&name[..count - 2])?;
 				return Ok(ParamType::Array(Box::new(subtype)));
 			} else {
 				// it's a fixed array.
-				let len = try!(usize::from_str_radix(&num, 10));
-				let subtype = try!(Reader::read(&name[..count - num.len() - 2]));
+				let len = usize::from_str_radix(&num, 10)?;
+				let subtype = Reader::read(&name[..count - num.len() - 2])?;
 				return Ok(ParamType::FixedArray(Box::new(subtype), len));
 			}
 		}
@@ -39,15 +39,15 @@ impl Reader {
 			"int" => ParamType::Int(256),
 			"uint" => ParamType::Uint(256),
 			s if s.starts_with("int") => {
-				let len = try!(usize::from_str_radix(&s[3..], 10));
+				let len = usize::from_str_radix(&s[3..], 10)?;
 				ParamType::Int(len)
 			},
 			s if s.starts_with("uint") => {
-				let len = try!(usize::from_str_radix(&s[4..], 10));
+				let len = usize::from_str_radix(&s[4..], 10)?;
 				ParamType::Uint(len)
 			},
 			s if s.starts_with("bytes") => {
-				let len = try!(usize::from_str_radix(&s[5..], 10));
+				let len = usize::from_str_radix(&s[5..], 10)?;
 				ParamType::FixedBytes(len)
 			},
 			_ => {
diff --git a/ethabi/src/token/lenient.rs b/ethabi/src/token/lenient.rs
index 8571b6e2f..c16aec752 100644
--- a/ethabi/src/token/lenient.rs
+++ b/ethabi/src/token/lenient.rs
@@ -32,7 +32,7 @@ impl Tokenizer for LenientTokenizer {
 			return result;
 		}
 
-		let uint = try!(u32::from_str_radix(value, 10));
+		let uint = u32::from_str_radix(value, 10)?;
 		Ok(pad_u32(uint))
 	}
 
@@ -42,7 +42,7 @@ impl Tokenizer for LenientTokenizer {
 			return result;
 		}
 
-		let int = try!(i32::from_str_radix(value, 10));
+		let int = i32::from_str_radix(value, 10)?;
 		Ok(pad_i32(int))
 	}
 }
diff --git a/ethabi/src/token/mod.rs b/ethabi/src/token/mod.rs
index e8c5efdd5..76c5b3329 100644
--- a/ethabi/src/token/mod.rs
+++ b/ethabi/src/token/mod.rs
@@ -28,7 +28,7 @@ pub trait Tokenizer {
 	/// Tries to parse a value as a vector of tokens of fixed size.
 	fn tokenize_fixed_array(value: &str, param: &ParamType, len: usize) -> Result<Vec<Token>, Error> {
-		let result = try!(Self::tokenize_array(value, param));
+		let result = Self::tokenize_array(value, param)?;
 		match result.len() == len {
 			true => Ok(result),
 			false => Err(ErrorKind::InvalidData.into()),
@@ -60,7 +60,7 @@ pub trait Tokenizer {
 					return Err(ErrorKind::InvalidData.into());
 				} else if nested == 0 {
 					let sub = &value[last_item..i];
-					let token = try!(Self::tokenize(param, sub));
+					let token = Self::tokenize(param, sub)?;
 					result.push(token);
 					last_item = i + 1;
 				}
@@ -70,7 +70,7 @@ pub trait Tokenizer {
 			},
 			',' if nested == 1 && ignore == false => {
 				let sub = &value[last_item..i];
-				let token = try!(Self::tokenize(param, sub));
+				let token = Self::tokenize(param, sub)?;
 				result.push(token);
 				last_item = i + 1;
 			},
diff --git a/ethabi/src/token/strict.rs b/ethabi/src/token/strict.rs
index d07bb12dc..4ae9bc81b 100644
--- a/ethabi/src/token/strict.rs
+++ b/ethabi/src/token/strict.rs
@@ -7,7 +7,7 @@ pub struct StrictTokenizer;
 
 impl Tokenizer for StrictTokenizer {
 	fn tokenize_address(value: &str) -> Result<[u8; 20], Error> {
-		let hex : Vec<u8> = try!(value.from_hex());
+		let hex: Vec<u8> = value.from_hex()?;
 		match hex.len() == 20 {
 			false => Err(ErrorKind::InvalidData.into()),
 			true => {
@@ -31,12 +31,11 @@ impl Tokenizer for StrictTokenizer {
 	}
 
 	fn tokenize_bytes(value: &str) -> Result<Vec<u8>, Error> {
-		let hex = try!(value.from_hex());
-		Ok(hex)
+		value.from_hex().map_err(Into::into)
 	}
 
 	fn tokenize_fixed_bytes(value: &str, len: usize) -> Result<Vec<u8>, Error> {
-		let hex : Vec<u8> = try!(value.from_hex());
+		let hex: Vec<u8> = value.from_hex()?;
 		match hex.len() == len {
 			true => Ok(hex),
 			false => Err(ErrorKind::InvalidData.into()),
@@ -44,7 +43,7 @@ impl Tokenizer for StrictTokenizer {
 	}
 
 	fn tokenize_uint(value: &str) -> Result<[u8; 32], Error> {
-		let hex : Vec<u8> = try!(value.from_hex());
+		let hex: Vec<u8> = value.from_hex()?;
 		match hex.len() == 32 {
 			true => {
 				let mut uint = [0u8; 32];
@@ -56,7 +55,7 @@ impl Tokenizer for StrictTokenizer {
 	}
 
 	fn tokenize_int(value: &str) -> Result<[u8; 32], Error> {
-		let hex : Vec<u8> = try!(value.from_hex());
+		let hex: Vec<u8> = value.from_hex()?;
 		match hex.len() == 32 {
 			true => {
 				let mut int = [0u8; 32];

From 87c9eff91a89c69a74d94cd27757e507cf618f44 Mon Sep 17 00:00:00 2001
From: Niklas Adolfsson
Date: Mon, 30 Sep 2019 15:29:32 +0200
Subject: [PATCH 2/5] [ethabi]: bump version

---
 ethabi/Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ethabi/Cargo.toml b/ethabi/Cargo.toml
index 69413f949..85579f94f 100644
--- a/ethabi/Cargo.toml
+++ b/ethabi/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ethabi"
-version = "9.0.0"
+version = "9.0.1"
 authors = ["Parity Technologies <admin@parity.io>"]
 homepage = "https://github.com/paritytech/ethabi"
 license = "MIT/Apache-2.0"

From 0de95212758a657ea9f3155df9c9ab784f50c200 Mon Sep 17 00:00:00 2001
From: Niklas Adolfsson
Date: Mon, 30 Sep 2019 15:29:55 +0200
Subject: [PATCH 3/5] [cli]: bump version

---
 cli/Cargo.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/cli/Cargo.toml b/cli/Cargo.toml
index 875cbcf9a..e72fd2768 100644
--- a/cli/Cargo.toml
+++ b/cli/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ethabi-cli"
-version = "9.0.0"
+version = "9.0.1"
 authors = ["Parity Technologies <admin@parity.io>"]
 keywords = ["ethereum", "eth", "abi", "solidity", "cli"]
 description = "Easy to use cli for conversion of ethereum contract calls to bytecode."
@@ -12,7 +12,7 @@
 rustc-hex = "2.0"
 serde = "1.0"
 serde_derive = "1.0"
 docopt = "1.0"
-ethabi = { version = "9.0.0", path = "../ethabi" }
+ethabi = { version = "9.0.1", path = "../ethabi" }
 error-chain = { version = "0.12.1", default-features = false }
 tiny-keccak = "1.0"

From 8944ebe644f0c9d077fdf93fee060cbe5d496c6a Mon Sep 17 00:00:00 2001
From: Niklas Adolfsson
Date: Mon, 30 Sep 2019 15:30:10 +0200
Subject: [PATCH 4/5] [derive]: bump version

---
 derive/Cargo.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/derive/Cargo.toml b/derive/Cargo.toml
index 39590ba34..0cbd2e2df 100644
--- a/derive/Cargo.toml
+++ b/derive/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ethabi-derive"
-version = "9.0.0"
+version = "9.0.1"
 authors = ["Parity Technologies <admin@parity.io>"]
 homepage = "https://github.com/paritytech/ethabi"
 license = "MIT/Apache-2.0"
@@ -11,7 +11,7 @@ description = "Easy to use conversion of ethereum contract calls to bytecode."
 proc-macro = true
 
 [dependencies]
-ethabi = { path = "../ethabi", version = "9.0.0" }
+ethabi = { path = "../ethabi", version = "9.0.1" }
 heck = "0.3"
 syn = "0.15"
 quote = "0.6"

From 4e08afeec9e0f3af93d8ae7a1ac6863f245945cd Mon Sep 17 00:00:00 2001
From: Niklas Adolfsson
Date: Tue, 1 Oct 2019 17:50:03 +0200
Subject: [PATCH 5/5] bump `Cargo.lock`

---
 Cargo.lock | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 5a5fe8817..860907ce9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -104,7 +104,7 @@ dependencies = [
 
 [[package]]
 name = "ethabi"
-version = "9.0.0"
+version = "9.0.1"
 dependencies = [
  "error-chain 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "ethereum-types 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -119,11 +119,11 @@ dependencies = [
 
 [[package]]
 name = "ethabi-cli"
-version = "9.0.0"
+version = "9.0.1"
 dependencies = [
  "docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "error-chain 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "ethabi 9.0.0",
+ "ethabi 9.0.1",
 "rustc-hex 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 1.0.78 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.78 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -136,9 +136,9 @@ version = "9.0.0"
 
 [[package]]
 name = "ethabi-derive"
-version = "9.0.0"
+version = "9.0.1"
 dependencies = [
- "ethabi 9.0.0",
+ "ethabi 9.0.1",
 "heck 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)",
 "quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -149,9 +149,9 @@ dependencies = [
 name = "ethabi-tests"
 version = "0.1.1"
 dependencies = [
- "ethabi 9.0.0",
+ "ethabi 9.0.1",
 "ethabi-contract 9.0.0",
- "ethabi-derive 9.0.0",
+ "ethabi-derive 9.0.1",
 "hex-literal 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-hex 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
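---
Editor's note on the mechanics of patch 1/5 (not part of the commits above):
`try!(expr)` and `expr?` produce the same early return, handing
`Err(From::from(e))` back to the caller on error; `?` is the replacement the
compiler's deprecation warning suggests. A minimal self-contained sketch of
the rewrite follows, using a hypothetical `parse_len` helper for illustration
rather than actual ethabi code:

use std::num::ParseIntError;

// Before this series the body would read:
//     let len = try!(usize::from_str_radix(s, 10));
// After the series, the same early-return is spelled with `?`:
fn parse_len(s: &str) -> Result<usize, ParseIntError> {
    let len = usize::from_str_radix(s, 10)?; // propagates Err to the caller
    Ok(len)
}

fn main() {
    assert_eq!(parse_len("32"), Ok(32));
    assert!(parse_len("0x20").is_err()); // radix-10 parsing rejects the 0x prefix
}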