use ? instead of try! #158

Merged · 5 commits · Oct 1, 2019
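This PR mechanically replaces the old `try!` macro with the `?` operator across the crate (plus a patch version bump). The two forms are equivalent for `Result`: both unwrap an `Ok` value and otherwise return early from the enclosing function, converting the error via `From::from`. A minimal sketch of the rewrite, using only the standard library (`parse_port` is a hypothetical helper for illustration):

```rust
use std::num::ParseIntError;

// Before: the try! macro (superseded by ? since Rust 1.13).
// fn parse_port(s: &str) -> Result<u16, ParseIntError> {
//     let port = try!(u16::from_str_radix(s, 10));
//     Ok(port)
// }

// After: the ? operator, with the same early return on Err
// and the same From conversion of the error type.
fn parse_port(s: &str) -> Result<u16, ParseIntError> {
    let port = u16::from_str_radix(s, 10)?;
    Ok(port)
}

fn main() {
    assert_eq!(parse_port("8080"), Ok(8080));
    assert!(parse_port("not-a-port").is_err());
}
```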
14 changes: 7 additions & 7 deletions Cargo.lock

(Generated lockfile; diff not rendered.)

4 changes: 2 additions & 2 deletions cli/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ethabi-cli"
-version = "9.0.0"
+version = "9.0.1"
 authors = ["Parity Technologies <[email protected]>"]
 keywords = ["ethereum", "eth", "abi", "solidity", "cli"]
 description = "Easy to use cli for conversion of ethereum contract calls to bytecode."
@@ -12,7 +12,7 @@ rustc-hex = "2.0"
 serde = "1.0"
 serde_derive = "1.0"
 docopt = "1.0"
-ethabi = { version = "9.0.0", path = "../ethabi" }
+ethabi = { version = "9.0.1", path = "../ethabi" }
 error-chain = { version = "0.12.1", default-features = false }
 tiny-keccak = "1.0"

4 changes: 2 additions & 2 deletions derive/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ethabi-derive"
-version = "9.0.0"
+version = "9.0.1"
 authors = ["Parity Technologies <[email protected]>"]
 homepage = "https://github.com/paritytech/ethabi"
 license = "MIT/Apache-2.0"
@@ -11,7 +11,7 @@ description = "Easy to use conversion of ethereum contract calls to bytecode."
 proc-macro = true

 [dependencies]
-ethabi = { path = "../ethabi", version = "9.0.0" }
+ethabi = { path = "../ethabi", version = "9.0.1" }
 heck = "0.3"
 syn = "0.15"
 quote = "0.6"

2 changes: 1 addition & 1 deletion ethabi/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ethabi"
-version = "9.0.0"
+version = "9.0.1"
 authors = ["Parity Technologies <[email protected]>"]
 homepage = "https://github.com/paritytech/ethabi"
 license = "MIT/Apache-2.0"

46 changes: 23 additions & 23 deletions ethabi/src/decoder.rs
@@ -60,7 +60,7 @@ fn take_bytes(slices: &[Word], position: usize, len: usize) -> Result<BytesTaken

     let mut bytes_slices = Vec::with_capacity(slices_len);
     for i in 0..slices_len {
-        let slice = try!(peek(slices, position + i));
+        let slice = peek(slices, position + i)?;
         bytes_slices.push(slice);
     }

@@ -80,7 +80,7 @@ fn take_bytes(slices: &[Word], position: usize, len: usize) -> Result<BytesTaken
 fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<DecodeResult, Error> {
     match *param {
         ParamType::Address => {
-            let slice = try!(peek(slices, offset));
+            let slice = peek(slices, offset)?;
             let mut address = [0u8; 20];
             address.copy_from_slice(&slice[12..]);
@@ -92,7 +92,7 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
             Ok(result)
         },
         ParamType::Int(_) => {
-            let slice = try!(peek(slices, offset));
+            let slice = peek(slices, offset)?;

             let result = DecodeResult {
                 token: Token::Int(slice.clone().into()),
@@ -102,7 +102,7 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
             Ok(result)
         },
         ParamType::Uint(_) => {
-            let slice = try!(peek(slices, offset));
+            let slice = peek(slices, offset)?;

             let result = DecodeResult {
                 token: Token::Uint(slice.clone().into()),
@@ -112,9 +112,9 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
             Ok(result)
         },
         ParamType::Bool => {
-            let slice = try!(peek(slices, offset));
+            let slice = peek(slices, offset)?;

-            let b = try!(as_bool(slice));
+            let b = as_bool(slice)?;

             let result = DecodeResult {
                 token: Token::Bool(b),
@@ -124,7 +124,7 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
             Ok(result)
         },
         ParamType::FixedBytes(len) => {
-            let taken = try!(take_bytes(slices, offset, len));
+            let taken = take_bytes(slices, offset, len)?;

             let result = DecodeResult {
                 token: Token::FixedBytes(taken.bytes),
@@ -134,13 +134,13 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
             Ok(result)
         },
         ParamType::Bytes => {
-            let offset_slice = try!(peek(slices, offset));
-            let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
+            let offset_slice = peek(slices, offset)?;
+            let len_offset = (as_u32(offset_slice)? / 32) as usize;

-            let len_slice = try!(peek(slices, len_offset));
-            let len = try!(as_u32(len_slice)) as usize;
+            let len_slice = peek(slices, len_offset)?;
+            let len = as_u32(len_slice)? as usize;

-            let taken = try!(take_bytes(slices, len_offset + 1, len));
+            let taken = take_bytes(slices, len_offset + 1, len)?;

             let result = DecodeResult {
                 token: Token::Bytes(taken.bytes),
@@ -150,33 +150,33 @@ fn decode_param(param: &ParamType, slices: &[Word], offset: usize) -> Result<Dec
             Ok(result)
         },
         ParamType::String => {
-            let offset_slice = try!(peek(slices, offset));
-            let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
+            let offset_slice = peek(slices, offset)?;
+            let len_offset = (as_u32(offset_slice)? / 32) as usize;

-            let len_slice = try!(peek(slices, len_offset));
-            let len = try!(as_u32(len_slice)) as usize;
+            let len_slice = peek(slices, len_offset)?;
+            let len = as_u32(len_slice)? as usize;

-            let taken = try!(take_bytes(slices, len_offset + 1, len));
+            let taken = take_bytes(slices, len_offset + 1, len)?;

             let result = DecodeResult {
-                token: Token::String(try!(String::from_utf8(taken.bytes))),
+                token: Token::String(String::from_utf8(taken.bytes)?),
                 new_offset: offset + 1,
             };

             Ok(result)
         },
         ParamType::Array(ref t) => {
-            let offset_slice = try!(peek(slices, offset));
-            let len_offset = (try!(as_u32(offset_slice)) / 32) as usize;
+            let offset_slice = peek(slices, offset)?;
+            let len_offset = (as_u32(offset_slice)? / 32) as usize;

-            let len_slice = try!(peek(slices, len_offset));
-            let len = try!(as_u32(len_slice)) as usize;
+            let len_slice = peek(slices, len_offset)?;
+            let len = as_u32(len_slice)? as usize;

             let sub_slices = &slices[len_offset + 1..];
             let mut tokens = Vec::with_capacity(len);
             let mut new_offset = 0;
             for _ in 0..len {
-                let res = try!(decode_param(t, &sub_slices, new_offset));
+                let res = decode_param(t, &sub_slices, new_offset)?;
                 new_offset = res.new_offset;
                 tokens.push(res.token);
             }
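One subtlety the String arm shows: `?`, like `try!`, passes the error through `From::from`, which is what lets `String::from_utf8(taken.bytes)?` surface a `FromUtf8Error` as the crate's `Error`. A self-contained sketch of that conversion, using a hypothetical `Error` enum as a stand-in for ethabi's error-chain type:

```rust
use std::string::FromUtf8Error;

// Hypothetical stand-in for ethabi's Error; error-chain generates a
// comparable From impl for each of its foreign_links.
#[derive(Debug)]
enum Error {
    Utf8(FromUtf8Error),
}

impl From<FromUtf8Error> for Error {
    fn from(e: FromUtf8Error) -> Self {
        Error::Utf8(e)
    }
}

// The ? operator inserts From::from, turning the FromUtf8Error into an
// Error automatically, which is why the String arm above type-checks.
fn decode_string(bytes: Vec<u8>) -> Result<String, Error> {
    Ok(String::from_utf8(bytes)?)
}

fn main() {
    assert!(decode_string(b"abi".to_vec()).is_ok());
    assert!(decode_string(vec![0xff, 0xfe]).is_err()); // invalid UTF-8
}
```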
4 changes: 2 additions & 2 deletions ethabi/src/event.rs
@@ -146,7 +146,7 @@ impl Event {
             .flat_map(|t| t.as_ref().to_vec())
             .collect::<Vec<u8>>();

-        let topic_tokens = try!(decode(&topic_types, &flat_topics));
+        let topic_tokens = decode(&topic_types, &flat_topics)?;

         // topic may be only a 32 bytes encoded token
         if topic_tokens.len() != topics_len - to_skip {
@@ -161,7 +161,7 @@ impl Event {
             .map(|p| p.kind.clone())
             .collect::<Vec<ParamType>>();

-        let data_tokens = try!(decode(&data_types, &data));
+        let data_tokens = decode(&data_types, &data)?;

         let data_named_tokens = data_params.into_iter()
             .map(|p| p.name)
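Both hunks route `decode`, ethabi's top-level decoder, through `?`. A hedged usage sketch, assuming ethabi 9's public `decode(&[ParamType], &[u8]) -> Result<Vec<Token>, Error>` signature and its `Uint` re-export:

```rust
use ethabi::{decode, ParamType, Token};

fn main() -> Result<(), ethabi::Error> {
    // A single uint256 with value 1, encoded as a 32-byte big-endian word.
    let mut data = vec![0u8; 32];
    data[31] = 1;

    // With ?, any decoding failure propagates as ethabi::Error.
    let tokens = decode(&[ParamType::Uint(256)], &data)?;
    if let Token::Uint(value) = &tokens[0] {
        assert_eq!(*value, ethabi::Uint::from(1u64));
    }
    Ok(())
}
```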
7 changes: 3 additions & 4 deletions ethabi/src/operation.rs
@@ -21,10 +21,9 @@ pub enum Operation {

 impl<'a> Deserialize<'a> for Operation {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'a> {
-        let v: Value = try!(Deserialize::deserialize(deserializer));
-        let cloned = v.clone();
-        let map = try!(cloned.as_object().ok_or_else(|| SerdeError::custom("Invalid operation")));
-        let s = try!(map.get("type").and_then(Value::as_str).ok_or_else(|| SerdeError::custom("Invalid operation type")));
+        let v: Value = Deserialize::deserialize(deserializer)?;
+        let map = v.as_object().ok_or_else(|| SerdeError::custom("Invalid operation"))?;
+        let s = map.get("type").and_then(Value::as_str).ok_or_else(|| SerdeError::custom("Invalid operation type"))?;

         // This is a workaround to support non-spec compliant function and event names,
         // see: https://github.com/paritytech/parity/issues/4122
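Beyond the mechanical swap, this hunk also drops the needless `v.clone()`: `Value::as_object` only borrows, so the map can be read straight from `v`. A small sketch of that borrowing pattern, with error handling simplified to `Option` rather than serde's `D::Error` (the `operation_type` helper is hypothetical):

```rust
use serde_json::Value;

// as_object returns Option<&Map<String, Value>>, borrowing v,
// so no clone of the whole Value is required.
fn operation_type(v: &Value) -> Option<&str> {
    let map = v.as_object()?; // ? works on Option as well
    map.get("type").and_then(Value::as_str)
}

fn main() {
    let v: Value = serde_json::from_str(r#"{"type":"function"}"#).unwrap();
    assert_eq!(operation_type(&v), Some("function"));
}
```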
12 changes: 6 additions & 6 deletions ethabi/src/param_type/reader.rs
@@ -21,12 +21,12 @@ impl Reader {
         let count = name.chars().count();
         if num.is_empty() {
             // we already know it's a dynamic array!
-            let subtype = try!(Reader::read(&name[..count - 2]));
+            let subtype = Reader::read(&name[..count - 2])?;
             return Ok(ParamType::Array(Box::new(subtype)));
         } else {
             // it's a fixed array.
-            let len = try!(usize::from_str_radix(&num, 10));
-            let subtype = try!(Reader::read(&name[..count - num.len() - 2]));
+            let len = usize::from_str_radix(&num, 10)?;
+            let subtype = Reader::read(&name[..count - num.len() - 2])?;
             return Ok(ParamType::FixedArray(Box::new(subtype), len));
         }
     }
@@ -39,15 +39,15 @@ impl Reader {
             "int" => ParamType::Int(256),
             "uint" => ParamType::Uint(256),
             s if s.starts_with("int") => {
-                let len = try!(usize::from_str_radix(&s[3..], 10));
+                let len = usize::from_str_radix(&s[3..], 10)?;
                 ParamType::Int(len)
             },
             s if s.starts_with("uint") => {
-                let len = try!(usize::from_str_radix(&s[4..], 10));
+                let len = usize::from_str_radix(&s[4..], 10)?;
                 ParamType::Uint(len)
             },
             s if s.starts_with("bytes") => {
-                let len = try!(usize::from_str_radix(&s[5..], 10));
+                let len = usize::from_str_radix(&s[5..], 10)?;
                 ParamType::FixedBytes(len)
             },
             _ => {
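A side note on the parsing calls this file leans on: in base 10, `usize::from_str_radix(s, 10)` behaves like `s.parse::<usize>()`; both return a `Result` that composes with `?` the same way. For instance:

```rust
fn main() {
    // Equivalent ways to parse the length suffix of a type like "uint256".
    assert_eq!(usize::from_str_radix("256", 10), Ok(256));
    assert_eq!("256".parse::<usize>(), Ok(256));

    // Both reject non-numeric input with a ParseIntError.
    assert!("bytes".parse::<usize>().is_err());
}
```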
4 changes: 2 additions & 2 deletions ethabi/src/token/lenient.rs
@@ -32,7 +32,7 @@ impl Tokenizer for LenientTokenizer {
             return result;
         }

-        let uint = try!(u32::from_str_radix(value, 10));
+        let uint = u32::from_str_radix(value, 10)?;
         Ok(pad_u32(uint))
     }

@@ -42,7 +42,7 @@ impl Tokenizer for LenientTokenizer {
             return result;
         }

-        let int = try!(i32::from_str_radix(value, 10));
+        let int = i32::from_str_radix(value, 10)?;
         Ok(pad_i32(int))
     }
 }
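`pad_u32` and `pad_i32` (ethabi helpers, not shown in this diff) widen the parsed integer into a 32-byte ABI word. A hypothetical re-implementation of the unsigned case, to make the hunk self-contained:

```rust
// Hypothetical equivalent of ethabi's pad_u32: left-pad a u32 into a
// 32-byte big-endian ABI word.
fn pad_u32(value: u32) -> [u8; 32] {
    let mut padded = [0u8; 32];
    padded[28..].copy_from_slice(&value.to_be_bytes());
    padded
}

fn main() {
    let word = pad_u32(1);
    assert_eq!(word[31], 1);
    assert!(word[..31].iter().all(|&b| b == 0));
}
```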
6 changes: 3 additions & 3 deletions ethabi/src/token/mod.rs
@@ -28,7 +28,7 @@ pub trait Tokenizer {

     /// Tries to parse a value as a vector of tokens of fixed size.
     fn tokenize_fixed_array(value: &str, param: &ParamType, len: usize) -> Result<Vec<Token>, Error> {
-        let result = try!(Self::tokenize_array(value, param));
+        let result = Self::tokenize_array(value, param)?;
         match result.len() == len {
             true => Ok(result),
             false => Err(ErrorKind::InvalidData.into()),
@@ -60,7 +60,7 @@ pub trait Tokenizer {
                     return Err(ErrorKind::InvalidData.into());
                 } else if nested == 0 {
                     let sub = &value[last_item..i];
-                    let token = try!(Self::tokenize(param, sub));
+                    let token = Self::tokenize(param, sub)?;
                     result.push(token);
                     last_item = i + 1;
                 }
@@ -70,7 +70,7 @@
                 },
                 ',' if nested == 1 && ignore == false => {
                     let sub = &value[last_item..i];
-                    let token = try!(Self::tokenize(param, sub));
+                    let token = Self::tokenize(param, sub)?;
                     result.push(token);
                     last_item = i + 1;
                 },
11 changes: 5 additions & 6 deletions ethabi/src/token/strict.rs
@@ -7,7 +7,7 @@ pub struct StrictTokenizer;

 impl Tokenizer for StrictTokenizer {
     fn tokenize_address(value: &str) -> Result<[u8; 20], Error> {
-        let hex : Vec<u8> = try!(value.from_hex());
+        let hex: Vec<u8> = value.from_hex()?;
         match hex.len() == 20 {
             false => Err(ErrorKind::InvalidData.into()),
             true => {
@@ -31,20 +31,19 @@ impl Tokenizer for StrictTokenizer {
     }

     fn tokenize_bytes(value: &str) -> Result<Vec<u8>, Error> {
-        let hex = try!(value.from_hex());
-        Ok(hex)
+        value.from_hex().map_err(Into::into)
     }

     fn tokenize_fixed_bytes(value: &str, len: usize) -> Result<Vec<u8>, Error> {
-        let hex : Vec<u8> = try!(value.from_hex());
+        let hex: Vec<u8> = value.from_hex()?;
         match hex.len() == len {
             true => Ok(hex),
             false => Err(ErrorKind::InvalidData.into()),
         }
     }

     fn tokenize_uint(value: &str) -> Result<[u8; 32], Error> {
-        let hex : Vec<u8> = try!(value.from_hex());
+        let hex: Vec<u8> = value.from_hex()?;
         match hex.len() == 32 {
             true => {
                 let mut uint = [0u8; 32];
@@ -56,7 +55,7 @@
     }

     fn tokenize_int(value: &str) -> Result<[u8; 32], Error> {
-        let hex : Vec<u8> = try!(value.from_hex());
+        let hex: Vec<u8> = value.from_hex()?;
         match hex.len() == 32 {
             true => {
                 let mut int = [0u8; 32];
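The `tokenize_bytes` hunk goes one step further than the mechanical swap: `value.from_hex().map_err(Into::into)` is the combinator form of `let hex = value.from_hex()?; Ok(hex)`, applying the same `From` conversion to the error without the intermediate binding. A sketch of the equivalence, using hypothetical `ParseError` and `AppError` types:

```rust
#[derive(Debug)]
struct ParseError;

#[derive(Debug)]
struct AppError;

impl From<ParseError> for AppError {
    fn from(_: ParseError) -> Self {
        AppError
    }
}

fn parse(s: &str) -> Result<u8, ParseError> {
    s.parse::<u8>().map_err(|_| ParseError)
}

// Bind-then-wrap, the shape try! used to force.
fn with_question_mark(s: &str) -> Result<u8, AppError> {
    let n = parse(s)?;
    Ok(n)
}

// Single-expression combinator form, identical behavior.
fn with_map_err(s: &str) -> Result<u8, AppError> {
    parse(s).map_err(Into::into)
}

fn main() {
    assert_eq!(with_question_mark("7").unwrap(), with_map_err("7").unwrap());
    assert!(with_map_err("x").is_err());
}
```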