use token::{Tokenizer, StrictTokenizer};
use util::{pad_u32, pad_i32};
use errors::Error;
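
/// Tries to parse a string as a token. Unlike `StrictTokenizer`, it does
/// not require the string to be the canonical representation of the value:
/// bare decimal integers are accepted for `uint` and `int`.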
pub struct LenientTokenizer;

impl Tokenizer for LenientTokenizer {
    // Everything except the integer types simply delegates to the
    // strict tokenizer.
    fn tokenize_address(value: &str) -> Result<[u8; 20], Error> {
        StrictTokenizer::tokenize_address(value)
    }

    fn tokenize_string(value: &str) -> Result<String, Error> {
        StrictTokenizer::tokenize_string(value)
    }

    fn tokenize_bool(value: &str) -> Result<bool, Error> {
        StrictTokenizer::tokenize_bool(value)
    }

    fn tokenize_bytes(value: &str) -> Result<Vec<u8>, Error> {
        StrictTokenizer::tokenize_bytes(value)
    }

    fn tokenize_fixed_bytes(value: &str, len: usize) -> Result<Vec<u8>, Error> {
        StrictTokenizer::tokenize_fixed_bytes(value, len)
    }

    fn tokenize_uint(value: &str) -> Result<[u8; 32], Error> {
        // First try the strict (hex) representation; if that fails,
        // fall back to parsing the string as a plain decimal integer.
        let result = StrictTokenizer::tokenize_uint(value);
        if result.is_ok() {
            return result;
        }

        let uint = u32::from_str_radix(value, 10)?;
        Ok(pad_u32(uint))
    }

    fn tokenize_int(value: &str) -> Result<[u8; 32], Error> {
        // Same decimal fallback as `tokenize_uint`, but for signed values.
        let result = StrictTokenizer::tokenize_int(value);
        if result.is_ok() {
            return result;
        }

        let int = i32::from_str_radix(value, 10)?;
        Ok(pad_i32(int))
    }
}
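
// A minimal usage sketch of the lenient fallback. It assumes the `token`
// module re-exports `LenientTokenizer` alongside `StrictTokenizer` (as the
// imports at the top of this file suggest) and that `Error` converts from
// `ParseIntError`, which the `?` operator above already relies on.
#[cfg(test)]
mod tests {
    use token::{Tokenizer, LenientTokenizer, StrictTokenizer};
    use util::pad_u32;

    #[test]
    fn uint_falls_back_to_decimal() {
        // The strict tokenizer rejects a bare decimal string...
        assert!(StrictTokenizer::tokenize_uint("1111").is_err());
        // ...while the lenient one parses it and pads it to 32 bytes.
        assert_eq!(LenientTokenizer::tokenize_uint("1111").unwrap(), pad_u32(1111));
    }
}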