Trait rustabi::token::Tokenizer

pub trait Tokenizer {
    // Required methods
    fn tokenize_address(value: &str) -> Result<[u8; 20], Error>;
    fn tokenize_string(value: &str) -> Result<String, Error>;
    fn tokenize_bool(value: &str) -> Result<bool, Error>;
    fn tokenize_bytes(value: &str) -> Result<Vec<u8>, Error>;
    fn tokenize_fixed_bytes(value: &str, len: usize) -> Result<Vec<u8>, Error>;
    fn tokenize_uint(value: &str) -> Result<[u8; 32], Error>;
    fn tokenize_int(value: &str) -> Result<[u8; 32], Error>;

    // Provided methods
    fn tokenize(param: &ParamType, value: &str) -> Result<Token, Error> { ... }
    fn tokenize_fixed_array(
        value: &str,
        param: &ParamType,
        len: usize
    ) -> Result<Vec<Token>, Error> { ... }
    fn tokenize_array(
        value: &str,
        param: &ParamType
    ) -> Result<Vec<Token>, Error> { ... }
}

This trait should be used to parse string values as tokens.

Required methods

fn tokenize_address(value: &str) -> Result<[u8; 20], Error>

Tries to parse a value as an address.

fn tokenize_string(value: &str) -> Result<String, Error>

Tries to parse a value as a string.

fn tokenize_bool(value: &str) -> Result<bool, Error>

Tries to parse a value as a bool.

fn tokenize_bytes(value: &str) -> Result<Vec<u8>, Error>

Tries to parse a value as bytes.

fn tokenize_fixed_bytes(value: &str, len: usize) -> Result<Vec<u8>, Error>

Tries to parse a value as fixed-size bytes of the given length `len`.

fn tokenize_uint(value: &str) -> Result<[u8; 32], Error>

Tries to parse a value as an unsigned integer.

fn tokenize_int(value: &str) -> Result<[u8; 32], Error>

Tries to parse a value as a signed integer.

Loading content...

Provided methods

fn tokenize(param: &ParamType, value: &str) -> Result<Token, Error>

Tries to parse a string as a token of given type.

fn tokenize_fixed_array(
    value: &str,
    param: &ParamType,
    len: usize
) -> Result<Vec<Token>, Error>

Tries to parse a value as a vector of tokens of fixed size.

fn tokenize_array(value: &str, param: &ParamType) -> Result<Vec<Token>, Error>

Tries to parse a value as a vector of tokens.

Loading content...

Implementors

impl Tokenizer for LenientTokenizer[src]

impl Tokenizer for StrictTokenizer[src]

Loading content...