use crate::abi_decoder::ABIDecoder;
use crate::types::Bits256;
use core::fmt;
use fuel_types::bytes::padded_len;
use fuels_types::enum_variants::EnumVariants;
use fuels_types::{
errors::{CodecError, Error},
param_types::ParamType,
};
use strum_macros::EnumString;
// Public submodules: ABI encoding/decoding, contract code generation, and
// supporting utilities (sources, formatting, tokenization, shared types).
pub mod abi_decoder;
pub mod abi_encoder;
pub mod code_gen;
pub mod constants;
pub mod parameters;
pub mod rustfmt;
pub mod source;
pub mod tokenizer;
pub mod types;
pub mod utils;
/// Re-exports the whole `fuel_tx` crate under `tx` so downstream users can
/// reach transaction types through this crate without a direct dependency.
pub mod tx {
    #[doc(no_inline)]
    pub use fuel_tx::*;
}
/// One word: a fixed 8-byte array.
pub type ByteArray = [u8; 8];
/// A function selector, one word wide.
pub type Selector = ByteArray;
/// Data needed to encode an enum value: discriminant, inner token, and the
/// enum's variant descriptions.
pub type EnumSelector = (u8, Token, EnumVariants);
/// Either a plain address or a deployed contract's id.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Identity {
    Address(fuel_tx::Address),
    ContractId(fuel_tx::ContractId),
}
/// A string payload paired with the length it must have to be encodable.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct StringToken {
    // The raw string contents.
    data: String,
    // Byte length `data` is expected to have (validated on encoding).
    expected_len: usize,
}
impl StringToken {
    /// Builds a `StringToken` holding `data` together with the byte length
    /// it is expected to have when encoded.
    pub fn new(data: String, expected_len: usize) -> Self {
        Self { data, expected_len }
    }

    /// Validates the held string and returns it as a `&str`.
    ///
    /// # Errors
    ///
    /// Returns `CodecError::InvalidData` if the string contains non-ASCII
    /// characters, or if its byte length differs from the expected length.
    pub fn get_encodable_str(&self) -> Result<&str, CodecError> {
        // Non-ASCII content is rejected before the length is even looked at.
        if !self.data.is_ascii() {
            return Err(CodecError::InvalidData(
                "String data can only have ascii values".into(),
            ));
        }

        let actual_len = self.data.len();
        if actual_len == self.expected_len {
            Ok(&self.data)
        } else {
            Err(CodecError::InvalidData(format!(
                "String data has len {}, but the expected len is {}",
                actual_len, self.expected_len
            )))
        }
    }
}
/// A typed value in the ABI token tree.
///
/// Variants can be parsed from their names case-insensitively (via
/// `EnumString`), except `Enum`, which is excluded with `strum(disabled)`.
#[derive(Debug, Clone, PartialEq, EnumString)]
#[strum(ascii_case_insensitive)]
pub enum Token {
    // The zero-sized unit value.
    Unit,
    U8(u8),
    U16(u16),
    U32(u32),
    U64(u64),
    Bool(bool),
    // Distinct from `U8`: the ABI's `byte` type (also carried as a `u8` here).
    Byte(u8),
    // A 256-bit value as raw bytes.
    B256([u8; 32]),
    // Fixed-size array of homogeneous tokens.
    Array(Vec<Token>),
    // Dynamically-sized vector of homogeneous tokens.
    Vector(Vec<Token>),
    // Fixed-length string; see `StringToken` for length validation.
    String(StringToken),
    // One token per struct field, in declaration order.
    Struct(Vec<Token>),
    // Boxed to keep the enum small; see `EnumSelector`.
    #[strum(disabled)]
    Enum(Box<EnumSelector>),
    // One token per tuple element.
    Tuple(Vec<Token>),
}
impl fmt::Display for Token {
    /// Displays a token via its `Debug` representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
impl Default for Token {
fn default() -> Self {
Token::U8(0)
}
}
/// Conversion between a Rust value and its `Token` representation.
pub trait Tokenizable {
    /// Attempts to build `Self` from the given token.
    fn from_token(token: Token) -> Result<Self, Error>
    where
        Self: Sized;
    /// Consumes `self` and produces its token representation.
    fn into_token(self) -> Token;
}
/// Decodes `bytes` into a value of type `T`, using `T`'s `ParamType` to
/// drive the ABI decoder.
///
/// # Errors
///
/// Propagates any decoding or token-conversion failure.
pub fn try_from_bytes<T>(bytes: &[u8]) -> Result<T, Error>
where
    T: Parameterize + Tokenizable,
{
    T::from_token(ABIDecoder::decode_single(&T::param_type(), bytes)?)
}
impl Tokenizable for Token {
fn from_token(token: Token) -> Result<Self, Error> {
Ok(token)
}
fn into_token(self) -> Token {
self
}
}
/// Types that can describe themselves with an ABI `ParamType`.
pub trait Parameterize {
    /// Returns the `ParamType` describing this type's ABI layout.
    fn param_type() -> ParamType;
}
/// Left-pads a `u8` into a big-endian word.
///
/// Uses the same `to_be_bytes` + `copy_from_slice` pattern as `pad_u16` and
/// `pad_u32` for consistency (the previous version assigned `padded[7]`
/// directly).
pub fn pad_u8(value: u8) -> ByteArray {
    let mut padded = ByteArray::default();
    padded[7..].copy_from_slice(&value.to_be_bytes());
    padded
}
/// Left-pads a `u16` into a big-endian word.
pub fn pad_u16(value: u16) -> ByteArray {
    let mut word = ByteArray::default();
    let be_bytes = value.to_be_bytes();
    // Write the value into the last `be_bytes.len()` positions of the word.
    word[8 - be_bytes.len()..].copy_from_slice(&be_bytes);
    word
}
pub fn pad_u32(value: u32) -> ByteArray {
let mut padded = [0u8; 8];
padded[4..].copy_from_slice(&value.to_be_bytes());
padded
}
/// Copies `s` into a new buffer, zero-padded up to the length reported by
/// `padded_len` for its bytes.
pub fn pad_string(s: &str) -> Vec<u8> {
    let mut padded = s.as_bytes().to_vec();
    // `resize` zero-fills in place; the previous version allocated a
    // temporary `vec![0; pad]` only to copy it in (clippy: useless_vec).
    padded.resize(padded_len(s.as_bytes()), 0);
    padded
}
#[cfg(test)]
mod tests {
    use crate::try_from_bytes;
    use fuel_types::{Address, AssetId, ContractId};
    use fuels_types::{constants::WORD_SIZE, errors::Error};

    #[test]
    fn can_convert_bytes_into_tuple() -> Result<(), Error> {
        // Two big-endian words holding 1 and 2.
        let encoded: Vec<u8> = vec![0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2];
        let decoded: (u64, u32) = try_from_bytes(&encoded)?;
        assert_eq!(decoded, (1, 2));
        Ok(())
    }

    #[test]
    fn can_convert_all_from_bool_to_u64() -> Result<(), Error> {
        // A word with every bit set decodes to each type's max value.
        let all_ones: Vec<u8> = vec![0xFF; WORD_SIZE];
        assert!(try_from_bytes::<bool>(&all_ones)?);
        assert_eq!(try_from_bytes::<u8>(&all_ones)?, u8::MAX);
        assert_eq!(try_from_bytes::<u16>(&all_ones)?, u16::MAX);
        assert_eq!(try_from_bytes::<u32>(&all_ones)?, u32::MAX);
        assert_eq!(try_from_bytes::<u64>(&all_ones)?, u64::MAX);
        Ok(())
    }

    #[test]
    fn can_convert_native_types() -> anyhow::Result<()> {
        let raw = [0xFF; 32];
        // Hoist the slice-to-array conversion shared by all three asserts.
        let expected: [u8; 32] = raw.as_slice().try_into()?;
        assert_eq!(try_from_bytes::<Address>(&raw)?, Address::new(expected));
        assert_eq!(try_from_bytes::<ContractId>(&raw)?, ContractId::new(expected));
        assert_eq!(try_from_bytes::<AssetId>(&raw)?, AssetId::new(expected));
        Ok(())
    }
}