diff --git a/crates/antelope/src/api/client.rs b/crates/antelope/src/api/client.rs
index 3dd4fa8..5350f9c 100644
--- a/crates/antelope/src/api/client.rs
+++ b/crates/antelope/src/api/client.rs
@@ -1,16 +1,21 @@
-use std::fmt::{Display, Formatter};
 use crate::api::default_provider::DefaultProvider;
 use crate::api::v1::chain::ChainAPI;
+use std::fmt::{Display, Formatter};

 pub enum HTTPMethod {
-    GET, POST
+    GET,
+    POST,
 }

 impl Display for HTTPMethod {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         match self {
-            HTTPMethod::GET => { write!(f, "GET") }
-            HTTPMethod::POST => { write!(f, "POST") }
+            HTTPMethod::GET => {
+                write!(f, "GET")
+            }
+            HTTPMethod::POST => {
+                write!(f, "POST")
+            }
         }
     }
 }
@@ -22,7 +27,7 @@ pub trait Provider {
 }

 pub struct APIClient {
-    pub v1_chain: ChainAPI
+    pub v1_chain: ChainAPI,
 }

 impl APIClient {
@@ -33,7 +38,7 @@ impl APIClient {

     pub fn custom_provider(provider: Box) -> Result {
         Ok(APIClient {
-            v1_chain: ChainAPI::new(provider)
+            v1_chain: ChainAPI::new(provider),
         })
     }
-}
\ No newline at end of file
+}
diff --git a/crates/antelope/src/api/default_provider.rs b/crates/antelope/src/api/default_provider.rs
index 0f3844f..81fad48 100644
--- a/crates/antelope/src/api/default_provider.rs
+++ b/crates/antelope/src/api/default_provider.rs
@@ -1,9 +1,9 @@
+use crate::api::client::Provider;
 use reqwest::blocking::Client;
-use crate::api::client::{Provider};

 pub struct DefaultProvider {
     base_url: String,
-    client: Client
+    client: Client,
 }

 impl DefaultProvider {
@@ -18,18 +18,16 @@ impl DefaultProvider {
             return Err(err_message);
         }

-        let url = base_url.trim_end_matches("/");
+        let url = base_url.trim_end_matches('/');

         Ok(Self {
             base_url: String::from(url),
-            client: client.unwrap()
+            client: client.unwrap(),
         })
     }
-
 }

 impl Provider for DefaultProvider {
-
     fn get(&self, path: String) -> Result {
         let res = self.client.get(self.base_url.to_string() + &path).send();
         if res.is_err() {
@@ -51,4 +49,4 @@ impl Provider for DefaultProvider {

         Ok(res.unwrap().text().unwrap())
     }
-}
\ No newline at end of file
+}
diff --git a/crates/antelope/src/api/mod.rs b/crates/antelope/src/api/mod.rs
index e9cee4f..6c4424a 100644
--- a/crates/antelope/src/api/mod.rs
+++ b/crates/antelope/src/api/mod.rs
@@ -1,3 +1,3 @@
-pub mod v1;
 pub mod client;
 mod default_provider;
+pub mod v1;
diff --git a/crates/antelope/src/api/v1/chain.rs b/crates/antelope/src/api/v1/chain.rs
index a8a1e22..c6d0fc3 100644
--- a/crates/antelope/src/api/v1/chain.rs
+++ b/crates/antelope/src/api/v1/chain.rs
@@ -1,24 +1,24 @@
-use serde_json::Value;
-use crate::api::client::{Provider};
+use crate::api::client::Provider;
+use crate::api::v1::structs::{
+    ClientError, GetInfoResponse, ProcessedTransaction, ProcessedTransactionReceipt,
+    SendTransactionResponse, SendTransactionResponseError,
+};
 use crate::chain::block_id::BlockId;
-use crate::api::v1::structs::{ClientError, GetInfoResponse, ProcessedTransaction, ProcessedTransactionReceipt, SendTransactionError, SendTransactionResponse};
 use crate::chain::checksum::Checksum256;
-use crate::chain::time::TimePoint;
 use crate::chain::name::Name;
+use crate::chain::time::TimePoint;
 use crate::chain::transaction::{CompressionType, PackedTransaction, SignedTransaction};
 use crate::name;
-use crate::serializer::formatter::{JSONObject};
+use crate::serializer::formatter::JSONObject;
+use serde_json::Value;

 pub struct ChainAPI {
-    provider: Box
+    provider: Box,
 }

 impl ChainAPI {
-
     pub fn new(provider: Box) -> Self {
-        ChainAPI {
-            provider
-        }
+        ChainAPI { provider }
     }

     pub fn get_info(&self) -> Result> {
@@ -33,8 +33,12 @@ impl ChainAPI {
             chain_id: Checksum256::from_hex(obj.get_string("chain_id")?.as_str())?,
             head_block_num: obj.get_u32("head_block_num")?,
             last_irreversible_block_num: obj.get_u32("last_irreversible_block_num")?,
-            last_irreversible_block_id: BlockId { bytes: obj.get_hex_bytes("last_irreversible_block_id")? },
-            head_block_id: BlockId { bytes: obj.get_hex_bytes("head_block_id")? },
+            last_irreversible_block_id: BlockId {
+                bytes: obj.get_hex_bytes("last_irreversible_block_id")?,
+            },
+            head_block_id: BlockId {
+                bytes: obj.get_hex_bytes("head_block_id")?,
+            },
             head_block_time: TimePoint::from_timestamp(obj.get_str("head_block_time")?)?,
             head_block_producer: name!(obj.get_str("head_block_producer")?),
             virtual_block_cpu_limit: obj.get_u64("virtual_block_cpu_limit")?,
@@ -43,22 +47,39 @@ impl ChainAPI {
             block_net_limit: obj.get_u64("block_net_limit")?,
             server_version_string: obj.get_string("server_version_string").ok(),
             fork_db_head_block_num: obj.get_u32("fork_db_head_block_num").ok(),
-            fork_db_head_block_id: BlockId::from_bytes(&obj.get_hex_bytes("fork_db_head_block_id")?).ok()
+            fork_db_head_block_id: BlockId::from_bytes(
+                &obj.get_hex_bytes("fork_db_head_block_id")?,
+            )
+            .ok(),
         })
     }

-    pub fn send_transaction(&self, trx: SignedTransaction) -> Result> {
+    pub fn send_transaction(
+        &self,
+        trx: SignedTransaction,
+    ) -> Result> {
         let packed_result = PackedTransaction::from_signed(trx, CompressionType::ZLIB);
         if packed_result.is_err() {
-            return Err(ClientError::server(SendTransactionError {
-                message: String::from("Failed to pack transaction"),
-            }));
+            return Err(ClientError::encoding("Failed to pack transaction".into()));
         }
         let packed = packed_result.unwrap();
         let trx_json = packed.to_json();
-        let result = self.provider.post(String::from("/v1/chain/send_transaction"), Some(trx_json));
+        let result = self
+            .provider
+            .post(String::from("/v1/chain/send_transaction"), Some(trx_json));

         let json: Value = serde_json::from_str(result.unwrap().as_str()).unwrap();
         let response_obj = JSONObject::new(json);
+        if response_obj.has("code") {
+            let error_value = response_obj.get_value("error").unwrap();
+            let error_json = error_value.to_string();
+            let error_obj = JSONObject::new(error_value);
+            return Err(ClientError::server(SendTransactionResponseError {
+                code: error_obj.get_u32("code")?,
+                name: error_obj.get_string("name")?,
+                message: error_json,
+                stack: vec![],
+            }));
+        }

         let processed_obj = JSONObject::new(response_obj.get_value("processed").unwrap());
         let receipt_obj = JSONObject::new(processed_obj.get_value("receipt").unwrap());
@@ -77,7 +98,7 @@ impl ChainAPI {
                 except: None,
                 net_usage: processed_obj.get_u32("net_usage")?,
                 scheduled: false,
-                action_traces: "".to_string(), // TODO: Properly encode this
+                action_traces: "".to_string(),     // TODO: Properly encode this
                 account_ram_delta: "".to_string(), // TODO: Properly encode this
             },
         })
diff --git a/crates/antelope/src/api/v1/mod.rs b/crates/antelope/src/api/v1/mod.rs
index 09e8d52..83f3648 100644
--- a/crates/antelope/src/api/v1/mod.rs
+++ b/crates/antelope/src/api/v1/mod.rs
@@ -1,2 +1,2 @@
+pub mod chain;
 pub mod structs;
-pub mod chain;
\ No newline at end of file
diff --git a/crates/antelope/src/api/v1/structs.rs b/crates/antelope/src/api/v1/structs.rs
index 7312394..e15c787 100644
--- a/crates/antelope/src/api/v1/structs.rs
+++ b/crates/antelope/src/api/v1/structs.rs
@@ -1,8 +1,8 @@
 use crate::chain::{
+    block_id::BlockId,
     checksum::Checksum256,
     name::Name,
     time::{TimePoint, TimePointSec},
-    block_id::BlockId,
     transaction::TransactionHeader,
     varint::VarUint32,
 };
@@ -10,9 +10,9 @@ use crate::chain::{
 #[derive(Debug)]
 pub enum ClientError {
     SIMPLE(SimpleError),
-    SERVER(T),
+    SERVER(ServerError),
     HTTP(HTTPError),
-    ENCODING(EncodingError)
+    ENCODING(EncodingError),
 }

 impl ClientError {
@@ -24,8 +24,8 @@ impl ClientError {
         ClientError::ENCODING(EncodingError { message })
     }

-    pub fn server(server_error: T) -> Self {
-        ClientError::SERVER(server_error)
+    pub fn server(error: T) -> Self {
+        ClientError::SERVER(ServerError { error })
     }
 }

@@ -43,23 +43,23 @@ impl From for ClientError {

 #[derive(Debug)]
 pub struct SimpleError {
-    pub message: String
+    pub message: String,
 }

 #[derive(Debug)]
 pub struct ServerError {
-    error: T
+    pub error: T,
 }

 #[derive(Debug)]
 pub struct HTTPError {
     pub code: u16,
-    pub message: String
+    pub message: String,
 }

 #[derive(Debug)]
 pub struct EncodingError {
-    pub message: String
+    pub message: String,
 }

 impl EncodingError {
@@ -97,14 +97,14 @@ pub struct GetInfoResponse {
     pub block_net_limit: u64,
     pub server_version_string: Option,
     pub fork_db_head_block_num: Option,
-    pub fork_db_head_block_id: Option
+    pub fork_db_head_block_id: Option,
 }

 impl GetInfoResponse {
     pub fn get_transaction_header(&self, seconds_ahead: u32) -> TransactionHeader {
         let expiration = TimePointSec {
             // head_block_time.elapsed is microseconds, convert to seconds
-            seconds: (self.head_block_time.elapsed / 1000 / 1000) as u32 + seconds_ahead
+            seconds: (self.head_block_time.elapsed / 1000 / 1000) as u32 + seconds_ahead,
         };
         let id = self.last_irreversible_block_id.bytes.to_vec();
         let prefix_array = &id[8..12];
@@ -115,7 +115,7 @@ impl GetInfoResponse {
             delay_sec: VarUint32::default(),
             expiration,
             ref_block_num: (self.last_irreversible_block_num & 0xffff) as u16,
-            ref_block_prefix: prefix
+            ref_block_prefix: prefix,
         }
     }
 }
@@ -123,7 +123,7 @@ impl GetInfoResponse {
 pub struct ProcessedTransactionReceipt {
     pub status: String,
     pub cpu_usage_us: u32,
-    pub net_usage_words: u32
+    pub net_usage_words: u32,
 }

 pub struct ProcessedTransaction {
@@ -132,14 +132,14 @@ pub struct ProcessedTransaction {
     pub block_time: String,
     pub receipt: ProcessedTransactionReceipt,
     pub elapsed: u64,
-    pub except: Option,
+    pub except: Option,
     pub net_usage: u32,
     pub scheduled: bool,
-    pub action_traces: String, // TODO: create a type for this?
-    pub account_ram_delta: String // TODO: create a type for this?
-
+    pub action_traces: String,     // TODO: create a type for this?
+    pub account_ram_delta: String, // TODO: create a type for this?
 }

+#[derive(Debug)]
 pub struct SendTransactionResponseExceptionStackContext {
     pub level: String,
     pub file: String,
@@ -147,37 +147,25 @@ pub struct SendTransactionResponseExceptionStackContext {
     pub method: String,
     pub hostname: String,
     pub thread_name: String,
-    pub timestamp: String
+    pub timestamp: String,
 }

+#[derive(Debug)]
 pub struct SendTransactionResponseExceptionStack {
     pub context: SendTransactionResponseExceptionStackContext,
     pub format: String,
-    pub data: String // TODO: create a type for this?
+    pub data: String, // TODO: create a type for this?
 }

-pub struct SendTransactionResponseException {
+#[derive(Debug)]
+pub struct SendTransactionResponseError {
     pub code: u32,
     pub name: String,
     pub message: String,
-    pub stack: Vec
+    pub stack: Vec,
 }

 pub struct SendTransactionResponse {
     pub transaction_id: String,
-    pub processed: ProcessedTransaction
+    pub processed: ProcessedTransaction,
 }
-
-
-#[derive(Debug)]
-pub struct SendTransactionError {
-    pub message: String
-}
-//
-// impl From for SendTransactionError {
-//     fn from(value: ClientError) -> Self {
-//         Self {
-//             message: value.message
-//         }
-//     }
-// }
\ No newline at end of file
diff --git a/crates/antelope/src/base58.rs b/crates/antelope/src/base58.rs
index cd598a8..9d711a2 100644
--- a/crates/antelope/src/base58.rs
+++ b/crates/antelope/src/base58.rs
@@ -1,10 +1,10 @@
-use ripemd::{Digest as RipeDigest, Ripemd160};
-use sha2::Sha256;
 use crate::base58;
 use crate::chain::key_type::KeyType;
+use ripemd::{Digest as RipeDigest, Ripemd160};
+use sha2::Sha256;

 pub fn encode(data: Vec) -> String {
-    return bs58::encode(data).into_string();
+    bs58::encode(data).into_string()
 }

 pub fn decode(encoded: &str, size: Option) -> Result, String> {
@@ -18,11 +18,18 @@ pub fn decode(encoded: &str, size: Option) -> Result, String> {
         return Err(String::from("Size did not match"));
     }

-    return Ok(decoded);
+    Ok(decoded)
 }

-pub fn decode_ripemd160_check(encoded: &str, size: Option, key_type: Option, ignore_checksum: bool) -> Result, String> {
-    let decoded = bs58::decode(encoded).into_vec().map_err(|e| e.to_string())?;
+pub fn decode_ripemd160_check(
+    encoded: &str,
+    size: Option,
+    key_type: Option,
+    ignore_checksum: bool,
+) -> Result, String> {
+    let decoded = bs58::decode(encoded)
+        .into_vec()
+        .map_err(|e| e.to_string())?;

     if decoded.len() < 5 {
         return Err("Data is too short".to_string());
@@ -43,11 +50,13 @@ pub fn decode_ripemd160_check(encoded: &str, size: Option, key_type: Opti
         }
     }

-    return Ok(data.to_vec());
+    Ok(data.to_vec())
 }

 pub fn decode_check(encoded: &str, ignore_checksum: bool) -> Result, String> {
-    let decoded = bs58::decode(encoded).into_vec().map_err(|e| e.to_string())?;
+    let decoded = bs58::decode(encoded)
+        .into_vec()
+        .map_err(|e| e.to_string())?;

     if decoded.len() < 4 {
         return Err("Data too short for checksum".to_string());
@@ -60,7 +69,7 @@ pub fn decode_check(encoded: &str, ignore_checksum: bool) -> Result, Str
         return Err("Checksum mismatch".to_string());
     }

-    return Ok(data.to_vec());
+    Ok(data.to_vec())
 }

 pub fn decode_public_key(value: &str) -> Result<(KeyType, Vec), String> {
@@ -72,21 +81,21 @@ pub fn decode_public_key(value: &str) -> Result<(KeyType, Vec), String> {
         let key_type = match parts[1] {
             "K1" => KeyType::K1,
             "R1" => KeyType::R1,
-// ... handle other key types ...
+            // ... handle other key types ...
             _ => return Err("Invalid key type".to_string()),
         };

         let size = match key_type {
             KeyType::K1 | KeyType::R1 => Some(32),
-// ... other cases ...
+            // ... other cases ...
}; let data = decode_ripemd160_check(parts[2], size, Option::from(key_type), false).unwrap(); - return Ok((key_type, data)); + Ok((key_type, data)) } else if value.len() > 50 { let without_prefix = value.chars().skip(value.len() - 50).collect::(); let data = base58::decode_ripemd160_check(without_prefix.as_str(), None, None, false); - return Ok((KeyType::K1, data.unwrap().to_vec())); + Ok((KeyType::K1, data.unwrap().to_vec())) } else { - return Err(String::from("Public key format invalid")); + Err(String::from("Public key format invalid")) } } @@ -108,7 +117,9 @@ pub fn decode_key(value: &str, ignore_checksum: bool) -> Result<(KeyType, Vec Result<(KeyType, Vec) -> String { let double_hash = double_sha_checksum(data.to_vec()); let mut with_checksum = data.to_vec(); with_checksum.append(&mut double_hash.to_vec()); - return bs58::encode(with_checksum).into_string(); + bs58::encode(with_checksum).into_string() } pub fn encode_ripemd160_check(data: Vec, suffix: Option<&str>) -> String { @@ -143,21 +156,21 @@ pub fn encode_ripemd160_check(data: Vec, suffix: Option<&str>) -> String { let mut with_ripe_checksum = data.to_vec(); with_ripe_checksum.append(&mut ripe_checksum.to_vec()); - return bs58::encode(with_ripe_checksum).into_string(); + bs58::encode(with_ripe_checksum).into_string() } fn ripemd160_checksum(data: Vec, suffix: Option<&str>) -> Vec { let mut hasher = Ripemd160::new(); - hasher.update(data.to_vec()); + hasher.update(&data); if let Some(s) = suffix { hasher.update(s); } let ripe_hash = hasher.finalize(); - return ripe_hash.as_slice()[0..4].to_vec(); + ripe_hash.as_slice()[0..4].to_vec() } fn double_sha_checksum(data: Vec) -> Vec { let data_hash = Sha256::digest(Sha256::digest(data)); let checksum = &data_hash[..4]; - return checksum.to_vec(); -} \ No newline at end of file + checksum.to_vec() +} diff --git a/crates/antelope/src/chain/action.rs b/crates/antelope/src/chain/action.rs index 1e326ad..af571a9 100644 --- a/crates/antelope/src/chain/action.rs +++ b/crates/antelope/src/chain/action.rs @@ -1,17 +1,8 @@ -use crate::chain::{ - varint::VarUint32, - name::Name, -}; use crate::chain::checksum::Checksum256; +use crate::chain::{name::Name, varint::VarUint32}; -use crate::serializer::serializer::{ - Packer, - Encoder, - Decoder, -}; +use crate::serializer::{Decoder, Encoder, Packer}; -/// A structure representing a permission level for an action in a smart contract system. -#[cfg_attr(feature = "std", derive(eosio_scale_info::TypeInfo))] #[derive(Copy, Clone, Debug, Eq, PartialEq, Default)] pub struct PermissionLevel { /// The account holding the permission. @@ -31,7 +22,7 @@ impl PermissionLevel { impl Packer for PermissionLevel { /// Returns the packed size of the PermissionLevel structure. fn size(&self) -> usize { - return 16; + 16 } /// Packs the PermissionLevel structure into the provided Encoder. @@ -44,16 +35,17 @@ impl Packer for PermissionLevel { /// Unpacks the PermissionLevel structure from the provided data slice. fn unpack(&mut self, data: &[u8]) -> usize { - assert!(data.len() >= self.size(), "PermissionLevel.unpack: buffer overflow"); + assert!( + data.len() >= self.size(), + "PermissionLevel.unpack: buffer overflow" + ); let mut dec = Decoder::new(data); dec.unpack(&mut self.actor); dec.unpack(&mut self.permission); - return 16; + 16 } } -/// A structure representing an action to be executed in a smart contract system. 
-#[cfg_attr(feature = "std", derive(eosio_scale_info::TypeInfo))] #[derive(Clone, Eq, PartialEq)] pub struct Action { /// The account on which the action is executed. @@ -68,34 +60,48 @@ pub struct Action { impl Action { /// Creates an action by specifying contract account, action name, authorization and data. - pub fn new(account: Name, name: Name, authorization: PermissionLevel, data: &dyn Packer) -> Self { + pub fn new( + account: Name, + name: Name, + authorization: PermissionLevel, + data: &dyn Packer, + ) -> Self { let mut enc = Encoder::new(data.size()); data.pack(&mut enc); Self { account, name, authorization: vec![authorization], - data: enc.get_bytes().to_vec() + data: enc.get_bytes().to_vec(), } } - pub fn new_ex(account: Name, name: Name, authorizations: Vec, data: &dyn Packer) -> Self { + pub fn new_ex( + account: Name, + name: Name, + authorizations: Vec, + data: &dyn Packer, + ) -> Self { let mut enc = Encoder::new(data.size()); data.pack(&mut enc); Self { account, name, authorization: authorizations, - data: enc.get_bytes().to_vec() + data: enc.get_bytes().to_vec(), } } - } /// Implements the Default trait for Action. impl Default for Action { fn default() -> Self { - Self { account: Name{n: 0}, name: Name{n: 0}, authorization: Vec::new(), data: Vec::new() } + Self { + account: Name { n: 0 }, + name: Name { n: 0 }, + authorization: Vec::new(), + data: Vec::new(), + } } } @@ -105,9 +111,10 @@ impl Packer for Action { fn size(&self) -> usize { let mut size: usize; size = 16; - size += VarUint32::new(self.authorization.len() as u32).size()+ self.authorization.len() * 16; + size += + VarUint32::new(self.authorization.len() as u32).size() + self.authorization.len() * 16; size += VarUint32::new(self.data.len() as u32).size() + self.data.len(); - return size + size } /// Packs the Action structure into the provided Encoder. diff --git a/crates/antelope/src/chain/asset.rs b/crates/antelope/src/chain/asset.rs index 42fe387..1d610c4 100644 --- a/crates/antelope/src/chain/asset.rs +++ b/crates/antelope/src/chain/asset.rs @@ -1,14 +1,11 @@ use core::ops; +use std::fmt::{Display, Formatter}; -use crate::chain::{ - Packer, - Encoder, - Decoder, -}; use crate::chain::name::Name; +use crate::chain::{Decoder, Encoder, Packer}; const MAX_AMOUNT: i64 = (1 << 62) - 1; -const MAX_PRECISION: u8 = 18; +const MAX_PRECISION: u8 = 18; /// Check if the given symbol code is valid. pub fn is_valid_symbol_code(sym: u64) -> bool { @@ -20,7 +17,7 @@ pub fn is_valid_symbol_code(sym: u64) -> bool { for j in 0..7 { let c = (tmp & 0xFF) as u8; - if !(c >= 'A' as u8 && c <= 'Z' as u8) { + if !c.is_ascii_uppercase() { return false; } @@ -38,11 +35,9 @@ pub fn is_valid_symbol_code(sym: u64) -> bool { return false; } } - return true; + true } -/// A struct representing the symbol code of an asset. 
-#[cfg_attr(feature = "std", derive(eosio_scale_info::TypeInfo))] #[derive(Copy, Clone, Default, Eq, PartialEq)] pub struct SymbolCode { /// @@ -50,148 +45,142 @@ pub struct SymbolCode { } impl SymbolCode { - /// pub fn new(sym: &str) -> Self { let raw = sym.as_bytes(); - assert!(raw.len() < 7 && raw.len() > 0, "bad symbol name"); + assert!(raw.len() < 7 && !raw.is_empty(), "bad symbol name"); let mut value: u64 = 0; for i in (0..raw.len()).rev() { let c = raw[i]; - assert!(c >= 'A' as u8 && c <= 'Z' as u8, "invald symbol character"); + assert!(c.is_ascii_uppercase(), "invalid symbol code character"); value <<= 8; value |= c as u64; } - Self{value} + Self { value } } - /// pub fn value(&self) -> u64 { self.value } - /// - pub fn to_string(&self) -> String { + pub fn as_string(&self) -> String { let mut v: Vec = Vec::with_capacity(7); let mut tmp = self.value; for _ in 0..7 { let c = (tmp & 0xff) as u8; - assert!(c >= 'A' as u8 && c <= 'Z' as u8, "invald symbol character"); + assert!(c.is_ascii_uppercase(), "invalid symbol character"); v.push(c); tmp >>= 8; - if tmp <= 0 { + if tmp == 0 { break; } } String::from_utf8(v).unwrap() } - /// pub fn is_valid(&self) -> bool { - return is_valid_symbol_code(self.value); + is_valid_symbol_code(self.value) + } +} + +impl Display for SymbolCode { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) } } impl Packer for SymbolCode { - /// fn size(&self) -> usize { - return 8; + 8 } - /// fn pack(&self, enc: &mut Encoder) -> usize { self.value.pack(enc) } - /// fn unpack(&mut self, data: &[u8]) -> usize { - assert!(data.len() >= self.size(), "SymbolCode.unpack: buffer overflow"); + assert!( + data.len() >= self.size(), + "SymbolCode.unpack: buffer overflow" + ); self.value.unpack(data); assert!(self.is_valid(), "SymbolCode.unpack:: bad symbol code"); - return 8; + 8 } } -/// A struct representing the symbol of an asset. 
-#[cfg_attr(feature = "std", derive(eosio_scale_info::TypeInfo))] #[derive(Copy, Clone, Default, Eq, PartialEq)] pub struct Symbol { - /// value: u64, } impl Symbol { - /// pub fn new(name: &str, precision: u8) -> Self { let raw = name.as_bytes(); - assert!(raw.len() < 7 && raw.len() > 0, "bad symbol name"); + assert!(raw.len() < 7 && !raw.is_empty(), "bad symbol name"); let mut value: u64 = 0; for i in (0..raw.len()).rev() { let c = raw[i]; - assert!(c >= 'A' as u8 && c <= 'Z' as u8, "invald symbol character"); + assert!(c.is_ascii_uppercase(), "invalid symbol character"); value <<= 8; value |= c as u64; } value <<= 8; value |= precision as u64; - Self{value} + Self { value } } - /// pub fn value(&self) -> u64 { self.value } - /// pub fn code(&self) -> SymbolCode { - SymbolCode{value: self.value >> 8} + SymbolCode { + value: self.value >> 8, + } } - /// pub fn precision(&self) -> usize { (self.value & 0xFF) as usize } - /// - pub fn to_string(&self) -> String { + pub fn as_string(&self) -> String { self.precision().to_string() + "," + &self.code().to_string() } - /// pub fn is_valid(&self) -> bool { - return self.code().is_valid(); + self.code().is_valid() + } +} + +impl Display for Symbol { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) } } impl Packer for Symbol { - /// fn size(&self) -> usize { - return 8; + 8 } - /// fn pack(&self, enc: &mut Encoder) -> usize { self.value.pack(enc) } - /// fn unpack(&mut self, data: &[u8]) -> usize { assert!(data.len() >= self.size(), "Symbol.unpack: buffer overflow"); self.value.unpack(data); assert!(self.code().is_valid(), "Symbol.unpack: bad symbol value"); - return 8; + 8 } } -/// A struct representing an asset with an amount and symbol. -#[cfg_attr(feature = "std", derive(eosio_scale_info::TypeInfo))] #[derive(Copy, Clone, Default, Eq, PartialEq)] pub struct Asset { - /// amount: i64, - /// symbol: Symbol, } @@ -203,20 +192,21 @@ enum AssetStringParseStatus { } fn is_amount_within_range(amount: i64) -> bool { - return -MAX_AMOUNT <= amount && amount <= MAX_AMOUNT; + (-MAX_AMOUNT..=MAX_AMOUNT).contains(&amount) } impl Asset { - /// pub fn new(amount: i64, symbol: Symbol) -> Self { - assert!(is_amount_within_range(amount), "magnitude of asset amount must be less than 2^62"); + assert!( + is_amount_within_range(amount), + "magnitude of asset amount must be less than 2^62" + ); assert!(symbol.is_valid(), "invalid symbol name"); - Self{amount, symbol} + Self { amount, symbol } } - /// pub fn from_string(s: &str) -> Self { - assert!(s.len() > 0, "Asset.from_string: empty string"); + assert!(!s.is_empty(), "Asset.from_string: empty string"); let mut status = AssetStringParseStatus::Initial; let mut raw = s.as_bytes(); @@ -226,18 +216,25 @@ impl Asset { let mut precision: u8 = 0; let mut raw_symbol: Vec = Vec::with_capacity(7); - if raw[0] == '-' as u8 { + if raw[0] == b'-' { minus = true; raw = &raw[1..]; } for &c in raw { - if c == '.' as u8 { - assert!(status == AssetStringParseStatus::Initial, "Asset.from_string: invalid dot character"); + if c == b'.' 
{ + assert!( + status == AssetStringParseStatus::Initial, + "Asset.from_string: invalid dot character" + ); status = AssetStringParseStatus::FoundDot; continue; - } else if c == ' ' as u8 { - assert!(status == AssetStringParseStatus::Initial || status == AssetStringParseStatus::FoundDot, "Asset.from_string: invalid space character"); + } else if c == b' ' { + assert!( + status == AssetStringParseStatus::Initial + || status == AssetStringParseStatus::FoundDot, + "Asset.from_string: invalid space character" + ); // if status == AssetStringParseStatus::FoundDot { // assert!(precision > 0, "Asset.from_string: invalid precision"); // } @@ -247,28 +244,31 @@ impl Asset { match status { AssetStringParseStatus::Initial => { - assert!(c >= '0' as u8 && c <= '9' as u8, "Asset.from_string: bad amount"); + assert!(c.is_ascii_digit(), "Asset.from_string: bad amount"); amount *= 10; - amount += (c - '0' as u8) as i64; + amount += (c - b'0') as i64; assert!(is_amount_within_range(amount), "bad amount"); } AssetStringParseStatus::FoundDot => { - assert!(c >= '0' as u8 && c <= '9' as u8, "Asset.from_string: bad amount"); + assert!(c.is_ascii_digit(), "Asset.from_string: bad amount"); amount *= 10; - amount += (c - '0' as u8) as i64; + amount += (c - b'0') as i64; precision += 1; - assert!(precision <= MAX_PRECISION, "Asset.from_string: bad precision"); + assert!( + precision <= MAX_PRECISION, + "Asset.from_string: bad precision" + ); assert!(is_amount_within_range(amount), "bad amount"); } AssetStringParseStatus::FoundSpace => { - assert!(c >= 'A' as u8 && c <= 'Z' as u8, "Asset.from_string: bad symbol"); + assert!(c.is_ascii_uppercase(), "Asset.from_string: bad symbol"); raw_symbol.push(c); assert!(raw_symbol.len() < 7, "Asset.from_string: bad symbol"); } } } - assert!(raw_symbol.len() != 0, "Asset.from_string: bad symbol"); + assert!(!raw_symbol.is_empty(), "Asset.from_string: bad symbol"); if minus { amount = -amount; @@ -283,49 +283,50 @@ impl Asset { symbol <<= 8; symbol |= precision as u64; - Self{ - amount: amount, - symbol: Symbol{value: symbol} + Self { + amount, + symbol: Symbol { value: symbol }, } } - /// pub fn amount(&self) -> i64 { self.amount } - /// pub fn symbol(&self) -> Symbol { self.symbol } - /// - pub fn to_string(self) -> String { + pub fn as_string(self) -> String { let mut part1: i64 = self.amount; for _ in 0..self.symbol.precision() { part1 /= 10; } - let mut part2:Vec = Vec::with_capacity(self.symbol.precision()); - part2.resize(self.symbol.precision(), 0u8); + let mut part2: Vec = vec![0u8; self.symbol.precision()]; let mut tmp: i64 = self.amount; for i in (0..self.symbol.precision()).rev() { - part2[i] = '0' as u8 + (tmp % 10) as u8; + part2[i] = b'0' + (tmp % 10) as u8; tmp /= 10; } let mut decimal = String::from_utf8(part2).unwrap(); - if decimal.len() > 0 { + if !decimal.is_empty() { decimal = String::from(".") + decimal.as_str(); } part1.to_string() + decimal.as_str() + " " + &self.symbol.code().to_string() } - /// pub fn is_valid(&self) -> bool { - return is_amount_within_range(self.amount) && self.symbol().is_valid(); + is_amount_within_range(self.amount) && self.symbol().is_valid() + } +} + +impl Display for Asset { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) } } @@ -344,8 +345,8 @@ impl ops::Add for Asset { assert!(amount >= -MAX_AMOUNT, "addition underflow"); assert!(amount <= MAX_AMOUNT, "addition overflow"); Self { - amount: amount, - symbol: self.symbol + amount, + symbol: self.symbol, } } } @@ -365,8 +366,8 @@ 
impl ops::Sub for Asset { assert!(amount >= -MAX_AMOUNT, "subtraction underflow"); assert!(amount <= MAX_AMOUNT, "subtraction overflow"); Self { - amount: amount, - symbol: self.symbol + amount, + symbol: self.symbol, } } } @@ -378,12 +379,10 @@ impl ops::SubAssign for Asset { } impl Packer for Asset { - /// fn size(&self) -> usize { - return 16; + 16 } - /// fn pack(&self, enc: &mut Encoder) -> usize { let pos = enc.get_size(); @@ -393,52 +392,45 @@ impl Packer for Asset { enc.get_size() - pos } - /// fn unpack(&mut self, data: &[u8]) -> usize { assert!(data.len() >= self.size(), "Asset.unpack: buffer overflow"); let mut dec = Decoder::new(data); dec.unpack(&mut self.amount); - assert!(self.amount >= -MAX_AMOUNT && self.amount <= MAX_AMOUNT, "Asset.unpack: bad asset amount"); + assert!( + self.amount >= -MAX_AMOUNT && self.amount <= MAX_AMOUNT, + "Asset.unpack: bad asset amount" + ); dec.unpack(&mut self.symbol); dec.get_pos() } } -/// A struct representing an extended asset with an associated contract. -#[cfg_attr(feature = "std", derive(eosio_scale_info::TypeInfo))] #[derive(Copy, Clone, Default, Eq, PartialEq)] pub struct ExtendedAsset { - /// quantity: Asset, - /// contract: Name, } impl ExtendedAsset { - /// pub fn new(quantity: Asset, contract: Name) -> Self { - Self{quantity, contract} + Self { quantity, contract } } - /// pub fn quantity(&self) -> Asset { self.quantity } - /// pub fn contract(&self) -> Name { self.contract } } impl Packer for ExtendedAsset { - /// fn size(&self) -> usize { - return 16 + 8; + 16 + 8 } - /// fn pack(&self, enc: &mut Encoder) -> usize { let pos = enc.get_size(); @@ -448,13 +440,15 @@ impl Packer for ExtendedAsset { enc.get_size() - pos } - /// fn unpack(&mut self, data: &[u8]) -> usize { - assert!(data.len() >= self.size(), "ExtendedAsset.unpack: buffer overflow"); + assert!( + data.len() >= self.size(), + "ExtendedAsset.unpack: buffer overflow" + ); let mut dec = Decoder::new(data); dec.unpack(&mut self.quantity); dec.unpack(&mut self.contract); dec.get_pos() } -} \ No newline at end of file +} diff --git a/crates/antelope/src/chain/blob.rs b/crates/antelope/src/chain/blob.rs index fc697ed..82bf77b 100644 --- a/crates/antelope/src/chain/blob.rs +++ b/crates/antelope/src/chain/blob.rs @@ -1,51 +1,55 @@ -use crate::util::array_equals; - -#[derive(Debug, PartialEq, Eq)] -pub enum BlobType { - Bytes(Vec), - String(String), -} - -pub struct Blob { - pub array: Vec, -} - -impl Blob { - pub fn from(value: BlobType) -> Result { - match value { - BlobType::Bytes(bytes) => Ok(Blob { array: bytes }), - BlobType::String(string) => Self::from_string(&string), - } - } - - pub fn from_string(value: &str) -> Result { - // Remove padding characters '=' from the end of the string - let value_without_padding: String = value.trim_end_matches('=').to_string(); - - // Convert base64 string to bytes - match base64::decode(&value_without_padding) { - Ok(bytes) => Ok(Blob { array: bytes }), - Err(_) => Err("Invalid base64 string"), - } - } - - - pub fn equals(&self, other: &BlobType) -> bool { - if let BlobType::Bytes(bytes) = other { - array_equals(&self.array, bytes) - } else { - false - } - } - - pub fn base64_string(&self) -> String { - base64::encode(&self.array) - } - - pub fn utf8_string(&self) -> Result { - match String::from_utf8(self.array.clone()) { - Ok(utf8_string) => Ok(utf8_string), - Err(_) => Err("Invalid UTF-8 string"), - } - } -} \ No newline at end of file +use crate::util::array_equals; +use base64::engine::general_purpose::PAD; +use 
base64::engine::GeneralPurpose; +use base64::{alphabet, Engine as _}; + +#[derive(Debug, PartialEq, Eq)] +pub enum BlobType { + Bytes(Vec), + String(String), +} + +pub struct Blob { + pub array: Vec, +} + +pub const STANDARD: GeneralPurpose = GeneralPurpose::new(&alphabet::STANDARD, PAD); + +impl Blob { + pub fn from(value: BlobType) -> Result { + match value { + BlobType::Bytes(bytes) => Ok(Blob { array: bytes }), + BlobType::String(string) => Self::from_string(&string), + } + } + + pub fn from_string(value: &str) -> Result { + // Remove padding characters '=' from the end of the string + let value_without_padding: String = value.trim_end_matches('=').to_string(); + + // Convert base64 string to bytes + match STANDARD.decode(value_without_padding) { + Ok(bytes) => Ok(Blob { array: bytes }), + Err(_) => Err("Invalid base64 string"), + } + } + + pub fn equals(&self, other: &BlobType) -> bool { + if let BlobType::Bytes(bytes) = other { + array_equals(&self.array, bytes) + } else { + false + } + } + + pub fn base64_string(&self) -> String { + STANDARD.encode(&self.array) + } + + pub fn utf8_string(&self) -> Result { + match String::from_utf8(self.array.clone()) { + Ok(utf8_string) => Ok(utf8_string), + Err(_) => Err("Invalid UTF-8 string"), + } + } +} diff --git a/crates/antelope/src/chain/block_id.rs b/crates/antelope/src/chain/block_id.rs index 32ea29b..a4869cb 100644 --- a/crates/antelope/src/chain/block_id.rs +++ b/crates/antelope/src/chain/block_id.rs @@ -1,6 +1,6 @@ -use crate::chain::{ Encoder, Decoder, Packer }; -use crate::chain::checksum::Checksum256; +use crate::chain::{Decoder, Encoder, Packer}; use antelope_macros::StructPacker; +use std::fmt::{Display, Formatter}; #[derive(Clone, Eq, PartialEq, StructPacker)] pub struct BlockId { @@ -10,7 +10,9 @@ pub struct BlockId { impl BlockId { pub fn from_bytes(bytes: &Vec) -> Result { if bytes.len() != 32 { - return Err(String::from("BlockId.from_bytes expected bytes length of 32")); + return Err(String::from( + "BlockId.from_bytes expected bytes length of 32", + )); } Ok(Self { bytes: bytes.to_vec(), @@ -19,15 +21,19 @@ impl BlockId { pub fn block_num(&self) -> u32 { let num_bytes = &self.bytes[0..4]; - let num = (u32::from(num_bytes[0]) << 24) + (u32::from(num_bytes[0]) << 24) | (u32::from(num_bytes[1]) << 16) | (u32::from(num_bytes[2]) << 8) - | u32::from(num_bytes[3]); - - u32::from(num) + | u32::from(num_bytes[3]) } - pub fn to_string(&self) -> String { + pub fn as_string(&self) -> String { self.block_num().to_string() } } + +impl Display for BlockId { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) + } +} diff --git a/crates/antelope/src/chain/checksum.rs b/crates/antelope/src/chain/checksum.rs index be45094..dfff86c 100644 --- a/crates/antelope/src/chain/checksum.rs +++ b/crates/antelope/src/chain/checksum.rs @@ -1,7 +1,8 @@ -use ripemd::{Digest as Ripemd160Digest, Ripemd160}; -use sha2::{Sha256, Sha512}; use crate::chain::{Encoder, Packer}; use crate::util::{bytes_to_hex, hex_to_bytes, slice_copy}; +use ripemd::{Digest as Ripemd160Digest, Ripemd160}; +use sha2::{Sha256, Sha512}; +use std::fmt::{Display, Formatter}; #[derive(Clone, Copy, Eq, PartialEq, Default)] pub struct Checksum160 { @@ -10,13 +11,17 @@ pub struct Checksum160 { impl Checksum160 { pub fn from_hex(s: &str) -> Result { - if s.len() != 40 { return Err(String::from("Checksum160: bad hex string length")) } + if s.len() != 40 { + return Err(String::from("Checksum160: bad hex string length")); + } let data = 
hex_to_bytes(s); Self::from_bytes(data.as_slice()) } pub fn from_bytes(b: &[u8]) -> Result { - if b.len() != 20 { return Err(String::from("Checksum160: bad byte array length")) } + if b.len() != 20 { + return Err(String::from("Checksum160: bad byte array length")); + } let mut ret = Self::default(); slice_copy(&mut ret.data, b); Ok(ret) @@ -29,27 +34,31 @@ impl Checksum160 { Checksum160::from_bytes(ripe_hash.as_slice()).unwrap() } - pub fn to_string(&self) -> String { + pub fn as_string(&self) -> String { bytes_to_hex(&self.data.to_vec()) } } +impl Display for Checksum160 { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) + } +} + impl Packer for Checksum160 { fn size(&self) -> usize { - return 20; + 20 } fn pack(&self, enc: &mut Encoder) -> usize { - let data = enc.alloc(self.size()); - slice_copy(data, &self.data); - self.size() + pack_checksum(self.size(), &self.data, enc) } fn unpack(&mut self, raw: &[u8]) -> usize { let size = self.size(); assert!(raw.len() >= size, "Checksum160.unpack: buffer overflow!"); slice_copy(&mut self.data, &raw[..size]); - return size; + size } } @@ -60,13 +69,17 @@ pub struct Checksum256 { impl Checksum256 { pub fn from_hex(s: &str) -> Result { - if s.len() != 64 { return Err(String::from("Checksum256: bad hex string length")) } + if s.len() != 64 { + return Err(String::from("Checksum256: bad hex string length")); + } let data = hex_to_bytes(s); Self::from_bytes(data.as_slice()) } pub fn from_bytes(b: &[u8]) -> Result { - if b.len() != 32 { return Err(String::from("Checksum256: bad byte array length")) } + if b.len() != 32 { + return Err(String::from("Checksum256: bad byte array length")); + } let mut ret = Self::default(); slice_copy(&mut ret.data, b); Ok(ret) @@ -76,27 +89,31 @@ impl Checksum256 { return Checksum256::from_bytes(Sha256::digest(bytes).as_slice()).unwrap(); } - pub fn to_string(&self) -> String { + pub fn as_string(&self) -> String { bytes_to_hex(&self.data.to_vec()) } } +impl Display for Checksum256 { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) + } +} + impl Packer for Checksum256 { fn size(&self) -> usize { - return 32; + 32 } fn pack(&self, enc: &mut Encoder) -> usize { - let data = enc.alloc(self.size()); - slice_copy(data, &self.data); - self.size() + pack_checksum(self.size(), &self.data, enc) } fn unpack(&mut self, raw: &[u8]) -> usize { let size = self.size(); assert!(raw.len() >= size, "Checksum256.unpack: buffer overflow!"); slice_copy(&mut self.data, &raw[..size]); - return self.size(); + size } } @@ -107,13 +124,15 @@ pub struct Checksum512 { impl Checksum512 { pub fn from_hex(s: &str) -> Result { - if s.len() != 128 { return Err(String::from("Checksum512: bad hex string length")) } + if s.len() != 128 { + return Err(String::from("Checksum512: bad hex string length")); + } let data = hex_to_bytes(s); Ok(Self::from_bytes(data.as_slice())) } pub fn from_bytes(b: &[u8]) -> Self { - assert!(b.len() == 64, "Checksum512: bad byte array length"); + assert_eq!(b.len(), 64, "Checksum512: bad byte array length"); let mut ret = Self::default(); slice_copy(&mut ret.data, b); ret @@ -123,32 +142,42 @@ impl Checksum512 { return Checksum512::from_bytes(Sha512::digest(bytes).as_slice()); } - pub fn to_string(&self) -> String { + pub fn as_string(&self) -> String { bytes_to_hex(&self.data.to_vec()) } } +impl Display for Checksum512 { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) + } +} + impl 
Default for Checksum512 { fn default() -> Self { - Checksum512 {data: [0; 64]} + Checksum512 { data: [0; 64] } } } impl Packer for Checksum512 { fn size(&self) -> usize { - return 64; + 64 } fn pack(&self, enc: &mut Encoder) -> usize { - let data = enc.alloc(self.size()); - slice_copy(data, &self.data); - self.size() + pack_checksum(self.size(), &self.data, enc) } fn unpack(&mut self, raw: &[u8]) -> usize { let size = self.size(); assert!(raw.len() >= size, "Checksum512.unpack: buffer overflow!"); slice_copy(&mut self.data, &raw[..size]); - return size; + size } -} \ No newline at end of file +} + +fn pack_checksum(size: usize, data: &[u8], enc: &mut Encoder) -> usize { + let allocated = enc.alloc(size); + slice_copy(allocated, data); + size +} diff --git a/crates/antelope/src/chain/key_type.rs b/crates/antelope/src/chain/key_type.rs index 80a365d..2cbc30d 100644 --- a/crates/antelope/src/chain/key_type.rs +++ b/crates/antelope/src/chain/key_type.rs @@ -1,8 +1,9 @@ -use std::fmt::{Display, Formatter}; use crate::chain::{Encoder, Packer}; +use std::fmt::{Display, Formatter}; -#[derive(Clone, Copy, Eq, PartialEq)] +#[derive(Clone, Copy, Eq, PartialEq, Default)] pub enum KeyType { + #[default] K1, R1, // ... other variants ... @@ -24,7 +25,7 @@ impl KeyTypeTrait for KeyType { return Ok(KeyType::R1); } - return Err(format!("Unknown key type {s}")); + Err(format!("Unknown key type {s}")) } fn from_index(i: u8) -> Result { @@ -35,13 +36,13 @@ impl KeyTypeTrait for KeyType { if i == 1 { return Ok(KeyType::R1); } - return Err(String::from(format!("Unknown KeyType index {i}"))); + Err(format!("Unknown KeyType index {i}")) } fn to_index(&self) -> u8 { match self { - KeyType::K1 => { 0 } - KeyType::R1 => { 1 } + KeyType::K1 => 0, + KeyType::R1 => 1, } } } @@ -49,17 +50,16 @@ impl KeyTypeTrait for KeyType { impl Display for KeyType { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { - KeyType::K1 => { write!(f, "K1") } - KeyType::R1 => { write!(f, "R1") } + KeyType::K1 => { + write!(f, "K1") + } + KeyType::R1 => { + write!(f, "R1") + } } } } - -impl Default for KeyType { - fn default() -> Self { KeyType::K1 } -} - impl Packer for KeyType { fn size(&self) -> usize { 1usize @@ -68,16 +68,18 @@ impl Packer for KeyType { fn pack(&self, enc: &mut Encoder) -> usize { let data = enc.alloc(self.size()); match self { - KeyType::K1 => { data[0] = 0u8 } - KeyType::R1 => { data[0] = 1u8 } + KeyType::K1 => data[0] = 0u8, + KeyType::R1 => data[0] = 1u8, } self.size() } fn unpack(&mut self, data: &[u8]) -> usize { - assert!(data.len() >= self.size(), "KeyType::unpack: buffer overflow"); + assert!( + data.len() >= self.size(), + "KeyType::unpack: buffer overflow" + ); *self = KeyType::from_index(data[0]).unwrap(); self.size() } } - diff --git a/crates/antelope/src/chain/mod.rs b/crates/antelope/src/chain/mod.rs index 3963e1e..ee7fdff 100644 --- a/crates/antelope/src/chain/mod.rs +++ b/crates/antelope/src/chain/mod.rs @@ -1,4 +1,4 @@ -pub use crate::serializer::serializer::{ Decoder, Encoder, Packer }; +pub use crate::serializer::{Decoder, Encoder, Packer}; pub mod action; pub mod asset; @@ -10,13 +10,13 @@ pub mod name; pub mod private_key; pub mod public_key; pub mod signature; +pub mod time; pub mod transaction; pub mod varint; -pub mod time; #[macro_export] macro_rules! 
name { ($str:expr) => { - Name::from_str($str) + Name::new_from_str($str) }; -} \ No newline at end of file +} diff --git a/crates/antelope/src/chain/name.rs b/crates/antelope/src/chain/name.rs index 37986fc..a8ca71b 100644 --- a/crates/antelope/src/chain/name.rs +++ b/crates/antelope/src/chain/name.rs @@ -1,8 +1,5 @@ -use crate::serializer::serializer::{ - Packer, - Encoder, -}; -use crate::util::memcpy; +use crate::serializer::{Encoder, Packer}; +use std::fmt::{Display, Formatter}; const INVALID_NAME_CHAR: u8 = 0xffu8; @@ -11,24 +8,15 @@ const INVALID_NAME_CHAR: u8 = 0xffu8; /// ".12345abcdefghijklmnopqrstuvwxyz" pub const fn char_to_index(c: u8) -> u8 { match c as char { - 'a'..='z' => { - return (c - 'a' as u8) + 6; - } - '1'..='5' => { - return (c - '1' as u8) + 1; - } - '.' => { - return 0; - } - _ => { - return INVALID_NAME_CHAR; - } + 'a'..='z' => (c - b'a') + 6, + '1'..='5' => (c - b'1') + 1, + '.' => 0, + _ => INVALID_NAME_CHAR, } } const INVALID_NAME: u64 = 0xFFFF_FFFF_FFFF_FFFFu64; - // converts a static string to an `name` object. pub const fn static_str_to_name(s: &'static str) -> u64 { let mut value: u64 = 0; @@ -38,7 +26,7 @@ pub const fn static_str_to_name(s: &'static str) -> u64 { return INVALID_NAME; } - if _s.len() == 0 { + if _s.is_empty() { return 0; } @@ -62,7 +50,7 @@ pub const fn static_str_to_name(s: &'static str) -> u64 { i += 1; } - value <<= 4 + 5*(12 - n); + value <<= 4 + 5 * (12 - n); if _s.len() == 13 { let tmp = char_to_index(_s[12]) as u64; @@ -75,26 +63,27 @@ pub const fn static_str_to_name(s: &'static str) -> u64 { value |= tmp; } - return value; + value } - /// similar to static_str_to_name, /// but also checks the validity of the resulting `name` object. pub fn static_str_to_name_checked(s: &'static str) -> u64 { let n = static_str_to_name(s); - assert!(n != INVALID_NAME, "bad name"); - return n; + assert_ne!(n, INVALID_NAME, "bad name"); + n } - // a shorthand for static_str_to_name_checked. pub fn s2n(s: &'static str) -> u64 { - return static_str_to_name_checked(s); + static_str_to_name_checked(s) } // ".12345abcdefghijklmnopqrstuvwxyz" -pub const CHAR_MAP: [u8; 32] = [46,49,50,51,52,53,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122]; +pub const CHAR_MAP: [u8; 32] = [ + 46, 49, 50, 51, 52, 53, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, + 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, +]; /// converts an `name` object to a string. pub fn n2s(value: u64) -> String { @@ -102,13 +91,12 @@ pub fn n2s(value: u64) -> String { let mut s: [u8; 13] = [46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46]; //'.' let mut tmp = value; for i in 0..13 { - let c: u8; - if i == 0 { - c = CHAR_MAP[(tmp&0x0f) as usize]; + let c: u8 = if i == 0 { + CHAR_MAP[(tmp & 0x0f) as usize] } else { - c = CHAR_MAP[(tmp&0x1f) as usize]; - } - s[12-i] = c; + CHAR_MAP[(tmp & 0x1f) as usize] + }; + s[12 - i] = c; if i == 0 { tmp >>= 4 } else { @@ -118,15 +106,14 @@ pub fn n2s(value: u64) -> String { let mut i = s.len() - 1; while i != 0 { - if s[i] != '.' as u8 { - break + if s[i] != b'.' 
{ + break; } i -= 1; } - return String::from_utf8(s[0..i+1].to_vec()).unwrap(); + String::from_utf8(s[0..i + 1].to_vec()).unwrap() } - /// fn str_to_name(s: &str) -> u64 { let mut value: u64 = 0; @@ -136,7 +123,7 @@ fn str_to_name(s: &str) -> u64 { return INVALID_NAME; } - if _s.len() == 0 { + if _s.is_empty() { return 0; } @@ -160,7 +147,7 @@ fn str_to_name(s: &str) -> u64 { i += 1; } - value <<= 4 + 5*(12 - n); + value <<= 4 + 5 * (12 - n); if _s.len() == 13 { let tmp = char_to_index(_s[12]) as u64; @@ -173,54 +160,56 @@ fn str_to_name(s: &str) -> u64 { value |= tmp; } - return value; + value } fn str_to_name_checked(s: &str) -> u64 { let n = str_to_name(s); - assert!(n != INVALID_NAME, "bad name string"); - return n; + assert_ne!(n, INVALID_NAME, "bad name string"); + n } -/// a wrapper around a 64-bit unsigned integer that represents a name in the EOSIO blockchain +/// a wrapper around a 64-bit unsigned integer that represents a name in the Antelope blockchain #[repr(C, align(8))] #[derive(Copy, Clone, Debug, Default, Eq, PartialEq)] -#[cfg_attr(feature = "std", derive(TypeInfo))] pub struct Name { - /// pub n: u64, } impl Name { - /// pub fn new(s: &'static str) -> Self { Name { n: s2n(s) } } pub fn value(&self) -> u64 { - return self.n + self.n } - /// pub fn from_u64(n: u64) -> Self { - assert!(n != INVALID_NAME, "bad name value"); - Name { n: n } + assert_ne!(n, INVALID_NAME, "bad name value"); + Name { n } } - /// - pub fn from_str(s: &str) -> Self { - return Name{ n: str_to_name_checked(s) }; + pub fn new_from_str(s: &str) -> Self { + Name { + n: str_to_name_checked(s), + } } - /// - pub fn to_string(&self) -> String { + pub fn as_string(&self) -> String { n2s(self.n) } } +impl Display for Name { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) + } +} + impl Packer for Name { fn size(&self) -> usize { - return 8; + 8 } fn pack(&self, enc: &mut Encoder) -> usize { @@ -229,13 +218,18 @@ impl Packer for Name { fn unpack(&mut self, raw: &[u8]) -> usize { assert!(raw.len() >= 8, "Name.unpack: buffer overflow!"); - self.n = 0; - memcpy(&self.n as *const u64 as *mut u8, raw.as_ptr(), 8); - return 8; + self.n = u64::from_ne_bytes(raw[0..8].try_into().unwrap()); + 8 } } -pub const SAME_PAYER: Name = Name{n: 0}; -pub const ACTIVE: Name = Name{n: static_str_to_name("active")}; -pub const OWNER: Name = Name{n: static_str_to_name("owner")}; -pub const CODE: Name = Name{n: static_str_to_name("eosio.code")}; \ No newline at end of file +pub const SAME_PAYER: Name = Name { n: 0 }; +pub const ACTIVE: Name = Name { + n: static_str_to_name("active"), +}; +pub const OWNER: Name = Name { + n: static_str_to_name("owner"), +}; +pub const CODE: Name = Name { + n: static_str_to_name("eosio.code"), +}; diff --git a/crates/antelope/src/chain/private_key.rs b/crates/antelope/src/chain/private_key.rs index 80d8686..03aec19 100644 --- a/crates/antelope/src/chain/private_key.rs +++ b/crates/antelope/src/chain/private_key.rs @@ -7,7 +7,7 @@ use crate::crypto::generate::generate; use crate::crypto::get_public::get_public; use crate::crypto::shared_secrets::shared_secret; use crate::crypto::sign::sign; - +use std::fmt::{Display, Formatter}; pub struct PrivateKey { pub key_type: KeyType, @@ -15,22 +15,24 @@ pub struct PrivateKey { } impl PrivateKey { - // TODO: should this be done via the ToString trait? // If so, should other structs also do that? // Also if so, should from on this and other structs use the From trait? 
- pub fn to_string(&self) -> String { + pub fn as_string(&self) -> String { let type_str = self.key_type.to_string(); - let encoded = encode_ripemd160_check(self.value.to_vec(), Option::from(self.key_type.to_string().as_str())); - return format!("PVT_{type_str}_{encoded}"); + let encoded = encode_ripemd160_check( + self.value.to_vec(), + Option::from(self.key_type.to_string().as_str()), + ); + format!("PVT_{type_str}_{encoded}") } pub fn to_bytes(&self) -> Vec { - return self.value.to_vec(); + self.value.to_vec() } pub fn to_hex(&self) -> String { - return hex::encode(&self.value); + hex::encode(&self.value) } pub fn to_wif(&self) -> Result { @@ -41,18 +43,18 @@ impl PrivateKey { to_encode.push(0x80); to_encode.append(&mut self.value.to_vec()); - return Ok(encode_check(to_encode)); + Ok(encode_check(to_encode)) } pub fn to_public(&self) -> PublicKey { let compressed = get_public(self.value.to_vec(), self.key_type).unwrap(); - return PublicKey::from_bytes(compressed, self.key_type); + PublicKey::from_bytes(compressed, self.key_type) } pub fn from_bytes(bytes: Vec, key_type: KeyType) -> Self { - return PrivateKey { + PrivateKey { key_type, - value: bytes + value: bytes, } } @@ -64,23 +66,28 @@ impl PrivateKey { } let decoded = decode_result.unwrap(); - return Ok(PrivateKey { + Ok(PrivateKey { key_type: decoded.0, value: decoded.1, - }); + }) } pub fn sign_message(&self, message: &Vec) -> Signature { - return sign(self.value.to_vec(), message, self.key_type).unwrap(); + sign(self.value.to_vec(), message, self.key_type).unwrap() } pub fn shared_secret(&self, their_pub: &PublicKey) -> Checksum512 { - return Checksum512::hash(shared_secret(&self.to_bytes(), &their_pub.value, self.key_type).unwrap()); + Checksum512::hash(shared_secret(&self.to_bytes(), &their_pub.value, self.key_type).unwrap()) } pub fn random(key_type: KeyType) -> Result { let secret_bytes = generate(key_type); - return Ok(Self::from_bytes(secret_bytes.unwrap(), key_type)); + Ok(Self::from_bytes(secret_bytes.unwrap(), key_type)) } +} -} \ No newline at end of file +impl Display for PrivateKey { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) + } +} diff --git a/crates/antelope/src/chain/public_key.rs b/crates/antelope/src/chain/public_key.rs index 13a6bb5..fbbd34e 100644 --- a/crates/antelope/src/chain/public_key.rs +++ b/crates/antelope/src/chain/public_key.rs @@ -1,7 +1,8 @@ -use antelope_macros::StructPacker; use crate::base58::{decode_public_key, encode_ripemd160_check}; use crate::chain::{key_type::KeyType, Decoder, Encoder, Packer}; use crate::util::bytes_to_hex; +use antelope_macros::StructPacker; +use std::fmt::{Display, Formatter}; #[derive(Clone, Eq, PartialEq, Default, StructPacker)] pub struct PublicKey { @@ -10,10 +11,12 @@ pub struct PublicKey { } impl PublicKey { - - pub fn to_string(&self) -> String { + pub fn as_string(&self) -> String { let type_str = self.key_type.to_string(); - let encoded = encode_ripemd160_check(self.value.to_vec(), Option::from(self.key_type.to_string().as_str())); + let encoded = encode_ripemd160_check( + self.value.to_vec(), + Option::from(self.key_type.to_string().as_str()), + ); format!("PUB_{type_str}_{encoded}") } @@ -30,25 +33,23 @@ impl PublicKey { Ok(format!("{key_prefix}{encoded}")) } - pub fn from_str(value: &str) -> Result { + pub fn new_from_str(value: &str) -> Result { match decode_public_key(value) { - Ok(decoded) => { - Ok(PublicKey { - key_type: decoded.0, - value: decoded.1 - }) - } - Err(err_string) => { - Err(err_string) 
- } + Ok(decoded) => Ok(PublicKey { + key_type: decoded.0, + value: decoded.1, + }), + Err(err_string) => Err(err_string), } } pub fn from_bytes(value: Vec, key_type: KeyType) -> Self { - PublicKey { - key_type, - value - } + PublicKey { key_type, value } } +} -} \ No newline at end of file +impl Display for PublicKey { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) + } +} diff --git a/crates/antelope/src/chain/signature.rs b/crates/antelope/src/chain/signature.rs index 15e2897..319a81a 100644 --- a/crates/antelope/src/chain/signature.rs +++ b/crates/antelope/src/chain/signature.rs @@ -1,37 +1,36 @@ -use ecdsa::RecoveryId; -use k256::elliptic_curve::Curve; -use k256::Secp256k1; -use p256::NistP256; use crate::base58; use crate::base58::encode_ripemd160_check; -use crate::chain::key_type::KeyTypeTrait; -use crate::chain::{Encoder, Packer}; use crate::chain::key_type::KeyType; +use crate::chain::key_type::KeyTypeTrait; use crate::chain::public_key::PublicKey; +use crate::chain::{Encoder, Packer}; use crate::crypto::recover::recover_message; -use crate::crypto::verify::{verify_message}; +use crate::crypto::verify::verify_message; use crate::util::slice_copy; +use ecdsa::RecoveryId; +use k256::Secp256k1; +use p256::NistP256; +use std::fmt::{Display, Formatter}; -#[derive(Clone, Eq, PartialEq )] +#[derive(Clone, Eq, PartialEq)] pub struct Signature { pub key_type: KeyType, value: Vec, } impl Signature { - pub const RECOVERY_ID_ADDITION: u8 = 27; pub fn recovery_id(&self) -> u8 { - return self.value[0]; + self.value[0] } pub fn r(&self) -> Vec { - return self.value[1..33].to_vec(); + self.value[1..33].to_vec() } pub fn s(&self) -> Vec { - return self.value[33..65].to_vec(); + self.value[33..65].to_vec() } /* @@ -44,40 +43,45 @@ impl Signature { */ pub fn verify_message(&self, message: &Vec, public_key: &PublicKey) -> bool { - return verify_message(self, message, &public_key.value); + verify_message(self, message, &public_key.value) } pub fn recover_message(&self, message: &Vec) -> PublicKey { - return recover_message(&self, &message); + recover_message(self, message) } - pub fn to_string(&self) -> String { + pub fn as_string(&self) -> String { let type_str = self.key_type.to_string(); - let encoded = encode_ripemd160_check(self.value.to_vec(), Option::from(self.key_type.to_string().as_str())); - return format!("SIG_{type_str}_{encoded}"); + let encoded = encode_ripemd160_check( + self.value.to_vec(), + Option::from(self.key_type.to_string().as_str()), + ); + format!("SIG_{type_str}_{encoded}") } pub fn from_string(s: &str) -> Result { if !s.starts_with("SIG_") { return Err(format!("String did not start with SIG_: {s}")); } - let parts: Vec<&str> = s.split("_").collect(); + let parts: Vec<&str> = s.split('_').collect(); let key_type = KeyType::from_string(parts[1]).unwrap(); - let mut size: Option = None; - match key_type { - KeyType::K1 | KeyType::R1 => { - size = Some(65); - } - } - - let value = base58::decode_ripemd160_check(parts[2], size, Option::from(key_type), false).unwrap(); - return Ok(Signature { - key_type, - value - }) - } - - pub fn from_k1_signature(signature: ecdsa::Signature, recovery: RecoveryId) -> Result { + let size: Option = Some(65); + // TODO: add back this logic when other key types are supported and have a different length + // match key_type { + // KeyType::K1 | KeyType::R1 => { + // size = Some(65); + // } + // } + + let value = + base58::decode_ripemd160_check(parts[2], size, Option::from(key_type), false).unwrap(); + 
Ok(Signature { key_type, value }) + } + + pub fn from_k1_signature( + signature: ecdsa::Signature, + recovery: RecoveryId, + ) -> Result { let r = signature.r().to_bytes().to_vec(); let s = signature.s().to_bytes().to_vec(); let mut data: Vec = Vec::new(); @@ -95,13 +99,16 @@ impl Signature { data.extend(r.to_vec()); data.extend(s.to_vec()); - return Ok(Signature { + Ok(Signature { key_type: KeyType::K1, - value: data + value: data, }) } - pub fn from_r1_signature(signature: ecdsa::Signature, recovery: RecoveryId) -> Result { + pub fn from_r1_signature( + signature: ecdsa::Signature, + recovery: RecoveryId, + ) -> Result { let r = signature.r().to_bytes().to_vec(); let s = signature.s().to_bytes().to_vec(); let mut data: Vec = Vec::new(); @@ -115,43 +122,51 @@ impl Signature { data.extend(r.to_vec()); data.extend(s.to_vec()); - return Ok(Signature { + Ok(Signature { key_type: KeyType::R1, - value: data + value: data, }) } pub fn from_bytes(bytes: Vec, key_type: KeyType) -> Self { - return Signature { + Signature { key_type, - value: bytes + value: bytes, } } - pub fn is_canonical(r: &Vec, s: &Vec) -> bool { - return !(r[0] & 0x80 != 0) - && !(r[0] == 0 && r[1] & 0x80 == 0) - && !(s[0] & 0x80 != 0) - && !(s[0] == 0 && s[1] & 0x80 == 0); + pub fn is_canonical(r: &[u8], s: &[u8]) -> bool { + !((r[0] & 0x80 != 0) + || (s[0] & 0x80 != 0) + || r[0] == 0 && r[1] & 0x80 == 0 + || s[0] == 0 && s[1] & 0x80 == 0) } +} +impl Display for Signature { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_string()) + } } impl Default for Signature { fn default() -> Self { - Self { key_type: KeyType::K1, value: vec![0; 65] } + Self { + key_type: KeyType::K1, + value: vec![0; 65], + } } } impl Packer for Signature { fn size(&self) -> usize { - return 66; + 66 } fn pack(&self, enc: &mut Encoder) -> usize { self.key_type.pack(enc); let data = enc.alloc(self.value.len()); - slice_copy(data, &self.value.as_slice()); + slice_copy(data, &self.value); self.size() } @@ -160,6 +175,6 @@ impl Packer for Signature { assert!(data.len() >= size, "Signature::unpack: buffer overflow"); self.key_type = KeyType::from_index(data[0]).unwrap(); self.value = data[1..size].to_vec(); - return self.size(); + self.size() } } diff --git a/crates/antelope/src/chain/time.rs b/crates/antelope/src/chain/time.rs index 12f1792..58cf165 100644 --- a/crates/antelope/src/chain/time.rs +++ b/crates/antelope/src/chain/time.rs @@ -1,7 +1,7 @@ use crate::chain::{Encoder, Packer}; -use chrono::{DateTime, NaiveDateTime, ParseError, TimeZone, Utc}; +use chrono::{NaiveDateTime, TimeZone, Utc}; - #[derive(Copy, Clone, Default, PartialEq)] +#[derive(Copy, Clone, Default, PartialEq)] pub struct TimePoint { /// elapsed in microseconds pub elapsed: u64, @@ -13,22 +13,20 @@ impl TimePoint { let naive_date_time = NaiveDateTime::parse_from_str(t, "%Y-%m-%dT%H:%M:%S%.f"); if naive_date_time.is_err() { - return Err( - String::from("Failed to parse datetime ") + - naive_date_time.err().unwrap().to_string().as_str() - ); + return Err(String::from("Failed to parse datetime ") + + naive_date_time.err().unwrap().to_string().as_str()); } let date_time = Utc.from_utc_datetime(&naive_date_time.unwrap()); Ok(Self { - elapsed: (date_time.timestamp_millis() * 1000) as u64 + elapsed: (date_time.timestamp_millis() * 1000) as u64, }) } } impl Packer for TimePoint { fn size(&self) -> usize { - return 8; + 8 } fn pack(&self, enc: &mut Encoder) -> usize { @@ -36,8 +34,11 @@ impl Packer for TimePoint { } fn unpack(&mut self, raw: &[u8]) -> 
usize { - assert!(raw.len() >= self.size(), "TimePoint.unpack: buffer overflow!"); - return self.elapsed.unpack(raw); + assert!( + raw.len() >= self.size(), + "TimePoint.unpack: buffer overflow!" + ); + self.elapsed.unpack(raw) } } @@ -48,18 +49,18 @@ pub struct TimePointSec { } impl TimePointSec { - pub fn new(seconds: u32) -> Self{ - Self{ seconds } + pub fn new(seconds: u32) -> Self { + Self { seconds } } pub fn seconds(&self) -> u32 { - return self.seconds; + self.seconds } } impl Packer for TimePointSec { fn size(&self) -> usize { - return 4; + 4 } fn pack(&self, enc: &mut Encoder) -> usize { @@ -67,7 +68,10 @@ impl Packer for TimePointSec { } fn unpack(&mut self, raw: &[u8]) -> usize { - assert!(raw.len() >= self.size(), "TimePointSec.unpack: buffer overflow!"); - return self.seconds.unpack(raw); + assert!( + raw.len() >= self.size(), + "TimePointSec.unpack: buffer overflow!" + ); + self.seconds.unpack(raw) } -} \ No newline at end of file +} diff --git a/crates/antelope/src/chain/transaction.rs b/crates/antelope/src/chain/transaction.rs index c804160..0558cc0 100644 --- a/crates/antelope/src/chain/transaction.rs +++ b/crates/antelope/src/chain/transaction.rs @@ -1,42 +1,43 @@ -use std::collections::HashMap; -use serde_json::{json, Value}; -use antelope_macros::StructPacker; -use crate::chain::signature::Signature; -use crate::chain::{action::Action, Encoder, Decoder, Packer, time::TimePointSec, varint::VarUint32}; use crate::chain::checksum::Checksum256; +use crate::chain::signature::Signature; +use crate::chain::{ + action::Action, time::TimePointSec, varint::VarUint32, Decoder, Encoder, Packer, +}; use crate::util::{bytes_to_hex, zlib_compress}; +use antelope_macros::StructPacker; +use serde_json::{json, Value}; +use std::collections::HashMap; #[derive(Clone, Eq, PartialEq, Default, StructPacker)] pub struct TransactionExtension { - pub ty: u16, - pub data: Vec, + pub ty: u16, + pub data: Vec, } #[derive(Clone, Eq, PartialEq, Default, StructPacker)] pub struct TransactionHeader { - pub expiration: TimePointSec, - pub ref_block_num: u16, - pub ref_block_prefix: u32, - pub max_net_usage_words: VarUint32, - pub max_cpu_usage_ms: u8, - pub delay_sec: VarUint32, + pub expiration: TimePointSec, + pub ref_block_num: u16, + pub ref_block_prefix: u32, + pub max_net_usage_words: VarUint32, + pub max_cpu_usage_ms: u8, + pub delay_sec: VarUint32, } #[derive(Clone, Eq, PartialEq, Default, StructPacker)] pub struct Transaction { - pub header: TransactionHeader, - pub context_free_actions: Vec, - pub actions: Vec, - pub extension: Vec, + pub header: TransactionHeader, + pub context_free_actions: Vec, + pub actions: Vec, + pub extension: Vec, } impl Transaction { - pub fn id(&self) -> Vec { Checksum256::hash(Encoder::pack(self)).data.to_vec() } - pub fn signing_data(&self, chain_id: &Vec) -> Vec { + pub fn signing_data(&self, chain_id: &[u8]) -> Vec { let mut bytes = chain_id.to_vec(); let encoded = &mut Encoder::pack(self); bytes.append(encoded); @@ -44,22 +45,22 @@ impl Transaction { bytes } - pub fn signing_digest(&self, chain_id: &Vec) -> Vec { + pub fn signing_digest(&self, chain_id: &[u8]) -> Vec { Checksum256::hash(self.signing_data(chain_id)).data.to_vec() } } #[derive(Clone, Eq, PartialEq, Default, StructPacker)] pub struct SignedTransaction { - pub transaction: Transaction, - pub signatures: Vec, - pub context_free_data: Vec> + pub transaction: Transaction, + pub signatures: Vec, + pub context_free_data: Vec>, } #[derive(PartialEq)] pub enum CompressionType { ZLIB, - NONE + NONE, } impl 
CompressionType { @@ -73,14 +74,17 @@ impl CompressionType { #[derive(Clone, Eq, PartialEq, Default, StructPacker)] pub struct PackedTransaction { - signatures: Vec, - compression: Option, - packed_context_free_data: Vec, - packed_transaction: Vec + signatures: Vec, + compression: Option, + packed_context_free_data: Vec, + packed_transaction: Vec, } impl PackedTransaction { - pub fn from_signed(signed: SignedTransaction, compression: CompressionType) -> Result { + pub fn from_signed( + signed: SignedTransaction, + compression: CompressionType, + ) -> Result { let mut packed_transaction = Encoder::pack(&signed.transaction); let mut packed_context_free_data = Encoder::pack(&signed.context_free_data); if compression == CompressionType::ZLIB { @@ -92,7 +96,7 @@ impl PackedTransaction { signatures: signed.signatures, compression: Some(compression.index() as u8), packed_transaction, - packed_context_free_data + packed_context_free_data, }) } @@ -101,11 +105,19 @@ impl PackedTransaction { let signatures: Vec = self.signatures.iter().map(|sig| sig.to_string()).collect(); trx.insert("signatures", json!(signatures)); if self.compression.is_some() { - trx.insert("compression", Value::Number(self.compression.unwrap().into())); + trx.insert( + "compression", + Value::Number(self.compression.unwrap().into()), + ); } - trx.insert("packed_context_free_data", Value::String(bytes_to_hex(&self.packed_context_free_data))); - trx.insert("packed_trx", Value::String(bytes_to_hex(&self.packed_transaction))); + trx.insert( + "packed_context_free_data", + Value::String(bytes_to_hex(&self.packed_context_free_data)), + ); + trx.insert( + "packed_trx", + Value::String(bytes_to_hex(&self.packed_transaction)), + ); json!(trx).to_string() } - -} \ No newline at end of file +} diff --git a/crates/antelope/src/chain/varint.rs b/crates/antelope/src/chain/varint.rs index 837ad46..6df825c 100644 --- a/crates/antelope/src/chain/varint.rs +++ b/crates/antelope/src/chain/varint.rs @@ -1,10 +1,5 @@ -use crate::serializer::serializer::{ - Packer, - Encoder, -}; +use crate::serializer::{Encoder, Packer}; -/// A variable-length unsigned integer structure. -#[cfg_attr(feature = "std", derive(crate::eosio_scale_info::TypeInfo))] #[derive(Copy, Clone, Eq, PartialEq, Default, Debug)] pub struct VarUint32 { /// The unsigned integer value. @@ -14,12 +9,12 @@ pub struct VarUint32 { impl VarUint32 { /// Create a new VarUint32 instance with the given value. pub fn new(n: u32) -> Self { - Self { n: n } + Self { n } } /// Get the value of the VarUint32 instance. pub fn value(&self) -> u32 { - return self.n; + self.n } } @@ -36,7 +31,7 @@ impl Packer for VarUint32 { val >>= 7; size += 1; } - return size; + size } /// Serialize the VarUint32 value. 
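The VarUint32 hunks around this point serialize the value as unsigned LEB128: seven payload bits per byte, with the high bit set on every byte except the last (the unpack hunk's `assert!(by < 32, ...)` guards against over-long encodings). As a standalone illustration of that wire format only — a sketch, not the crate's implementation — encoding and decoding look like this:

```rust
// Unsigned LEB128, the format VarUint32 packs to and unpacks from (illustrative sketch).
fn encode_varuint32(mut n: u32) -> Vec<u8> {
    let mut out = Vec::new();
    loop {
        let mut byte = (n & 0x7f) as u8;
        n >>= 7;
        if n != 0 {
            byte |= 0x80; // continuation bit: more bytes follow
        }
        out.push(byte);
        if n == 0 {
            return out;
        }
    }
}

fn decode_varuint32(data: &[u8]) -> (u32, usize) {
    let (mut value, mut shift) = (0u32, 0u32);
    for (i, byte) in data.iter().enumerate() {
        value |= ((byte & 0x7f) as u32) << shift;
        if byte & 0x80 == 0 {
            return (value, i + 1); // decoded value plus number of bytes consumed
        }
        shift += 7;
        assert!(shift < 32, "malformed varuint32 data");
    }
    panic!("truncated varuint32");
}

// e.g. values below 128 take a single byte; 300 encodes to [0xac, 0x02].
```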
@@ -76,14 +71,14 @@ impl Packer for VarUint32 { assert!(by < 32, "malformed varuint32 data"); } self.n = value; - return length; + length } } #[cfg(test)] mod tests { - use crate::chain::Encoder; use super::*; + use crate::chain::Encoder; #[test] fn test_varuint32_pack_unpack() { diff --git a/crates/antelope/src/crypto/curves.rs b/crates/antelope/src/crypto/curves.rs index 220ccdd..3af26e2 100644 --- a/crates/antelope/src/crypto/curves.rs +++ b/crates/antelope/src/crypto/curves.rs @@ -1,8 +1,7 @@ - -pub fn create_k1_field_bytes(bytes: &Vec) -> k256::elliptic_curve::FieldBytes { - return *k256::elliptic_curve::FieldBytes::::from_slice(&bytes); +pub fn create_k1_field_bytes(bytes: &[u8]) -> k256::elliptic_curve::FieldBytes { + return *k256::elliptic_curve::FieldBytes::::from_slice(bytes); } -pub fn create_r1_field_bytes(bytes: &Vec) -> p256::elliptic_curve::FieldBytes { - return *p256::elliptic_curve::FieldBytes::::from_slice(&bytes); -} \ No newline at end of file +pub fn create_r1_field_bytes(bytes: &[u8]) -> p256::elliptic_curve::FieldBytes { + return *p256::elliptic_curve::FieldBytes::::from_slice(bytes); +} diff --git a/crates/antelope/src/crypto/generate.rs b/crates/antelope/src/crypto/generate.rs index ac9d09e..e15556d 100644 --- a/crates/antelope/src/crypto/generate.rs +++ b/crates/antelope/src/crypto/generate.rs @@ -1,7 +1,7 @@ +use crate::chain::key_type::KeyType; use k256; use p256; use p256::elliptic_curve::sec1::ToEncodedPoint; -use crate::chain::key_type::KeyType; pub fn generate(curve_type: KeyType) -> Result, String> { // TODO: maybe these can use generic types to deduplicate code? @@ -11,14 +11,14 @@ pub fn generate(curve_type: KeyType) -> Result, String> { let scalar = k256::NonZeroScalar::from(secret_key); let public_key = k256::PublicKey::from_secret_scalar(&scalar); let encoded_point = public_key.to_encoded_point(true); - return Ok(encoded_point.as_bytes().to_vec()); - }, + Ok(encoded_point.as_bytes().to_vec()) + } KeyType::R1 => { let secret_key = p256::SecretKey::random(&mut rand::thread_rng()); let scalar = p256::NonZeroScalar::from(secret_key); let public_key = p256::PublicKey::from_secret_scalar(&scalar); let encoded_point = public_key.to_encoded_point(true); - return Ok(encoded_point.as_bytes().to_vec()); - }, + Ok(encoded_point.as_bytes().to_vec()) + } } } diff --git a/crates/antelope/src/crypto/get_public.rs b/crates/antelope/src/crypto/get_public.rs index 01cd1df..facb783 100644 --- a/crates/antelope/src/crypto/get_public.rs +++ b/crates/antelope/src/crypto/get_public.rs @@ -1,25 +1,27 @@ -use k256; -use p256; -use ecdsa::elliptic_curve::sec1::ToEncodedPoint; use crate::chain::key_type::KeyType; use crate::crypto::curves::{create_k1_field_bytes, create_r1_field_bytes}; +use ecdsa::elliptic_curve::sec1::ToEncodedPoint; +use k256; +use p256; pub fn get_public(priv_key: Vec, curve_type: KeyType) -> Result, String> { // TODO: maybe these can use generic types to deduplicate code? 
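    // Editorial note (not part of the patch), on the TODO above: the K1 and R1 arms
    // below are structurally identical — wrap the raw secret in curve FieldBytes,
    // derive the public key from the non-zero secret scalar, and return the
    // SEC1-compressed encoded point (33 bytes, leading 0x02/0x03).
    // A hypothetical caller-side sketch, using values that appear in the crypto
    // tests further down in this diff:
    //
    //     let secret = hex_to_bytes("d25968ebfce6e617bdb839b5a66cfc1fdd051d79a91094f7baceded449f84333");
    //     let public = get_public(secret, KeyType::K1)?;
    //     assert_eq!(
    //         bytes_to_hex(&public),
    //         "02caee1a02910b18dfd5d9db0e8a4bc90f8dd34cedbbfb00c6c841a2abb2fa28cc"
    //     );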
match curve_type { KeyType::K1 => { - let secret_key = k256::SecretKey::from_bytes(&create_k1_field_bytes(&priv_key)).expect("invalid private key"); + let secret_key = k256::SecretKey::from_bytes(&create_k1_field_bytes(&priv_key)) + .expect("invalid private key"); let scalar = k256::NonZeroScalar::from(secret_key); let public_key = k256::PublicKey::from_secret_scalar(&scalar); let encoded_point = public_key.to_encoded_point(true); - return Ok(encoded_point.as_bytes().to_vec()); - }, + Ok(encoded_point.as_bytes().to_vec()) + } KeyType::R1 => { - let secret_key = p256::SecretKey::from_bytes(&create_r1_field_bytes(&priv_key)).expect("invalid private key"); + let secret_key = p256::SecretKey::from_bytes(&create_r1_field_bytes(&priv_key)) + .expect("invalid private key"); let scalar = p256::NonZeroScalar::from(secret_key); let public_key = p256::elliptic_curve::PublicKey::from_secret_scalar(&scalar); let encoded_point = public_key.to_encoded_point(true); - return Ok(encoded_point.as_bytes().to_vec()); - }, + Ok(encoded_point.as_bytes().to_vec()) + } } } diff --git a/crates/antelope/src/crypto/hmac_drbg_rng.rs b/crates/antelope/src/crypto/hmac_drbg_rng.rs deleted file mode 100644 index e0305be..0000000 --- a/crates/antelope/src/crypto/hmac_drbg_rng.rs +++ /dev/null @@ -1,46 +0,0 @@ -use digest::{Digest}; -use hmac_drbg::HmacDRBG; -use hmac::Hmac; -use k256::{ - elliptic_curve::{rand_core::{RngCore, CryptoRng, Error as RandError}}, -}; -use p256::U32; -use sha2::Sha256; - -pub struct HmacDRBGRng { - drbg: HmacDRBG -} - -impl HmacDRBGRng { - pub(crate) fn create(entropy: &[u8], message: &Vec, pers: &[u8]) -> Self { - let drbg = HmacDRBG::::new(entropy, message, pers); - HmacDRBGRng { drbg } - } -} - -impl RngCore for HmacDRBGRng { - fn next_u32(&mut self) -> u32 { - let mut buf = [0u8; 4]; - self.fill_bytes(&mut buf); - u32::from_ne_bytes(buf) - } - - fn next_u64(&mut self) -> u64 { - let mut buf = [0u8; 8]; - self.fill_bytes(&mut buf); - u64::from_ne_bytes(buf) - } - - fn fill_bytes(&mut self, dest: &mut [u8]) { - assert_eq!(dest.len(), 32, "fill_bytes only expecting a destination array of 32 bytes"); - self.drbg.generate::(Some(dest)); - } - - fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), RandError> { - assert_eq!(dest.len(), 32, "try_fill_bytes only expecting a destination array of 32 bytes"); - self.drbg.generate::(Some(dest)); - Ok(()) - } -} - -impl CryptoRng for HmacDRBGRng {} \ No newline at end of file diff --git a/crates/antelope/src/crypto/recover.rs b/crates/antelope/src/crypto/recover.rs index 80d574c..3135124 100644 --- a/crates/antelope/src/crypto/recover.rs +++ b/crates/antelope/src/crypto/recover.rs @@ -1,9 +1,9 @@ -use ecdsa::RecoveryId; -use sha2::{Sha256, Digest}; use crate::chain::key_type::KeyType; use crate::chain::public_key::PublicKey; use crate::chain::signature::Signature; use crate::crypto::curves::{create_k1_field_bytes, create_r1_field_bytes}; +use ecdsa::RecoveryId; +use sha2::{Digest, Sha256}; pub fn recover_message(signature: &Signature, message_bytes: &Vec) -> PublicKey { // TODO: This more generic @@ -13,31 +13,27 @@ pub fn recover_message(signature: &Signature, message_bytes: &Vec) -> Public let r_scalar = create_k1_field_bytes(&signature.r()); let s_scalar = create_k1_field_bytes(&signature.s()); let sig = k256::ecdsa::Signature::from_scalars(r_scalar, s_scalar).unwrap(); - let digest = Sha256::new().chain_update(&message_bytes); - let recovery_id= RecoveryId::from_byte(signature.recovery_id() - Signature::RECOVERY_ID_ADDITION).unwrap(); - let 
verifying_key = k256::ecdsa::VerifyingKey::recover_from_digest( - digest, - &sig, - recovery_id - ).unwrap(); + let digest = Sha256::new().chain_update(message_bytes); + let recovery_id = + RecoveryId::from_byte(signature.recovery_id() - Signature::RECOVERY_ID_ADDITION) + .unwrap(); + let verifying_key = + k256::ecdsa::VerifyingKey::recover_from_digest(digest, &sig, recovery_id).unwrap(); let compressed = verifying_key.to_encoded_point(true); let compressed_bytes = compressed.as_bytes(); - return PublicKey::from_bytes(compressed_bytes.to_vec(), key_type); + PublicKey::from_bytes(compressed_bytes.to_vec(), key_type) } KeyType::R1 => { let r_scalar = create_r1_field_bytes(&signature.r()); let s_scalar = create_r1_field_bytes(&signature.s()); let sig = p256::ecdsa::Signature::from_scalars(r_scalar, s_scalar).unwrap(); - let digest = Sha256::new().chain_update(&message_bytes); - let recovery_id= RecoveryId::from_byte(signature.recovery_id()).unwrap(); - let verifying_key = p256::ecdsa::VerifyingKey::recover_from_digest( - digest, - &sig, - recovery_id - ).unwrap(); + let digest = Sha256::new().chain_update(message_bytes); + let recovery_id = RecoveryId::from_byte(signature.recovery_id()).unwrap(); + let verifying_key = + p256::ecdsa::VerifyingKey::recover_from_digest(digest, &sig, recovery_id).unwrap(); let compressed = verifying_key.to_encoded_point(true); let compressed_bytes = compressed.as_bytes(); - return PublicKey::from_bytes(compressed_bytes.to_vec(), key_type); + PublicKey::from_bytes(compressed_bytes.to_vec(), key_type) } } } diff --git a/crates/antelope/src/crypto/shared_secrets.rs b/crates/antelope/src/crypto/shared_secrets.rs index ea74a85..3dca19a 100644 --- a/crates/antelope/src/crypto/shared_secrets.rs +++ b/crates/antelope/src/crypto/shared_secrets.rs @@ -1,27 +1,35 @@ use crate::chain::key_type::KeyType; use crate::crypto::curves::{create_k1_field_bytes, create_r1_field_bytes}; -pub fn shared_secret(my_secret: &Vec, their_pub_key: &Vec, key_type: KeyType) -> Result, String> { +pub fn shared_secret( + my_secret: &[u8], + their_pub_key: &Vec, + key_type: KeyType, +) -> Result, String> { match key_type { KeyType::K1 => { - let secret_key = k256::SecretKey::from_bytes(&create_k1_field_bytes(my_secret)).expect("invalid private key"); - let their_public_key = k256::PublicKey::from_sec1_bytes(their_pub_key.as_slice()).unwrap(); + let secret_key = k256::SecretKey::from_bytes(&create_k1_field_bytes(my_secret)) + .expect("invalid private key"); + let their_public_key = + k256::PublicKey::from_sec1_bytes(their_pub_key.as_slice()).unwrap(); let shared_secret = k256::elliptic_curve::ecdh::diffie_hellman( secret_key.to_nonzero_scalar(), - their_public_key.as_affine() + their_public_key.as_affine(), ); - return Ok(shared_secret.raw_secret_bytes().to_vec()); + Ok(shared_secret.raw_secret_bytes().to_vec()) } KeyType::R1 => { - let secret_key = p256::SecretKey::from_bytes(&create_r1_field_bytes(my_secret)).expect("invalid private key"); - let their_public_key = p256::PublicKey::from_sec1_bytes(their_pub_key.as_slice()).unwrap(); + let secret_key = p256::SecretKey::from_bytes(&create_r1_field_bytes(my_secret)) + .expect("invalid private key"); + let their_public_key = + p256::PublicKey::from_sec1_bytes(their_pub_key.as_slice()).unwrap(); let shared_secret = p256::elliptic_curve::ecdh::diffie_hellman( secret_key.to_nonzero_scalar(), - their_public_key.as_affine() + their_public_key.as_affine(), ); - return Ok(shared_secret.raw_secret_bytes().to_vec()); + 
Ok(shared_secret.raw_secret_bytes().to_vec()) } } -} \ No newline at end of file +} diff --git a/crates/antelope/src/crypto/sign.rs b/crates/antelope/src/crypto/sign.rs index 1aab4d4..1c1ab00 100644 --- a/crates/antelope/src/crypto/sign.rs +++ b/crates/antelope/src/crypto/sign.rs @@ -1,34 +1,34 @@ +use crate::chain::key_type::KeyType; +use crate::chain::signature::Signature; +use crate::crypto::curves::create_k1_field_bytes; use digest::consts::U32; use digest::core_api::{CoreWrapper, CtVariableCoreWrapper}; use digest::generic_array::ArrayLength; -use ecdsa::hazmat::{bits2field, DigestPrimitive, SignPrimitive}; -use ecdsa::{PrimeCurve, RecoveryId, SignatureSize}; -use ecdsa::elliptic_curve::{CurveArithmetic, Scalar}; use ecdsa::elliptic_curve::ops::Invert; use ecdsa::elliptic_curve::subtle::CtOption; -use sha2::{Sha256, Digest, Sha256VarCore, OidSha256}; -use crate::chain::key_type::KeyType; +use ecdsa::elliptic_curve::{CurveArithmetic, Scalar}; +use ecdsa::hazmat::{bits2field, DigestPrimitive, SignPrimitive}; +use ecdsa::{PrimeCurve, RecoveryId, SignatureSize}; +use k256::ecdsa::signature::DigestSigner; use k256::Secp256k1; use p256::NistP256; -use crate::chain::signature::Signature; -use crate::crypto::curves::create_k1_field_bytes; -use k256::{ - ecdsa::{signature::{DigestSigner}} -}; +use sha2::{Digest, OidSha256, Sha256, Sha256VarCore}; use signature::Error; - pub fn sign(secret: Vec, message: &Vec, key_type: KeyType) -> Result { match key_type { KeyType::K1 => { let mut attempt = 1i8; loop { - let signing_key = k256::ecdsa::SigningKey::from_bytes(&create_k1_field_bytes(&secret.to_vec())).expect("invalid private key"); + let signing_key = + k256::ecdsa::SigningKey::from_bytes(&create_k1_field_bytes(&secret.to_vec())) + .expect("invalid private key"); let pers = &attempt.to_be_bytes(); - let digest = Sha256::new().chain_update(&message); + let digest = Sha256::new().chain_update(message); - let signed: (ecdsa::Signature, RecoveryId) = k1_sign_with_pers(signing_key, digest, pers).unwrap(); + let signed: (ecdsa::Signature, RecoveryId) = + k1_sign_with_pers(signing_key, digest, pers).unwrap(); let signature = signed.0; let recovery = signed.1; @@ -40,20 +40,25 @@ pub fn sign(secret: Vec, message: &Vec, key_type: KeyType) -> Result 100 { - return Err(format!("Reached max canonical signature checks: {}", attempt.to_string())); + return Err(format!( + "Reached max canonical signature checks: {}", + attempt + )); } attempt += 1; } } KeyType::R1 => { - let signing_key = p256::ecdsa::SigningKey::from_bytes(&create_k1_field_bytes(&secret.to_vec())).expect("invalid private key"); + let signing_key = + p256::ecdsa::SigningKey::from_bytes(&create_k1_field_bytes(&secret.to_vec())) + .expect("invalid private key"); - let digest = Sha256::new().chain_update(&message); + let digest = Sha256::new().chain_update(message); // TODO: Explore further how to follow more closely the typescript model with canonical flag // and personalization string being passed to sign method: @@ -63,12 +68,16 @@ pub fn sign(secret: Vec, message: &Vec, key_type: KeyType) -> Result(signing_key: ecdsa::SigningKey, digest: CoreWrapper>, pers: &[u8]) -> signature::Result<(ecdsa::Signature, RecoveryId)> +fn k1_sign_with_pers( + signing_key: ecdsa::SigningKey, + digest: CoreWrapper>, + pers: &[u8], +) -> signature::Result<(ecdsa::Signature, RecoveryId)> where C: PrimeCurve + CurveArithmetic + DigestPrimitive, Scalar: Invert>> + SignPrimitive, @@ -120,4 +129,4 @@ function isCanonical(r: Uint8Array, s: Uint8Array) { !(s[0] === 0 && 
!(s[1] & 0x80)) ) } - */ \ No newline at end of file + */ diff --git a/crates/antelope/src/crypto/verify.rs b/crates/antelope/src/crypto/verify.rs index 5490d1b..8c2f9c9 100644 --- a/crates/antelope/src/crypto/verify.rs +++ b/crates/antelope/src/crypto/verify.rs @@ -1,30 +1,36 @@ -use ecdsa::signature::{Verifier}; -use k256::elliptic_curve::sec1::{ToEncodedPoint}; use crate::chain::key_type::KeyType; use crate::chain::signature::Signature; use crate::crypto::curves::{create_k1_field_bytes, create_r1_field_bytes}; +use ecdsa::signature::Verifier; +use k256::elliptic_curve::sec1::ToEncodedPoint; pub fn verify_message(signature: &Signature, message_bytes: &Vec, pub_key: &Vec) -> bool { // TODO: This more generic let key_type = signature.key_type; match key_type { KeyType::K1 => { - let public_key_point = k256::PublicKey::from_sec1_bytes(pub_key.as_slice()).unwrap().to_encoded_point(false); - let verifying_key = k256::ecdsa::VerifyingKey::from_encoded_point(&public_key_point).unwrap(); + let public_key_point = k256::PublicKey::from_sec1_bytes(pub_key.as_slice()) + .unwrap() + .to_encoded_point(false); + let verifying_key = + k256::ecdsa::VerifyingKey::from_encoded_point(&public_key_point).unwrap(); let r_scalar = create_k1_field_bytes(&signature.r()); let s_scalar = create_k1_field_bytes(&signature.s()); let sig_result = k256::ecdsa::Signature::from_scalars(r_scalar, s_scalar).unwrap(); let verification = verifying_key.verify(message_bytes.as_slice(), &sig_result); - return verification.is_ok(); + verification.is_ok() } KeyType::R1 => { - let public_key_point = p256::PublicKey::from_sec1_bytes(pub_key.as_slice()).unwrap().to_encoded_point(false); - let verifying_key = p256::ecdsa::VerifyingKey::from_encoded_point(&public_key_point).unwrap(); + let public_key_point = p256::PublicKey::from_sec1_bytes(pub_key.as_slice()) + .unwrap() + .to_encoded_point(false); + let verifying_key = + p256::ecdsa::VerifyingKey::from_encoded_point(&public_key_point).unwrap(); let r_scalar = create_r1_field_bytes(&signature.r()); let s_scalar = create_r1_field_bytes(&signature.s()); let sig_result = p256::ecdsa::Signature::from_scalars(r_scalar, s_scalar).unwrap(); let verification = verifying_key.verify(message_bytes.as_slice(), &sig_result); - return verification.is_ok(); + verification.is_ok() } } } diff --git a/crates/antelope/src/lib.rs b/crates/antelope/src/lib.rs index 53a04e3..622a25f 100644 --- a/crates/antelope/src/lib.rs +++ b/crates/antelope/src/lib.rs @@ -2,5 +2,5 @@ pub mod api; pub mod base58; pub mod chain; pub mod crypto; -pub mod util; pub mod serializer; +pub mod util; diff --git a/crates/antelope/src/serializer/formatter.rs b/crates/antelope/src/serializer/formatter.rs index cc1261b..d97f15b 100644 --- a/crates/antelope/src/serializer/formatter.rs +++ b/crates/antelope/src/serializer/formatter.rs @@ -1,12 +1,10 @@ -use serde_json::{from_str, Value}; -use crate::api::v1::structs::{ClientError, EncodingError}; +use crate::api::v1::structs::EncodingError; use crate::util::hex_to_bytes; +use serde_json::Value; -pub struct ValueTo { -} +pub struct ValueTo {} impl ValueTo { - pub fn str(v: Option<&Value>) -> Result<&str, EncodingError> { check_some(v, "str")?; let value = v.unwrap(); @@ -51,25 +49,28 @@ impl ValueTo { Ok(value.as_number().unwrap().as_u64().unwrap()) } - } pub struct JSONObject { - value: Value + value: Value, } impl JSONObject { - pub fn new(value: Value) -> Self { - JSONObject { - value - } + JSONObject { value } + } + + pub fn has(&self, property: &str) -> bool { + 
self.value.get(property).is_some() } pub fn get_value(&self, property: &str) -> Result { let value = self.value.get(property); if value.is_none() { - return Err(EncodingError::new(format!("Unable to get property {}", property))); + return Err(EncodingError::new(format!( + "Unable to get property {}", + property + ))); } Ok(value.unwrap().clone()) @@ -94,14 +95,15 @@ impl JSONObject { pub fn get_u64(&self, property: &str) -> Result { ValueTo::u64(self.value.get(property)) } - - } pub fn check_some(o: Option<&Value>, type_name: &str) -> Result { if o.is_none() { - return Err(EncodingError::new(format!("Value is None, cannot convert to {}", type_name))); + return Err(EncodingError::new(format!( + "Value is None, cannot convert to {}", + type_name + ))); } Ok(String::from("")) -} \ No newline at end of file +} diff --git a/crates/antelope/src/serializer/mod.rs b/crates/antelope/src/serializer/mod.rs index dc1d95f..1226161 100644 --- a/crates/antelope/src/serializer/mod.rs +++ b/crates/antelope/src/serializer/mod.rs @@ -1,2 +1,4 @@ pub mod formatter; -pub mod serializer; \ No newline at end of file +pub mod packer; + +pub use packer::{Decoder, Encoder, Packer}; diff --git a/crates/antelope/src/serializer/serializer.rs b/crates/antelope/src/serializer/packer.rs similarity index 89% rename from crates/antelope/src/serializer/serializer.rs rename to crates/antelope/src/serializer/packer.rs index 7d46407..ca2061e 100644 --- a/crates/antelope/src/serializer/serializer.rs +++ b/crates/antelope/src/serializer/packer.rs @@ -1,9 +1,7 @@ -use core::{ - mem::size_of, -}; +use core::mem::size_of; use crate::chain::varint::VarUint32; -use crate::util::{slice_copy}; +use crate::util::slice_copy; /// /// The `Packer` trait provides methods for packing and unpacking values to and from byte arrays. @@ -11,7 +9,7 @@ use crate::util::{slice_copy}; /// # Examples /// /// ``` -/// use crate::antelope::serializer::serializer::{Encoder, Decoder, Packer}; +/// use crate::antelope::serializer::{Encoder, Decoder, Packer}; /// /// let mut encoder = Encoder::new(4); /// let value = 123u32; @@ -55,7 +53,7 @@ pub trait Packer { /// # Examples /// /// ``` -/// use antelope::serializer::serializer::{Encoder, Packer}; +/// use antelope::serializer::{Encoder, Packer}; /// /// let mut encoder = Encoder::new(4); /// let value = 123u32; @@ -82,7 +80,7 @@ impl Encoder { /// A new `Encoder` instance with the given initial capacity. pub fn new(size: usize) -> Self { Self { - buf: Vec::with_capacity(size) + buf: Vec::with_capacity(size), } } @@ -113,10 +111,9 @@ impl Encoder { /// # Returns /// /// A mutable reference to the allocated - pub fn alloc(&mut self, size: usize) -> &mut [u8] - { + pub fn alloc(&mut self, size: usize) -> &mut [u8] { let old_size = self.buf.len(); - self.buf.resize(old_size+size, 0u8); + self.buf.resize(old_size + size, 0u8); &mut self.buf[old_size..] 
} @@ -129,13 +126,12 @@ impl Encoder { /// # Examples /// /// ``` - /// use antelope::serializer::serializer::{Encoder, Packer}; + /// use antelope::serializer::{Encoder, Packer}; /// /// let data = Encoder::pack(&1234u32); /// assert_eq!(data, vec![210, 4, 0, 0]); /// ``` - pub fn pack(value: &T) -> Vec - { + pub fn pack(value: &T) -> Vec { // Create a new Encoder with the size of the value being packed let mut enc = Self::new(value.size()); // Pack the value using the encoder @@ -143,7 +139,6 @@ impl Encoder { // Return the packed data as a vector of bytes enc.get_bytes().to_vec() } - } /// A struct for unpacking packed data @@ -151,7 +146,7 @@ impl Encoder { /// # Examples /// /// ``` -/// use crate::antelope::serializer::serializer::{Decoder, Packer}; +/// use crate::antelope::serializer::{Decoder, Packer}; /// /// let data = &vec![210, 4, 0, 0]; /// let mut decoder = Decoder::new(&data); @@ -161,22 +156,20 @@ impl Encoder { /// ``` pub struct Decoder<'a> { buf: &'a [u8], - pos: usize + pos: usize, } /// A struct for unpacking packed data impl<'a> Decoder<'a> { - /// Creates a new `Decoder` instance from the given byte array. pub fn new(data: &'a [u8]) -> Self { - Self { - buf: data, pos: 0 - } + Self { buf: data, pos: 0 } } /// Unpacks the given value from the decoder pub fn unpack(&mut self, packer: &mut T) -> usize - where T: Packer, + where + T: Packer, { let size = packer.unpack(&self.buf[self.pos..]); self.pos += size; @@ -187,7 +180,6 @@ impl<'a> Decoder<'a> { pub fn get_pos(&self) -> usize { self.pos } - } /// A trait for packing and unpacking values @@ -243,7 +235,7 @@ impl Packer for bool { } else if data[0] == 0 { *self = false; } else { - assert!(false, "bool::unpack: invalid raw bool value"); + panic!("bool::unpack: invalid raw bool value"); } self.size() } @@ -251,7 +243,6 @@ impl Packer for bool { /// Implement `Packer` for i8 type. impl Packer for i8 { - /// Returns the size of this value in bytes. fn size(&self) -> usize { 1usize @@ -274,7 +265,6 @@ impl Packer for i8 { /// Implement `Packer` for u8 type. impl Packer for u8 { - /// Returns the size of this value in bytes. fn size(&self) -> usize { 1usize @@ -308,7 +298,6 @@ impl_packed!(f64); /// Implement `Packer` for `String` type. impl Packer for String { - /// Returns the size of this value in bytes. fn size(&self) -> usize { VarUint32::new(self.len() as u32).size() + self.len() @@ -331,28 +320,31 @@ impl Packer for String { /// Unpacks this value from the given data. fn unpack(&mut self, data: &[u8]) -> usize { - let mut length = VarUint32{n: 0}; + let mut length = VarUint32 { n: 0 }; let size = length.unpack(data); - if let Ok(s) = String::from_utf8(data[size..size+length.value() as usize].to_vec()) { + if let Ok(s) = String::from_utf8(data[size..size + length.value() as usize].to_vec()) { *self = s; } else { - assert!(false, "invalid utf8 string"); + panic!("invalid utf8 string"); } size + length.value() as usize } } /// Implement `Packer` for `Vec` type. -impl Packer for Vec where T: Packer + Default { +impl Packer for Vec +where + T: Packer + Default, +{ /// Returns the size of this value in bytes. fn size(&self) -> usize { - if self.len() == 0 { + if self.is_empty() { return 1; } let mut size: usize = 0; - for i in 0..self.len() { - size += self[i].size(); + for i in self { + size += i.size(); } VarUint32::new(size as u32).size() + size } @@ -360,7 +352,9 @@ impl Packer for Vec where T: Packer + Default { /// Packs this value into the given encoder. 
fn pack(&self, enc: &mut Encoder) -> usize { let pos = enc.get_size(); - let len = VarUint32{n: self.len() as u32}; + let len = VarUint32 { + n: self.len() as u32, + }; len.pack(enc); for v in self { v.pack(enc); @@ -371,7 +365,7 @@ impl Packer for Vec where T: Packer + Default { /// Unpacks this value from the given data. fn unpack(&mut self, data: &[u8]) -> usize { let mut dec = Decoder::new(data); - let mut size = VarUint32{n: 0}; + let mut size = VarUint32 { n: 0 }; dec.unpack(&mut size); self.reserve(size.value() as usize); for _ in 0..size.value() { @@ -384,8 +378,10 @@ impl Packer for Vec where T: Packer + Default { } /// Implement `Packer` for `Option` type. -impl Packer for Option where T: Packer + Default { - +impl Packer for Option +where + T: Packer + Default, +{ /// Returns the size of this value in bytes. fn size(&self) -> usize { match self { @@ -426,4 +422,4 @@ impl Packer for Option where T: Packer + Default { *self = Some(value); dec.get_pos() } -} \ No newline at end of file +} diff --git a/crates/antelope/src/util.rs b/crates/antelope/src/util.rs index 54b2fc4..9fa42e7 100644 --- a/crates/antelope/src/util.rs +++ b/crates/antelope/src/util.rs @@ -1,15 +1,14 @@ -use std::io::Write; -use hex::{decode, encode}; -use std::slice; -use flate2::Compression; use flate2::write::ZlibEncoder; +use flate2::Compression; +use hex::{decode, encode}; +use std::io::Write; pub fn hex_to_bytes(hex: &str) -> Vec { - return decode(hex).unwrap(); + decode(hex).unwrap() } pub fn bytes_to_hex(bytes: &Vec) -> String { - return encode(bytes); + encode(bytes) } pub fn array_equals(a: &[T], b: &[T]) -> bool { @@ -17,32 +16,24 @@ pub fn array_equals(a: &[T], b: &[T]) -> bool { } pub fn array_to_hex(bytes: &[u8]) -> String { - bytes.iter().map(|b| format!("{:02x}", b)).collect() -} - -pub fn memcpy( dst: *mut u8, src: *const u8, length: usize) -> *mut u8 { - let mut _dst = unsafe { - slice::from_raw_parts_mut(dst, length) - }; - - let _src = unsafe { - slice::from_raw_parts(src, length) - }; - _dst.copy_from_slice(_src); - dst + //bytes.iter().map(|b| format!("{:02x}", b)).collect() + encode(bytes) } pub fn slice_copy(dst: &mut [u8], src: &[u8]) { - assert!(dst.len() == src.len(), "copy_slice: length not the same!"); - memcpy(dst.as_mut_ptr(), src.as_ptr(), dst.len()); + dst.copy_from_slice(src); + // assert!(dst.len() == src.len(), "copy_slice: length not the same!"); + // unsafe { memcpy(dst.as_mut_ptr(), src.as_ptr(), dst.len()); } } pub fn zlib_compress(bytes: &[u8]) -> Result, String> { let mut e = ZlibEncoder::new(Vec::new(), Compression::default()); - e.write_all(bytes); + if e.write_all(bytes).is_err() { + return Err("Error during compression".into()); + } let compressed_bytes = e.finish(); if compressed_bytes.is_err() { - return Err(String::from("Error during compression")); + return Err("Error during compression".into()); } Ok(compressed_bytes.unwrap()) -} \ No newline at end of file +} diff --git a/crates/antelope/tests/base58.rs b/crates/antelope/tests/base58.rs index 21ec308..d301ee7 100644 --- a/crates/antelope/tests/base58.rs +++ b/crates/antelope/tests/base58.rs @@ -1,11 +1,17 @@ -use antelope::{base58, util}; use antelope::chain::key_type::KeyType; use antelope::util::hex_to_bytes; +use antelope::{base58, util}; #[test] -fn decode () { - assert_eq!(base58::decode("StV1DL6CwTryKyV", None).unwrap(), hex::decode("68656c6c6f20776f726c64").unwrap()); - assert_eq!(base58::decode("1111", None).unwrap(), hex::decode("00000000").unwrap()); +fn decode() { + assert_eq!( + 
base58::decode("StV1DL6CwTryKyV", None).unwrap(), + hex::decode("68656c6c6f20776f726c64").unwrap() + ); + assert_eq!( + base58::decode("1111", None).unwrap(), + hex::decode("00000000").unwrap() + ); let d1 = base58::decode("000", None); assert!(d1.is_err()); let d2 = base58::decode("0", Some(1)); @@ -16,8 +22,14 @@ fn decode () { #[test] fn encode() { - assert_eq!(base58::encode(String::from("hello world").into_bytes()), String::from("StV1DL6CwTryKyV")); - assert_eq!(base58::encode(util::hex_to_bytes("0000")), String::from("11")); + assert_eq!( + base58::encode(String::from("hello world").into_bytes()), + String::from("StV1DL6CwTryKyV") + ); + assert_eq!( + base58::encode(util::hex_to_bytes("0000")), + String::from("11") + ); } #[test] @@ -26,17 +38,29 @@ fn decode_check() { base58::decode_check("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false).unwrap(), hex_to_bytes("80d25968ebfce6e617bdb839b5a66cfc1fdd051d79a91094f7baceded449f84333") ); - let decode_result = base58::decode_check("5KQVfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false); + let decode_result = + base58::decode_check("5KQVfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false); assert!(decode_result.is_err()); } #[test] fn decode_ripemd160_check() { assert_eq!( - base58::decode_ripemd160_check("6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin", None, None, false).unwrap(), + base58::decode_ripemd160_check( + "6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin", + None, + None, + false + ) + .unwrap(), hex_to_bytes("02caee1a02910b18dfd5d9db0e8a4bc90f8dd34cedbbfb00c6c841a2abb2fa28cc"), ); - let decode_result_1 = base58::decode_ripemd160_check("6RrVujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin", None, None, false); + let decode_result_1 = base58::decode_ripemd160_check( + "6RrVujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin", + None, + None, + false, + ); assert!(decode_result_1.is_err()); assert_eq!( base58::decode_ripemd160_check( @@ -44,7 +68,8 @@ fn decode_ripemd160_check() { Some(33), Some(KeyType::K1), false - ).unwrap(), + ) + .unwrap(), hex_to_bytes("02caee1a02910b18dfd5d9db0e8a4bc90f8dd34cedbbfb00c6c841a2abb2fa28cc") ); assert_eq!( @@ -53,24 +78,25 @@ fn decode_ripemd160_check() { Some(33), Some(KeyType::K1), false - ).unwrap(), + ) + .unwrap(), hex_to_bytes("02caee1a02910b18dfd5d9db0e8a4bc90f8dd34cedbbfb00c6c841a2abb2fa28cc") ); let decode_result_2 = base58::decode_ripemd160_check( "6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs", None, None, - false + false, ); assert!(decode_result_2.is_err()); } #[test] -fn encode_check () { +fn encode_check() { assert_eq!( - base58::encode_check( - hex_to_bytes("80d25968ebfce6e617bdb839b5a66cfc1fdd051d79a91094f7baceded449f84333") - ), + base58::encode_check(hex_to_bytes( + "80d25968ebfce6e617bdb839b5a66cfc1fdd051d79a91094f7baceded449f84333" + )), "5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu" ); } @@ -91,4 +117,4 @@ fn encode_ripemd160_check() { ), "6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs" ) -} \ No newline at end of file +} diff --git a/crates/antelope/tests/chain.rs b/crates/antelope/tests/chain.rs index 2cb20e6..6c499d7 100644 --- a/crates/antelope/tests/chain.rs +++ b/crates/antelope/tests/chain.rs @@ -1,12 +1,13 @@ +use antelope::chain::asset::Symbol; use antelope::chain::block_id::BlockId; use antelope::chain::checksum::{Checksum160, Checksum256, Checksum512}; -use antelope::chain::{action::Action, asset::Asset, Encoder, Decoder, Packer, action::PermissionLevel}; -use antelope::chain::asset::Symbol; -use 
antelope::util::{bytes_to_hex, hex_to_bytes}; -use antelope::chain::blob::{Blob, BlobType}; use antelope::chain::name::Name; use antelope::chain::transaction::{Transaction, TransactionHeader}; +use antelope::chain::{ + action::Action, action::PermissionLevel, asset::Asset, Decoder, Encoder, Packer, +}; use antelope::name; +use antelope::util::{bytes_to_hex, hex_to_bytes}; use antelope_macros::StructPacker; #[test] @@ -17,8 +18,14 @@ fn asset() { //assert.equal(Asset.from('-0.2345 NEGS').toString(), '-0.2345 NEGS') //assert.equal(Asset.from('-99999999999 DUCKS').toString(), '-99999999999 DUCKS') //assert.equal(Asset.from('-0.0000000000001 DUCKS').toString(), '-0.0000000000001 DUCKS') - assert_eq!(Asset::from_string("0.0000000000000 DUCKS").to_string(), "0.0000000000000 DUCKS"); - assert_eq!(Asset::from_string("99999999999 DUCKS").to_string(), "99999999999 DUCKS"); + assert_eq!( + Asset::from_string("0.0000000000000 DUCKS").to_string(), + "0.0000000000000 DUCKS" + ); + assert_eq!( + Asset::from_string("99999999999 DUCKS").to_string(), + "99999999999 DUCKS" + ); let asset = Asset::from(Asset::from_string("1.000000000 FOO")); assert_eq!(asset.amount(), 1000000000); @@ -41,54 +48,52 @@ fn asset() { symbol_decoder.unpack(symbol_unpacked); assert_eq!(symbol.to_string(), symbol_unpacked.to_string()); /* - // test null asset - asset = Asset.from('0 ') - assert.equal(Number(asset.value), 0) - assert.equal(String(asset), '0 ') - - asset = Asset.from(10, '4,POX') - assert.equal(asset.value, 10) - assert.equal(Number(asset.units), 100000) - - asset = Asset.fromUnits(1, '10,KEK') - assert.equal(asset.value, 0.0000000001) - asset.value += 0.0000000001 - assert.equal(Number(asset.units), 2) - - asset = Asset.from(3.004, '4,RAR') - asset.value += 1 - assert.equal(asset.toString(), '4.0040 RAR') - assert.equal(asset.value, 4.004) - - asset = Asset.from(3.004, '8,RAR') - asset.value += 1 - assert.equal(asset.units.toNumber(), 400400000) - assert.equal(asset.toString(), '4.00400000 RAR') - assert.equal(asset.value, 4.004) - - assert.throws(() => { - symbol.convertUnits(Int64.from('9223372036854775807')) - }) - assert.throws(() => { - Asset.from('') - }) - assert.throws(() => { - Asset.from('1POP') - }) - assert.throws(() => { - Asset.from('1.0000000000000000000000 BIGS') - }) - assert.throws(() => { - Asset.from('1.2 horse') - }) - assert.throws(() => { - Asset.Symbol.from('12') - }) - */ + // test null asset + asset = Asset.from('0 ') + assert.equal(Number(asset.value), 0) + assert.equal(String(asset), '0 ') + + asset = Asset.from(10, '4,POX') + assert.equal(asset.value, 10) + assert.equal(Number(asset.units), 100000) + + asset = Asset.fromUnits(1, '10,KEK') + assert.equal(asset.value, 0.0000000001) + asset.value += 0.0000000001 + assert.equal(Number(asset.units), 2) + + asset = Asset.from(3.004, '4,RAR') + asset.value += 1 + assert.equal(asset.toString(), '4.0040 RAR') + assert.equal(asset.value, 4.004) + + asset = Asset.from(3.004, '8,RAR') + asset.value += 1 + assert.equal(asset.units.toNumber(), 400400000) + assert.equal(asset.toString(), '4.00400000 RAR') + assert.equal(asset.value, 4.004) + + assert.throws(() => { + symbol.convertUnits(Int64.from('9223372036854775807')) + }) + assert.throws(() => { + Asset.from('') + }) + assert.throws(() => { + Asset.from('1POP') + }) + assert.throws(() => { + Asset.from('1.0000000000000000000000 BIGS') + }) + assert.throws(() => { + Asset.from('1.2 horse') + }) + assert.throws(() => { + Asset.Symbol.from('12') + }) + */ } - - #[test] fn block_id() { let string = 
"048865fb643bca3b644647177f0cf363f7956794d0a7ec3bc6d29d93d9637308"; @@ -106,7 +111,7 @@ fn block_id() { return; } }; - + //assert_eq!(block_id.to_string(), string); assert_eq!(block_id.block_num().to_string(), "76047867"); assert!(block_id.block_num() == 76047867); @@ -125,27 +130,26 @@ fn block_id() { // assert!(block_id2.block_num().equals(7)); } +/* test('block id', function () { + const string = '048865fb643bca3b644647177f0cf363f7956794d0a7ec3bc6d29d93d9637308' + const blockId = BlockId.from(string) + assert.equal(String(blockId), string) + assert.equal(Number(blockId.blockNum), 76047867) + assert.equal(blockId.blockNum.equals(76047867), true) + assert.equal(blockId.blockNum.equals(UInt32.from(76047867)), true) + const blockId2 = BlockId.fromBlockChecksum( + '61375f2d5fbe6bbad86e424962a190e8309394b7bff4bf3e16b0a2a71e5a617c', + 7 + ) + assert.equal( + String(blockId2), + '000000075fbe6bbad86e424962a190e8309394b7bff4bf3e16b0a2a71e5a617c' + ) + assert.equal(blockId2.blockNum.equals(7), true) +})*/ - /* test('block id', function () { - const string = '048865fb643bca3b644647177f0cf363f7956794d0a7ec3bc6d29d93d9637308' - const blockId = BlockId.from(string) - assert.equal(String(blockId), string) - assert.equal(Number(blockId.blockNum), 76047867) - assert.equal(blockId.blockNum.equals(76047867), true) - assert.equal(blockId.blockNum.equals(UInt32.from(76047867)), true) - const blockId2 = BlockId.fromBlockChecksum( - '61375f2d5fbe6bbad86e424962a190e8309394b7bff4bf3e16b0a2a71e5a617c', - 7 - ) - assert.equal( - String(blockId2), - '000000075fbe6bbad86e424962a190e8309394b7bff4bf3e16b0a2a71e5a617c' - ) - assert.equal(blockId2.blockNum.equals(7), true) - })*/ - - -//#[test] +/* +#[test] fn blob() { let expected = Blob::from(BlobType::Bytes(vec![0xbe, 0xef, 0xfa, 0xce])).unwrap(); @@ -163,32 +167,30 @@ fn blob() { let blob4 = Blob::from(BlobType::String("vu/6zg===".to_string())).unwrap(); assert_eq!(blob4.array, expected.array); } - +*/ /* test('blob', function () { - const expected = Bytes.from([0xbe, 0xef, 0xfa, 0xce]) - - // Correct - const string = 'vu/6zg==' - const blob = Blob.from(string) - assert.isTrue(Bytes.from(blob.array).equals(expected)) + const expected = Bytes.from([0xbe, 0xef, 0xfa, 0xce]) - // Wrong padding, ensure it still works - const string2 = 'vu/6zg=' - const blob2 = Blob.from(string2) - assert.isTrue(Bytes.from(blob2.array).equals(expected)) + // Correct + const string = 'vu/6zg==' + const blob = Blob.from(string) + assert.isTrue(Bytes.from(blob.array).equals(expected)) - const string3 = 'vu/6zg' - const blob3 = Blob.from(string3) - assert.isTrue(Bytes.from(blob3.array).equals(expected)) - - const string4 = 'vu/6zg===' - const blob4 = Blob.from(string4) - assert.isTrue(Bytes.from(blob4.array).equals(expected)) - }) - */ + // Wrong padding, ensure it still works + const string2 = 'vu/6zg=' + const blob2 = Blob.from(string2) + assert.isTrue(Bytes.from(blob2.array).equals(expected)) + const string3 = 'vu/6zg' + const blob3 = Blob.from(string3) + assert.isTrue(Bytes.from(blob3.array).equals(expected)) + const string4 = 'vu/6zg===' + const blob4 = Blob.from(string4) + assert.isTrue(Bytes.from(blob4.array).equals(expected)) +}) +*/ #[test] fn bytes() { @@ -209,63 +211,65 @@ fn bytes() { "98c615784ccb5fe5936fbc0cbe9dfdb408d92f0f" ); /* - // TODO: add zeropadded support - assert.equal(Bytes.from('beef').zeropadded(4).toString('hex'), '0000beef') - assert.equal(Bytes.from('beef').zeropadded(2).toString('hex'), 'beef') - assert.equal(Bytes.from('beef').zeropadded(1).toString('hex'), 
'beef') - assert.equal(Bytes.from('beef').zeropadded(1, true).toString('hex'), 'be') - assert.equal(Bytes.from('beef').zeropadded(2, true).toString('hex'), 'beef') - assert.equal(Bytes.from('beef').zeropadded(3, true).toString('hex'), '00beef') + // TODO: add zeropadded support + assert.equal(Bytes.from('beef').zeropadded(4).toString('hex'), '0000beef') + assert.equal(Bytes.from('beef').zeropadded(2).toString('hex'), 'beef') + assert.equal(Bytes.from('beef').zeropadded(1).toString('hex'), 'beef') + assert.equal(Bytes.from('beef').zeropadded(1, true).toString('hex'), 'be') + assert.equal(Bytes.from('beef').zeropadded(2, true).toString('hex'), 'beef') + assert.equal(Bytes.from('beef').zeropadded(3, true).toString('hex'), '00beef') - */ + */ } /* - test('time', function () { - const now = new Date() - assert.equal(TimePoint.from(now).toMilliseconds(), now.getTime()) - assert.equal( - TimePointSec.from(TimePointSec.from(now)).toMilliseconds() / 1000, - Math.round(now.getTime() / 1000) - ) - assert.throws(() => { - TimePoint.from('blah') - }) - assert.equal(BlockTimestamp.from('2021-08-25T02:37:24.500'), '2021-08-25T02:37:24.500') - assert.equal( - Math.round(BlockTimestamp.from(now).toMilliseconds() / 500), - Math.round(now.getTime() / 500) - ) +test('time', function () { + const now = new Date() + assert.equal(TimePoint.from(now).toMilliseconds(), now.getTime()) + assert.equal( + TimePointSec.from(TimePointSec.from(now)).toMilliseconds() / 1000, + Math.round(now.getTime() / 1000) + ) + assert.throws(() => { + TimePoint.from('blah') }) - */ + assert.equal(BlockTimestamp.from('2021-08-25T02:37:24.500'), '2021-08-25T02:37:24.500') + assert.equal( + Math.round(BlockTimestamp.from(now).toMilliseconds() / 500), + Math.round(now.getTime() / 500) + ) +}) +*/ #[test] fn transaction() { - #[derive(Clone, Eq, PartialEq, Default, StructPacker)] struct Transfer { from: Name, to: Name, quantity: Asset, - memo: String + memo: String, } let transfer_data = Transfer { from: name!("foo"), to: name!("bar"), quantity: Asset::from_string("1.0000 EOS"), - memo: String::from("hello") + memo: String::from("hello"), }; let transfer_data_packed = Encoder::pack(&transfer_data); - assert_eq!(bytes_to_hex(&transfer_data_packed), "000000000000285d000000000000ae39102700000000000004454f53000000000568656c6c6f"); + assert_eq!( + bytes_to_hex(&transfer_data_packed), + "000000000000285d000000000000ae39102700000000000004454f53000000000568656c6c6f" + ); let action = Action::new_ex( name!("eosio.token"), name!("transfer"), vec![], - &transfer_data + &transfer_data, ); let action_packed = Encoder::pack(&action); @@ -420,17 +424,17 @@ fn transaction() { #[test] fn permission_level() { // Create PermissionLevel from 'foo@bar' - let perm = PermissionLevel::new(Name::from_str("foo"), Name::from_str("bar")); + let perm = PermissionLevel::new(Name::new_from_str("foo"), Name::new_from_str("bar")); // Test equals with itself assert_eq!(perm, perm.clone()); // Test equals with equivalent ActorPermission - let other_perm = PermissionLevel::new(Name::from_str("foo"), Name::from_str("bar")); + let other_perm = PermissionLevel::new(Name::new_from_str("foo"), Name::new_from_str("bar")); assert_eq!(perm, other_perm); // Test equals with different PermissionLevel - let different_perm = PermissionLevel::new(Name::from_str("bar"), Name::from_str("moo")); + let different_perm = PermissionLevel::new(Name::new_from_str("bar"), Name::new_from_str("moo")); assert_ne!(perm, different_perm); } @@ -552,17 +556,16 @@ fn transaction_signing_data_and_digest() 
{ extension: vec![], }; let chain_id = Checksum256::from_bytes( - hex_to_bytes("2a02a0053e5a8cf73a56ba0fda11e4d92e0238a4a2aa74fccf46d5a910746840").as_slice() - ).unwrap(); + hex_to_bytes("2a02a0053e5a8cf73a56ba0fda11e4d92e0238a4a2aa74fccf46d5a910746840").as_slice(), + ) + .unwrap(); let data = trx.signing_data(&chain_id.data.to_vec()); let expected_data_hex= "2a02a0053e5a8cf73a56ba0fda11e4d92e0238a4a2aa74fccf46d5a91074684000000000000000000000000000000100a6823403ea3055000000572d3ccdcd01a02e45ea52a42e4500000000a8ed32323aa02e45ea52a42e4580b1915e5d268dcaba0100000000000004454f530000000019656f73696f2d636f7265206973207468652062657374203c33000000000000000000000000000000000000000000000000000000000000000000"; assert_eq!(bytes_to_hex(&data), expected_data_hex); let digest = trx.signing_digest(&chain_id.data.to_vec()); - let expected_digest_hex= "59fa6b615e3ce1b539ae27bc2398448c1374d2d3c97fe2bbba2c37c118631848"; + let expected_digest_hex = "59fa6b615e3ce1b539ae27bc2398448c1374d2d3c97fe2bbba2c37c118631848"; assert_eq!(bytes_to_hex(&digest), expected_digest_hex); - - } /* diff --git a/crates/antelope/tests/client.rs b/crates/antelope/tests/client.rs index 638f93c..4654ae3 100644 --- a/crates/antelope/tests/client.rs +++ b/crates/antelope/tests/client.rs @@ -1,43 +1,84 @@ use antelope::api::client::APIClient; +use antelope::api::v1::structs::ClientError; +use antelope::chain::asset::Asset; use antelope::chain::block_id::BlockId; -use antelope::name; use antelope::chain::name::Name; +use antelope::name; use antelope::util::{bytes_to_hex, hex_to_bytes}; mod utils; -use utils::mock_provider::MockProvider; use crate::utils::mock_provider; +use utils::mock_provider::MockProvider; #[test] fn chain_get_info() { - let mock_provider = MockProvider{}; + let mock_provider = MockProvider {}; let client = APIClient::custom_provider(Box::new(mock_provider)); //let client = APIClient::default_provider(String::from("https://telos.caleos.io")); let info = client.unwrap().v1_chain.get_info().unwrap(); assert_eq!(info.head_block_producer, name!("bp.boid")); - assert_eq!(info.last_irreversible_block_id.bytes, BlockId::from_bytes(&hex_to_bytes("12cf00e89773c8497415c368960b9c57ba6ee076283f71df14aeee2daefbb2a6")).unwrap().bytes); + assert_eq!( + info.last_irreversible_block_id.bytes, + BlockId::from_bytes(&hex_to_bytes( + "12cf00e89773c8497415c368960b9c57ba6ee076283f71df14aeee2daefbb2a6" + )) + .unwrap() + .bytes + ); assert_eq!(info.last_irreversible_block_num, 315556072); } #[test] fn chain_send_transaction() { - let mock_provider = MockProvider{}; + let mock_provider = MockProvider {}; let client = APIClient::custom_provider(Box::new(mock_provider)).unwrap(); //let client = APIClient::default_provider(String::from("https://testnet.telos.caleos.io")).unwrap(); let info = client.v1_chain.get_info().unwrap(); - let transaction = mock_provider::make_mock_transaction(&info); + let transaction = + mock_provider::make_mock_transaction(&info, Asset::from_string("0.0420 TLOS")); let signed_transaction = mock_provider::sign_mock_transaction(&transaction, &info); let result = client.v1_chain.send_transaction(signed_transaction); + assert!(result.is_ok(), "Transaction result should be ok"); let send_trx_response = result.unwrap(); // NOTE: Don't bother testing the transaction id from the mock transaction, it will not match because the // get_info that was mocked isn't the same get_info used for the mocked response value from send_transaction //assert_eq!(send_trx_response.transaction_id, bytes_to_hex(&transaction.id())); - 
assert_eq!(send_trx_response.transaction_id, "57dcff5a6dd9eed1a9a4b4554ed6aa69b4caf5f73b6abdf466ee61829cfaed49"); - assert_eq!(send_trx_response.processed.id, "57dcff5a6dd9eed1a9a4b4554ed6aa69b4caf5f73b6abdf466ee61829cfaed49"); - assert_eq!(send_trx_response.processed.block_time, "2024-01-02T19:01:00.000"); + assert_eq!( + send_trx_response.transaction_id, + "57dcff5a6dd9eed1a9a4b4554ed6aa69b4caf5f73b6abdf466ee61829cfaed49" + ); + assert_eq!( + send_trx_response.processed.id, + "57dcff5a6dd9eed1a9a4b4554ed6aa69b4caf5f73b6abdf466ee61829cfaed49" + ); + assert_eq!( + send_trx_response.processed.block_time, + "2024-01-02T19:01:00.000" + ); assert_eq!(send_trx_response.processed.receipt.cpu_usage_us, 185); assert_eq!(send_trx_response.processed.elapsed, 185); // TODO: Create a failed send_transaction response in the mock_data, properly detect errors in v1_chain.send_transaction and test for the error struct values -} \ No newline at end of file + let invalid_transaction = + mock_provider::make_mock_transaction(&info, Asset::from_string("0.0420 NUNYA")); + let signed_invalid_transaction = + mock_provider::sign_mock_transaction(&invalid_transaction, &info); + let failed_result = client.v1_chain.send_transaction(signed_invalid_transaction); + assert!( + failed_result.is_err(), + "Failed transaction result should be err" + ); + let failure_response = failed_result.err().unwrap(); + match failure_response { + ClientError::SERVER(err) => { + assert_eq!(err.error.code, 3050003); + } + _ => { + assert!( + false, + "Failure response should be of type ClientError::SERVER" + ) + } + } +} diff --git a/crates/antelope/tests/crypto.rs b/crates/antelope/tests/crypto.rs index 99d4ac8..d0790d7 100644 --- a/crates/antelope/tests/crypto.rs +++ b/crates/antelope/tests/crypto.rs @@ -1,18 +1,35 @@ +use antelope::chain::key_type::KeyType; use antelope::chain::private_key::PrivateKey; use antelope::chain::public_key::PublicKey; -use antelope::chain::key_type::KeyType; -use antelope::util::{hex_to_bytes}; +use antelope::util::hex_to_bytes; #[test] fn private_key_encoding() { - let k1_key = PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false).unwrap(); + let k1_key = + PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false).unwrap(); assert!(matches!(k1_key.key_type, KeyType::K1)); - assert_eq!(k1_key.to_wif().unwrap(), String::from("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu")); - assert_eq!(k1_key.to_string(), "PVT_K1_2be6BwD56MHeVD4P95bRLdnP3oB3P4QRAXAsSKh4N8Xu6d4Aux"); - assert_eq!(k1_key.to_hex(), "d25968ebfce6e617bdb839b5a66cfc1fdd051d79a91094f7baceded449f84333"); + assert_eq!( + k1_key.to_wif().unwrap(), + String::from("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu") + ); + assert_eq!( + k1_key.to_string(), + "PVT_K1_2be6BwD56MHeVD4P95bRLdnP3oB3P4QRAXAsSKh4N8Xu6d4Aux" + ); + assert_eq!( + k1_key.to_hex(), + "d25968ebfce6e617bdb839b5a66cfc1fdd051d79a91094f7baceded449f84333" + ); - let r1_key = PrivateKey::from_str("PVT_R1_2dSFGZnA4oFvMHwfjeYCtK2MLLPNYWgYRXrPTcnTaLZFkDSELm", false).unwrap(); - assert_eq!(r1_key.to_string(), "PVT_R1_2dSFGZnA4oFvMHwfjeYCtK2MLLPNYWgYRXrPTcnTaLZFkDSELm"); + let r1_key = PrivateKey::from_str( + "PVT_R1_2dSFGZnA4oFvMHwfjeYCtK2MLLPNYWgYRXrPTcnTaLZFkDSELm", + false, + ) + .unwrap(); + assert_eq!( + r1_key.to_string(), + "PVT_R1_2dSFGZnA4oFvMHwfjeYCtK2MLLPNYWgYRXrPTcnTaLZFkDSELm" + ); let result = r1_key.to_wif(); assert!(result.is_err(), "R1 Key should Err when to_wif is called"); @@ -20,28 +37,48 @@ fn 
private_key_encoding() { #[test] fn public_key_encoding() { - let k1_key = PublicKey::from_str("PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs").unwrap(); + let k1_key = + PublicKey::new_from_str("PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs") + .unwrap(); assert!(matches!(k1_key.key_type, KeyType::K1)); - assert_eq!(k1_key.to_string(), "PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs"); - assert_eq!(k1_key.to_legacy_string(Option::from("EOS")).unwrap(), "EOS6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin"); assert_eq!( - PublicKey::from_str("EOS6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin").unwrap().to_string(), + k1_key.to_string(), "PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs" ); - assert_eq!(k1_key.to_hex_string(), "02caee1a02910b18dfd5d9db0e8a4bc90f8dd34cedbbfb00c6c841a2abb2fa28cc"); - let r1_key = PublicKey::from_str("PUB_R1_8E46r5HiQF84o6V8MWQQg1vPpgfjYA4XDqT6xbtaaebxw7XbLu").unwrap(); - assert_eq!(r1_key.to_string(), "PUB_R1_8E46r5HiQF84o6V8MWQQg1vPpgfjYA4XDqT6xbtaaebxw7XbLu"); + assert_eq!( + k1_key.to_legacy_string(Option::from("EOS")).unwrap(), + "EOS6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin" + ); + assert_eq!( + PublicKey::new_from_str("EOS6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin") + .unwrap() + .to_string(), + "PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs" + ); + assert_eq!( + k1_key.to_hex_string(), + "02caee1a02910b18dfd5d9db0e8a4bc90f8dd34cedbbfb00c6c841a2abb2fa28cc" + ); + let r1_key = + PublicKey::new_from_str("PUB_R1_8E46r5HiQF84o6V8MWQQg1vPpgfjYA4XDqT6xbtaaebxw7XbLu") + .unwrap(); + assert_eq!( + r1_key.to_string(), + "PUB_R1_8E46r5HiQF84o6V8MWQQg1vPpgfjYA4XDqT6xbtaaebxw7XbLu" + ); let legacy_result = r1_key.to_legacy_string(None); assert!(legacy_result.is_err()); } - - #[test] fn public_key_prefix() { - let priv_key = PrivateKey::from_str("5J4zo6Af9QnAeJmNEQeAR4MNhaG7SKVReAYgZC8655hpkbbBscr", false).unwrap(); + let priv_key = + PrivateKey::from_str("5J4zo6Af9QnAeJmNEQeAR4MNhaG7SKVReAYgZC8655hpkbbBscr", false).unwrap(); let pub_key = priv_key.to_public(); - assert_eq!(pub_key.to_string(), "PUB_K1_87DUhBcZrLhyFfBVDyu1iWZJUGURqbk6CQxwv5g6iWUD2X45Hv"); + assert_eq!( + pub_key.to_string(), + "PUB_K1_87DUhBcZrLhyFfBVDyu1iWZJUGURqbk6CQxwv5g6iWUD2X45Hv" + ); assert_eq!( pub_key.to_legacy_string(None).unwrap(), "EOS87DUhBcZrLhyFfBVDyu1iWZJUGURqbk6CQxwv5g6iWUCy9dCUJ" @@ -54,10 +91,18 @@ fn public_key_prefix() { #[test] fn public_from_private() { - let priv_key = PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false).unwrap(); + let priv_key = + PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false).unwrap(); let pub_key = priv_key.to_public(); - assert_eq!(pub_key.to_string(), "PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs"); - let r1_priv_key = PrivateKey::from_str("PVT_R1_2dSFGZnA4oFvMHwfjeYCtK2MLLPNYWgYRXrPTcnTaLZFkDSELm", false).unwrap(); + assert_eq!( + pub_key.to_string(), + "PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs" + ); + let r1_priv_key = PrivateKey::from_str( + "PVT_R1_2dSFGZnA4oFvMHwfjeYCtK2MLLPNYWgYRXrPTcnTaLZFkDSELm", + false, + ) + .unwrap(); let r1_pub_key = r1_priv_key.to_public(); assert_eq!( r1_pub_key.to_string(), @@ -67,50 +112,79 @@ fn public_from_private() { #[test] fn sign_and_verify() { - let priv_key = PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false).unwrap(); - let pub_key = 
PublicKey::from_str("PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs").unwrap(); + let priv_key = + PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false).unwrap(); + let pub_key = + PublicKey::new_from_str("PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs") + .unwrap(); let message = String::from("I like turtles").into_bytes(); let signature = priv_key.sign_message(&message); assert!(signature.verify_message(&message, &pub_key)); assert!(!signature.verify_message(&b"beef".to_vec(), &pub_key)); - assert!( - !signature.verify_message( - &message, - &PublicKey::from_str("EOS7HBX4f8UknP5NNoX8ixCx4YrA8JcPhGbuQ7Xem8gmWg1nviTqR").unwrap() - ) - ); + assert!(!signature.verify_message( + &message, + &PublicKey::new_from_str("EOS7HBX4f8UknP5NNoX8ixCx4YrA8JcPhGbuQ7Xem8gmWg1nviTqR").unwrap() + )); // r1 let priv_key2 = PrivateKey::from_str( - "PVT_R1_2dSFGZnA4oFvMHwfjeYCtK2MLLPNYWgYRXrPTcnTaLZFkDSELm", false - ).unwrap(); - let pub_key2 = PublicKey::from_str("PUB_R1_8E46r5HiQF84o6V8MWQQg1vPpgfjYA4XDqT6xbtaaebxw7XbLu").unwrap(); + "PVT_R1_2dSFGZnA4oFvMHwfjeYCtK2MLLPNYWgYRXrPTcnTaLZFkDSELm", + false, + ) + .unwrap(); + let pub_key2 = + PublicKey::new_from_str("PUB_R1_8E46r5HiQF84o6V8MWQQg1vPpgfjYA4XDqT6xbtaaebxw7XbLu") + .unwrap(); let signature2 = priv_key2.sign_message(&message); assert_eq!(signature2.verify_message(&message, &pub_key2), true); } #[test] fn sign_and_recover() { - let key = PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false).unwrap(); + let key = + PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu", false).unwrap(); let message = b"I like turtles".to_vec(); let signature = key.sign_message(&message); let recovered_key = signature.recover_message(&message); let recovered_key_failure = signature.recover_message(&b"beef".to_vec()); - assert_eq!(recovered_key.to_string(), "PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs"); - assert_eq!(recovered_key.to_legacy_string(Some("EOS")).unwrap(), "EOS6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin"); - assert_eq!(recovered_key.to_legacy_string(Some("FIO")).unwrap(), "FIO6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin"); - assert_ne!(recovered_key_failure.to_string(), "PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs"); + assert_eq!( + recovered_key.to_string(), + "PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs" + ); + assert_eq!( + recovered_key.to_legacy_string(Some("EOS")).unwrap(), + "EOS6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin" + ); + assert_eq!( + recovered_key.to_legacy_string(Some("FIO")).unwrap(), + "FIO6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeABhJRin" + ); + assert_ne!( + recovered_key_failure.to_string(), + "PUB_K1_6RrvujLQN1x5Tacbep1KAk8zzKpSThAQXBCKYFfGUYeACcSRFs" + ); - let r1_private_key = PrivateKey::from_str("PVT_R1_2dSFGZnA4oFvMHwfjeYCtK2MLLPNYWgYRXrPTcnTaLZFkDSELm", false).unwrap(); + let r1_private_key = PrivateKey::from_str( + "PVT_R1_2dSFGZnA4oFvMHwfjeYCtK2MLLPNYWgYRXrPTcnTaLZFkDSELm", + false, + ) + .unwrap(); let r1_signature = r1_private_key.sign_message(&message); let recovered_r1_key = r1_signature.recover_message(&message); - assert_eq!(recovered_r1_key.to_string(), "PUB_R1_8E46r5HiQF84o6V8MWQQg1vPpgfjYA4XDqT6xbtaaebxw7XbLu"); + assert_eq!( + recovered_r1_key.to_string(), + "PUB_R1_8E46r5HiQF84o6V8MWQQg1vPpgfjYA4XDqT6xbtaaebxw7XbLu" + ); } #[test] fn shared_secrets() { - let priv1 = PrivateKey::from_str("5KGNiwTYdDWVBc9RCC28hsi7tqHGUsikn9Gs8Yii93fXbkYzxGi", 
false).unwrap(); - let priv2 = PrivateKey::from_str("5Kik3tbLSn24ScHFsj6GwLkgd1H4Wecxkzt1VX7PBBRDQUCdGFa", false).unwrap(); - let pub1 = PublicKey::from_str("PUB_K1_7Wp9pzhtTfN3jSyQDCktKLqxdTAcAfgT2RrVpE6KThZraa381H").unwrap(); - let pub2 = PublicKey::from_str("PUB_K1_6P8aGPEP79815rKGQ1dbc9eDxoEjatX7Lp696ve5tinnfwJ6nt").unwrap(); + let priv1 = + PrivateKey::from_str("5KGNiwTYdDWVBc9RCC28hsi7tqHGUsikn9Gs8Yii93fXbkYzxGi", false).unwrap(); + let priv2 = + PrivateKey::from_str("5Kik3tbLSn24ScHFsj6GwLkgd1H4Wecxkzt1VX7PBBRDQUCdGFa", false).unwrap(); + let pub1 = PublicKey::new_from_str("PUB_K1_7Wp9pzhtTfN3jSyQDCktKLqxdTAcAfgT2RrVpE6KThZraa381H") + .unwrap(); + let pub2 = PublicKey::new_from_str("PUB_K1_6P8aGPEP79815rKGQ1dbc9eDxoEjatX7Lp696ve5tinnfwJ6nt") + .unwrap(); let expected = "def2d32f6b849198d71118ef53dbc3b679fe2b2c174ee4242a33e1a3f34c46fcbaa698fb599ca0e36f555dde2ac913a10563de2c33572155487cd8b34523de9e"; let secret1 = priv1.shared_secret(&pub2); @@ -121,7 +195,6 @@ fn shared_secrets() { #[test] fn key_generation() { - let k1_key = PrivateKey::random(KeyType::K1); let r1_key = PrivateKey::random(KeyType::R1); @@ -138,26 +211,64 @@ fn key_generation() { #[test] fn key_errors() { - let invalid_private_key_result = PrivateKey::from_str("PVT_K1_2be6BwD56MHeVD4P95bRLdnP3oB3P4QRAXAsSKh4N8Xu6d4Auz", false); - assert!(invalid_private_key_result.is_err(), "Invalid private key checksum should fail"); + let invalid_private_key_result = PrivateKey::from_str( + "PVT_K1_2be6BwD56MHeVD4P95bRLdnP3oB3P4QRAXAsSKh4N8Xu6d4Auz", + false, + ); + assert!( + invalid_private_key_result.is_err(), + "Invalid private key checksum should fail" + ); let empty_private_key_result = PrivateKey::from_str("", false); - assert!(empty_private_key_result.is_err(), "Empty private key should fail"); + assert!( + empty_private_key_result.is_err(), + "Empty private key should fail" + ); let invalid_ok_private_key_result = PrivateKey::from_str( "PVT_K1_2be6BwD56MHeVD4P95bRLdnP3oB3P4QRAXAsSKh4N8Xu6d4Auz", - true - ); - assert!(invalid_ok_private_key_result.is_ok(), "Should not fail if ignore_checksum = true"); - assert_eq!(invalid_ok_private_key_result.unwrap().to_string(), "PVT_K1_2be6BwD56MHeVD4P95bRLdnP3oB3P4QRAXAsSKh4N8Xu6d4Aux"); - let invalid_wif_private_key_result_enforce_checksum = PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zxx", false); - assert!(invalid_wif_private_key_result_enforce_checksum.is_err(), "Should fail with invalid wif key"); - - let invalid_wif_private_key_result_no_checksum = PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zxx", true); - assert!(invalid_wif_private_key_result_no_checksum.is_ok(), "Should not fail with invalid wif key if ignore_checksum = true"); - assert_eq!(invalid_wif_private_key_result_no_checksum.unwrap().to_wif().unwrap(), "5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu"); - let valid_failing_checksum = PrivateKey::from_str("PVT_K1_ApBgGcJ2HeGR3szXA9JJptGCWUbSwewtGsxm3DVr86pJtb5V", true); - assert!(valid_failing_checksum.is_ok(), "Invalid checksum should pass if ignore_checksum = false"); - let failing_checksum = PrivateKey::from_str("PVT_K1_ApBgGcJ2HeGR3szXA9JJptGCWUbSwewtGsxm3DVr86pJtb5V", false); + true, + ); + assert!( + invalid_ok_private_key_result.is_ok(), + "Should not fail if ignore_checksum = true" + ); + assert_eq!( + invalid_ok_private_key_result.unwrap().to_string(), + "PVT_K1_2be6BwD56MHeVD4P95bRLdnP3oB3P4QRAXAsSKh4N8Xu6d4Aux" + ); + let invalid_wif_private_key_result_enforce_checksum = + 
PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zxx", false); + assert!( + invalid_wif_private_key_result_enforce_checksum.is_err(), + "Should fail with invalid wif key" + ); + + let invalid_wif_private_key_result_no_checksum = + PrivateKey::from_str("5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zxx", true); + assert!( + invalid_wif_private_key_result_no_checksum.is_ok(), + "Should not fail with invalid wif key if ignore_checksum = true" + ); + assert_eq!( + invalid_wif_private_key_result_no_checksum + .unwrap() + .to_wif() + .unwrap(), + "5KQvfsPJ9YvGuVbLRLXVWPNubed6FWvV8yax6cNSJEzB4co3zFu" + ); + let valid_failing_checksum = PrivateKey::from_str( + "PVT_K1_ApBgGcJ2HeGR3szXA9JJptGCWUbSwewtGsxm3DVr86pJtb5V", + true, + ); + assert!( + valid_failing_checksum.is_ok(), + "Invalid checksum should pass if ignore_checksum = false" + ); + let failing_checksum = PrivateKey::from_str( + "PVT_K1_ApBgGcJ2HeGR3szXA9JJptGCWUbSwewtGsxm3DVr86pJtb5V", + false, + ); assert!(failing_checksum.is_err(), "Invalid checksum should fail"); } @@ -166,4 +277,4 @@ fn key_generation2() { let key = PrivateKey::random(KeyType::K1).unwrap(); let key_from_key = PrivateKey::from_str(key.to_string().as_str(), false); assert!(key_from_key.is_ok()); -} \ No newline at end of file +} diff --git a/crates/antelope/tests/integer.rs b/crates/antelope/tests/integer.rs index 8ecb21f..f3d3b28 100644 --- a/crates/antelope/tests/integer.rs +++ b/crates/antelope/tests/integer.rs @@ -1,22 +1,21 @@ - -#[test] -fn int8() { - /* - // Test the `From` trait conversion - let int8_instance: Int8 = 42.into(); - let uint128_instance: UInt128 = 120000300.into(); - //let int8_fail: Int8 = 200.into(); confirmed fails - assert_eq!(int8_instance.value, 42); - - // Test the `new` constructor - let int8_instance = Int8::new(-5); - assert_eq!(int8_instance.value, -5); - - let primitive_int8: i8 = int8_instance.value + 10; - assert_eq!(primitive_int8, int8_instance.value + 10); - assert_eq!(primitive_int8, 5); - - assert_eq!(int8_instance.get_abi_name(), "int8"); - assert_eq!(uint128_instance.get_abi_name(), "uint128"); - */ -} +#[test] +fn int8() { + /* + // Test the `From` trait conversion + let int8_instance: Int8 = 42.into(); + let uint128_instance: UInt128 = 120000300.into(); + //let int8_fail: Int8 = 200.into(); confirmed fails + assert_eq!(int8_instance.value, 42); + + // Test the `new` constructor + let int8_instance = Int8::new(-5); + assert_eq!(int8_instance.value, -5); + + let primitive_int8: i8 = int8_instance.value + 10; + assert_eq!(primitive_int8, int8_instance.value + 10); + assert_eq!(primitive_int8, 5); + + assert_eq!(int8_instance.get_abi_name(), "int8"); + assert_eq!(uint128_instance.get_abi_name(), "uint128"); + */ +} diff --git a/crates/antelope/tests/serializer.rs b/crates/antelope/tests/serializer.rs index f0fe47b..8495a0c 100644 --- a/crates/antelope/tests/serializer.rs +++ b/crates/antelope/tests/serializer.rs @@ -1,52 +1,55 @@ -use antelope::chain::{Decoder, Encoder}; use antelope::chain::name::Name; use antelope::chain::signature::Signature; -use antelope::serializer::serializer::Packer; +use antelope::chain::{Decoder, Encoder}; +use antelope::serializer::Packer; use antelope::util; use antelope::util::{bytes_to_hex, hex_to_bytes}; use antelope_macros::StructPacker; #[test] fn array() { - let data = "0303666f6f036261720362617a"; - let array = vec![String::from("foo"), String::from("bar"), String::from("baz")]; - assert_eq!(bytes_to_hex(&Encoder::pack(&array)), data); - - #[derive(Default, StructPacker)] 
- struct CustomType { - foo: Vec - } + let data = "0303666f6f036261720362617a"; + let array = vec![ + String::from("foo"), + String::from("bar"), + String::from("baz"), + ]; + assert_eq!(bytes_to_hex(&Encoder::pack(&array)), data); + + #[derive(Default, StructPacker)] + struct CustomType { + foo: Vec, + } + + let custom_array = vec![ + CustomType { + foo: vec![ + String::from("h"), + String::from("e"), + String::from("l"), + String::from("l"), + String::from("o"), + ], + }, + CustomType { + foo: vec![ + String::from("w"), + String::from("o"), + String::from("r"), + String::from("l"), + String::from("d"), + ], + }, + ]; - let custom_array = vec![ - CustomType { - foo: vec![ - String::from("h"), - String::from("e"), - String::from("l"), - String::from("l"), - String::from("o"), - ] - }, - CustomType { - foo: vec![ - String::from("w"), - String::from("o"), - String::from("r"), - String::from("l"), - String::from("d"), - ] - } - ]; - - let encoded = "020501680165016c016c016f050177016f0172016c0164"; - assert_eq!(bytes_to_hex(&Encoder::pack(&custom_array)), encoded); + let encoded = "020501680165016c016c016f050177016f0172016c0164"; + assert_eq!(bytes_to_hex(&Encoder::pack(&custom_array)), encoded); } - #[test] fn name() { let data = "000000005c73285d"; - let name1 = Name::from_str("foobar"); + let name1 = Name::new_from_str("foobar"); assert_eq!(bytes_to_hex(&Encoder::pack(&name1)), data); let data_bytes = hex_to_bytes(data); @@ -236,999 +239,999 @@ fn string() { } /* - test('bool', function () { - const data = '01' - const object = true - const json = 'true' - - assert.equal(Serializer.encode({object}).hexString, data) - assert.equal(JSON.stringify(Serializer.decode({data, type: 'bool'})), json) - assert.equal(JSON.stringify(object), json) - }) - - test('public key', function () { - const data = '000223e0ae8aacb41b06dc74af1a56b2eb69133f07f7f75bd1d5e53316bff195edf4' - const object = PublicKey.from('PUB_K1_5AHoNnWetuDhKWSDx3WUf8W7Dg5xjHCMc4yHmmSiaJCFvvAgnB') - const json = '"PUB_K1_5AHoNnWetuDhKWSDx3WUf8W7Dg5xjHCMc4yHmmSiaJCFvvAgnB"' - - assert.equal(Serializer.encode({object}).hexString, data) - assert.equal(JSON.stringify(Serializer.decode({data, type: PublicKey})), json) - assert.equal(JSON.stringify(Serializer.decode({json, type: 'public_key'})), json) - assert.equal(JSON.stringify(object), json) - }) - - test('public key (wa)', function () { - const data = - '020220b9dab512e892392a44a9f41f9433c9fbd80db864e9df5889c2407db3acbb9f010d6b656f73642e696e76616c6964' - const object = PublicKey.from( - 'PUB_WA_WdCPfafVNxVMiW5ybdNs83oWjenQXvSt1F49fg9mv7qrCiRwHj5b38U3ponCFWxQTkDsMC' - ) - const json = - '"PUB_WA_WdCPfafVNxVMiW5ybdNs83oWjenQXvSt1F49fg9mv7qrCiRwHj5b38U3ponCFWxQTkDsMC"' - - assert.equal(Serializer.encode({object}).hexString, data) - assert.equal(JSON.stringify(Serializer.decode({data, type: PublicKey})), json) - assert.equal(JSON.stringify(Serializer.decode({json, type: 'public_key'})), json) - assert.equal(JSON.stringify(object), json) - }) - */ +test('bool', function () { + const data = '01' + const object = true + const json = 'true' + + assert.equal(Serializer.encode({object}).hexString, data) + assert.equal(JSON.stringify(Serializer.decode({data, type: 'bool'})), json) + assert.equal(JSON.stringify(object), json) +}) + +test('public key', function () { + const data = '000223e0ae8aacb41b06dc74af1a56b2eb69133f07f7f75bd1d5e53316bff195edf4' + const object = PublicKey.from('PUB_K1_5AHoNnWetuDhKWSDx3WUf8W7Dg5xjHCMc4yHmmSiaJCFvvAgnB') + const json = 
'"PUB_K1_5AHoNnWetuDhKWSDx3WUf8W7Dg5xjHCMc4yHmmSiaJCFvvAgnB"' + + assert.equal(Serializer.encode({object}).hexString, data) + assert.equal(JSON.stringify(Serializer.decode({data, type: PublicKey})), json) + assert.equal(JSON.stringify(Serializer.decode({json, type: 'public_key'})), json) + assert.equal(JSON.stringify(object), json) +}) + +test('public key (wa)', function () { + const data = + '020220b9dab512e892392a44a9f41f9433c9fbd80db864e9df5889c2407db3acbb9f010d6b656f73642e696e76616c6964' + const object = PublicKey.from( + 'PUB_WA_WdCPfafVNxVMiW5ybdNs83oWjenQXvSt1F49fg9mv7qrCiRwHj5b38U3ponCFWxQTkDsMC' + ) + const json = + '"PUB_WA_WdCPfafVNxVMiW5ybdNs83oWjenQXvSt1F49fg9mv7qrCiRwHj5b38U3ponCFWxQTkDsMC"' + + assert.equal(Serializer.encode({object}).hexString, data) + assert.equal(JSON.stringify(Serializer.decode({data, type: PublicKey})), json) + assert.equal(JSON.stringify(Serializer.decode({json, type: 'public_key'})), json) + assert.equal(JSON.stringify(object), json) +}) +*/ #[test] fn signature() { - let data = + let data = hex_to_bytes("00205150a67288c3b393fdba9061b05019c54b12bdac295fc83bebad7cd63c7bb67d5cb8cc220564da006240a58419f64d06a5c6e1fc62889816a6c3dfdd231ed389"); - let json = + let json = "SIG_K1_KfPLgpw35iX8nfDzhbcmSBCr7nEGNEYXgmmempQspDJYBCKuAEs5rm3s4ZuLJY428Ca8ZhvR2Dkwu118y3NAoMDxhicRj9"; - let sig = Signature::from_string(json).unwrap(); + let sig = Signature::from_string(json).unwrap(); - let encoded = Encoder::pack(&sig); - assert_eq!(encoded, data); + let encoded = Encoder::pack(&sig); + assert_eq!(encoded, data); - let mut decoder = Decoder::new(data.as_slice()); - let decoded_sig = &mut Signature::default(); - let decoded_size = decoder.unpack(decoded_sig); - assert_eq!(decoded_size, 66); - assert_eq!(decoded_sig.to_string(), json); + let mut decoder = Decoder::new(data.as_slice()); + let decoded_sig = &mut Signature::default(); + let decoded_size = decoder.unpack(decoded_sig); + assert_eq!(decoded_size, 66); + assert_eq!(decoded_sig.to_string(), json); } /* - test('signature (wa)', function () { - const sig = - 'SIG_WA_2AAAuLJS3pLPgkQQPqLsehL6VeRBaAZS7NYM91UYRUrSAEfUvzKN7DCSwhjsDqe74cZNWKUU' + - 'GAHGG8ddSA7cvUxChbfKxLSrDCpwe6MVUqz4PDdyCt5tXhEJmKekxG1o1ucY3LVj8Vi9rRbzAkKPCzW' + - 'qC8cPcUtpLHNG8qUKkQrN4Xuwa9W8rsBiUKwZv1ToLyVhLrJe42pvHYBXicp4E8qec5E4m6SX11KuXE' + - 'RFcV48Mhiie2NyaxdtNtNzQ5XZ5hjBkxRujqejpF4SNHvdAGKRBbvhkiPLA25FD3xoCbrN26z72' - const data = - '0220d9132bbdb219e4e2d99af9c507e3597f86b615814f36672d501034861792bbcf21a46d1a2eb12bace4a29100b942f987494f3aefc8' + - 'efb2d5af4d4d8de3e0871525aa14905af60ca17a1bb80e0cf9c3b46908a0f14f72567a2f140c3a3bd2ef074c010000006d737b226f7269' + - '67696e223a2268747470733a2f2f6b656f73642e696e76616c6964222c2274797065223a22776562617574686e2e676574222c22636861' + - '6c6c656e6765223a226f69567235794848304a4336453962446675347142735a6a527a70416c5131505a50436e5974766850556b3d227d' - const object = Signature.from(sig) - const json = `"${sig}"` - - assert.equal(Serializer.encode({object}).hexString, data) - assert.equal(JSON.stringify(Serializer.decode({data, type: Signature})), json) - assert.equal(JSON.stringify(Serializer.decode({json, type: 'signature'})), json) - assert.equal(JSON.stringify(object), json) - }) - - test('time point', function () { - const data = 'f8b88a3cd5620400' - const object = TimePoint.from(1234567890123000) - const json = '"2009-02-13T23:31:30.123"' - - assert.equal(Serializer.encode({object}).hexString, data) - assert.equal(JSON.stringify(Serializer.decode({data, type: TimePoint})), json) - 
assert.equal(JSON.stringify(Serializer.decode({json, type: 'time_point'})), json) - assert.equal(JSON.stringify(object), json) - }) - - test('time point sec', function () { - const data = 'd2029649' - const object = TimePointSec.from(1234567890) - const json = '"2009-02-13T23:31:30"' - - assert.equal(Serializer.encode({object}).hexString, data) - assert.equal(JSON.stringify(Serializer.decode({data, type: TimePointSec})), json) - assert.equal(JSON.stringify(Serializer.decode({json, type: 'time_point_sec'})), json) - assert.equal(JSON.stringify(object), json) - }) - - test('optionals', function () { - assert.equal(Serializer.decode({data: '00', type: 'public_key?'}), null) - assert.equal(Serializer.decode({data: '0101', type: 'bool?'}), true) - assert.equal(Serializer.encode({object: null, type: 'signature?'}).hexString, '00') - assert.throws(() => { - Serializer.decode({object: null, type: 'bool'}) - }) - assert.throws(() => { - Serializer.encode({object: null, type: 'bool'}) - }) - }) - - test('api', function () { - assert.throws(() => { - Serializer.decode({json: '"foo"', type: 'santa'}) - }) - assert.throws(() => { - const BadType: any = {abiName: 'santa'} - Serializer.decode({json: '"foo"', type: BadType}) - }) - assert.throws(() => { - const BadType: any = {abiName: 'santa'} - Serializer.encode({object: 'foo', type: BadType}) - }) - assert.throws(() => { - Serializer.encode({object: 42 as any}) - }) - }) - - test('decoding errors', function () { - const abi = ABI.from({ - structs: [ - { - base: '', - name: 'type1', - fields: [{name: 'foo', type: 'type2?'}], - }, - { - base: '', - name: 'type2', - fields: [{name: 'bar', type: 'type3[]'}], - }, - { - base: '', - name: 'type3', - fields: [{name: 'baz', type: 'int8'}], - }, - ], - }) - try { - const object = {foo: {bar: [{baz: 'not int'}]}} - Serializer.decode({object, type: 'type1', abi}) - assert.fail() - } catch (error) { - assert.equal( - error.message, - 'Decoding error at root.foo.bar.0.baz: Invalid number' - ) - } - try { - const data = Bytes.from('beefbeef', 'utf8') - Serializer.decode({data, type: 'type1', abi}) - assert.fail() - } catch (error) { - assert.equal( - error.message, - 'Decoding error at root.foo.bar.6.baz: Read past end of buffer' - ) - } - }) - - test('variant', function () { - const abi = ABI.from({ - structs: [{base: '', name: 'struct', fields: [{name: 'field1', type: 'bool'}]}], - variants: [{name: 'foo', types: ['uint8', 'string[]', 'struct', 'struct?']}], - }) - assert.deepEqual( - Serializer.objectify(Serializer.decode({data: '00ff', abi, type: 'foo'})), - ['uint8', 255] - ) - assert.deepEqual( - Serializer.objectify(Serializer.decode({object: UInt8.from(255), abi, type: 'foo'})), - ['uint8', 255] - ) - assert.equal( - Serializer.encode({object: UInt8.from(255), abi, type: 'foo'}).hexString, - '00ff' - ) - assert.equal( - Serializer.encode({object: ['struct?', {field1: true}], abi, type: 'foo'}).hexString, - '030101' - ) - assert.throws(() => { - Serializer.decode({data: '04ff', abi, type: 'foo'}) - }) - assert.throws(() => { - Serializer.encode({object: UInt64.from(255), abi, type: 'foo'}) - }) - }) - - test('custom variant', function () { - @Struct.type('my_struct') - class MyStruct extends Struct { - @Struct.field('string?') foo?: string - } - @Variant.type('my_variant', [ - 'string', - 'bool', - 'string[]', - MyStruct, - {type: MyStruct, array: true}, - ]) - class MyVariant extends Variant { - declare value: string | boolean | string[] | MyStruct | MyStruct[] - } - assert.deepEqual(MyVariant.from('hello'), 
{value: 'hello', variantIdx: 0}) - assert.deepEqual(MyVariant.from(false), {value: false, variantIdx: 1}) - assert.deepEqual(MyVariant.from(['string[]', ['bool', 'booly']]), { - value: ['bool', 'booly'], - variantIdx: 2, - }) - assert.deepEqual(MyVariant.from(MyStruct.from({foo: 'bar'})), { - value: {foo: 'bar'}, - variantIdx: 3, - }) - assert.deepEqual(MyVariant.from(['my_struct', {foo: 'bar'}]), { - value: {foo: 'bar'}, - variantIdx: 3, - }) - assert.deepEqual(MyVariant.from(['my_struct[]', [{foo: 'bar'}]]), { - value: [{foo: 'bar'}], - variantIdx: 4, - }) - assert.equal(JSON.stringify(MyVariant.from('hello')), '["string","hello"]') - assert.equal(Serializer.encode({object: MyVariant.from(false)}).hexString, '0100') - assert.equal(Serializer.encode({object: false, type: MyVariant}).hexString, '0100') - assert.equal( - Serializer.encode({object: ['string', 'hello'], type: MyVariant}).hexString, - '000568656c6c6f' - ) - assert.deepEqual( - Serializer.decode({object: ['my_struct', {foo: 'bar'}], type: MyVariant}), - {value: {foo: 'bar'}, variantIdx: 3} - ) - assert.deepEqual(Serializer.decode({data: '0101', type: MyVariant}), { - value: true, - variantIdx: 1, - }) - assert.throws(() => { - MyVariant.from(Name.from('hello')) - }) - assert.throws(() => { - MyVariant.from(['not_my_struct', {foo: 'bar'}]) - }) - }) - - test('alias', function () { - const abi = ABI.from({ - types: [ - { - new_type_name: 'super_string', - type: 'string', - }, - { - new_type_name: 'super_foo', - type: 'foo', - }, - ], - structs: [ - { - base: '', - name: 'foo', - fields: [{name: 'bar', type: 'string'}], - }, - ], - }) - assert.equal( - Serializer.encode({object: 'foo', type: 'super_string', abi}).hexString, - '03666f6f' - ) - assert.equal(Serializer.decode({data: '03666f6f', type: 'super_string', abi}), 'foo') - assert.equal( - Serializer.encode({object: {bar: 'foo'}, type: 'super_foo', abi}).hexString, - '03666f6f' - ) - assert.deepEqual( - Serializer.decode({ - data: '03666f6f', - type: 'super_foo', - abi, - }), - { - bar: 'foo', - } - ) - assert.deepEqual( - Serializer.decode({ - object: {bar: 'foo'}, - type: 'super_foo', - abi, - }), - { - bar: 'foo', - } - ) - }) - - test('custom alias', function () { - @TypeAlias('super_int') - class SuperInt extends Int32 { - static from(value: Int32Type) { - if (typeof value === 'number' && value < 100) { - value *= 42 - } - return super.from(value) as SuperInt - } - didIt = false - doIt() { - this.didIt = true - } - } - assert.equal( - Serializer.encode({ - object: SuperInt.from(10), - }).hexString, - 'a4010000' - ) - assert.equal( - Serializer.encode({ - object: 10, - type: 'super_int', - customTypes: [SuperInt], - }).hexString, - 'a4010000' - ) - const decoded = Serializer.decode({ - data: 'a4010000', - type: 'super_int', - customTypes: [SuperInt], - }) - assert.equal(decoded instanceof SuperInt, true) - assert.equal(decoded instanceof Int32, true) - const sint = Serializer.decode({ - data: 'a4010000', - type: SuperInt, - }) - assert.strictEqual(sint.didIt, false) - sint.doIt() - assert.strictEqual(sint.didIt, true) - @Variant.type('my_variant', ['string', SuperInt]) - class MyVariant extends Variant {} - const v = MyVariant.from(['super_int', 1]) - assert.equal(v.value instanceof SuperInt, true) - const v2 = Serializer.decode({data: '01a4010000', type: MyVariant}) - assert.equal(v2.value instanceof SuperInt, true) - }) - - test('synthesize abi', function () { - @TypeAlias('my_transaction') - class MyTransaction extends Transaction {} - - @Variant.type('my_variant', 
['string', MyTransaction]) - class MyVariant extends Variant {} - - assert.deepEqual(Serializer.synthesize(MyVariant), { - version: 'eosio::abi/1.1', - types: [{new_type_name: 'my_transaction', type: 'transaction'}], - variants: [{name: 'my_variant', types: ['string', 'my_transaction']}], - structs: [ - { - base: '', - name: 'permission_level', - fields: [ - {name: 'actor', type: 'name'}, - {name: 'permission', type: 'name'}, - ], - }, - { - base: '', - name: 'action', - fields: [ - {name: 'account', type: 'name'}, - {name: 'name', type: 'name'}, - {name: 'authorization', type: 'permission_level[]'}, - {name: 'data', type: 'bytes'}, - ], - }, - { - base: '', - name: 'transaction_extension', - fields: [ - {name: 'type', type: 'uint16'}, - {name: 'data', type: 'bytes'}, - ], - }, - { - base: '', - name: 'transaction_header', - fields: [ - {name: 'expiration', type: 'time_point_sec'}, - {name: 'ref_block_num', type: 'uint16'}, - {name: 'ref_block_prefix', type: 'uint32'}, - {name: 'max_net_usage_words', type: 'varuint32'}, - {name: 'max_cpu_usage_ms', type: 'uint8'}, - {name: 'delay_sec', type: 'varuint32'}, - ], - }, - { - base: 'transaction_header', - name: 'transaction', - fields: [ - {name: 'context_free_actions', type: 'action[]'}, - {name: 'actions', type: 'action[]'}, - {name: 'transaction_extensions', type: 'transaction_extension[]'}, - ], - }, - ], - actions: [], - tables: [], - ricardian_clauses: [], - action_results: [], - }) - }) - - test('circular alias', function () { - const abi = ABI.from({ - types: [ - {new_type_name: 'a', type: 'a'}, - {new_type_name: 'b1', type: 'b2'}, - {new_type_name: 'b2', type: 'b1'}, - {new_type_name: 'c1', type: 'c2'}, - {new_type_name: 'c2', type: 'c3'}, - ], - structs: [ - {base: '', name: 'c3', fields: [{name: 'f', type: 'c4'}]}, - {base: '', name: 'c4', fields: [{name: 'f', type: 'c1'}]}, - ], - }) - assert.throws(() => { - Serializer.decode({data: 'beef', type: 'a', abi}) - }) - assert.throws(() => { - Serializer.decode({data: 'beef', type: 'b1', abi}) - }) - assert.throws(() => { - Serializer.decode({data: 'beef', type: 'c1', abi}) - }) - assert.throws(() => { - Serializer.encode({object: {f: {f: {}}}, type: 'c1', abi}) - }) - }) - - test('complex type', function () { - @TypeAlias('do_you_even') - class DoYouEven extends Int128 {} - @Variant.type('several_things', [{type: Transaction, array: true}, 'bool?', DoYouEven]) - class SeveralThings extends Variant {} - @Struct.type('complex') - class Complex extends Struct { - @Struct.field(SeveralThings) declare things: SeveralThings - @Struct.field(Complex, {optional: true}) self?: Complex - } - const object = Complex.from({ - things: [ - 'transaction[]', - [ - { - actions: [ - { - account: 'eosio.token', - name: 'transfer', - authorization: [{actor: 'foo', permission: 'active'}], - data: '000000000000285d000000000000ae39e80300000000000003454f53000000000b68656c6c6f207468657265', - }, - ], - context_free_actions: [], - delay_sec: 123, - expiration: '2018-02-15T00:00:00', - max_cpu_usage_ms: 99, - max_net_usage_words: 0, - ref_block_num: 0, - ref_block_prefix: 0, - transaction_extensions: [], - }, - ], - ], - self: { - things: ['do_you_even', 2], - self: { - things: ['do_you_even', '-170141183460469231731687303715884105727'], - }, - }, - }) - const recoded = Serializer.decode({data: Serializer.encode({object}), type: Complex}) - assert.deepStrictEqual(JSON.parse(JSON.stringify(recoded)), { - things: [ - 'transaction[]', - [ - { - delay_sec: 123, - expiration: '2018-02-15T00:00:00', - max_cpu_usage_ms: 
99, - max_net_usage_words: 0, - ref_block_num: 0, - ref_block_prefix: 0, - context_free_actions: [], - actions: [ - { - account: 'eosio.token', - name: 'transfer', - authorization: [{actor: 'foo', permission: 'active'}], - data: '000000000000285d000000000000ae39e80300000000000003454f53000000000b68656c6c6f207468657265', - }, - ], - transaction_extensions: [], - }, - ], - ], - self: { - things: ['do_you_even', 2], - self: { - things: ['do_you_even', '-170141183460469231731687303715884105727'], - self: null, - }, - }, - }) - }) - - test('typestresser abi', function () { - // eslint-disable-next-line @typescript-eslint/no-var-requires - - const abi = typestresserAbi - // .readFileSync(__dirname + '/') - // .toString() - const object = { - bool: true, - int8: 127, - uint8: 255, - int16: 32767, - uint16: 65535, - int32: 2147483647, - uint32: 4294967295, - int64: '9223372036854775807', - uint64: '18446744073709551615', - int128: '170141183460469231731687303715884105727', - uint128: '340282366920938463463374607431768211455', - varint32: 2147483647, - varuint32: 4294967295, - float32: '3.1415925', - float64: '3.141592653589793', - float128: '0xbeefbeefbeefbeefbeefbeefbeefbeef', - time_point: '2020-02-02T02:02:02.222', - time_point_sec: '2020-02-02T02:02:02', - block_timestamp_type: '2020-02-02T02:02:02.500', - name: 'foobar', - bytes: 'beef', - string: 'hello', - checksum160: 'ffffffffffffffffffffffffffffffffffffffff', - checksum256: 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', - checksum512: - 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', - public_key: 'PUB_K1_5AHoNnWetuDhKWSDx3WUf8W7Dg5xjHCMc4yHmmSiaJCFvvAgnB', - signature: - 'SIG_K1_KfPLgpw35iX8nfDzhbcmSBCr7nEGNEYXgmmempQspDJYBCKuAEs5rm3s4ZuLJY428Ca8ZhvR2Dkwu118y3NAoMDxhicRj9', - symbol: '7,PI', - symbol_code: 'PI', - asset: '3.1415926 PI', - extended_asset: { - quantity: '3.1415926 PI', - contract: 'pi.token', - }, - alias1: true, - alias2: true, - alias3: { - bool: true, - }, - alias4: ['int8', 1], - alias5: [true, true], - alias6: null, - extension: { - message: 'hello', - extension: { - message: 'world', - extension: null, - }, - }, - } - - const data = Serializer.encode({object, type: 'all_types', abi}) - assert.equal( - data.hexString, - '017fffff7fffffffffff7fffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffff7fffff' + - 'fffffffffffffffffffffffffffffeffffff0fffffffff0fda0f4940182d4454fb210940beefbeefbeefbeefbeefbeefbe' + - 'efbeefb07d56318e9d05009a2d365e35d4914b000000005c73285d02beef0568656c6c6fffffffffffffffffffffffffff' + - 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff' + - 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff' + - 'ffffffffff000223e0ae8aacb41b06dc74af1a56b2eb69133f07f7f75bd1d5e53316bff195edf400205150a67288c3b393' + - 'fdba9061b05019c54b12bdac295fc83bebad7cd63c7bb67d5cb8cc220564da006240a58419f64d06a5c6e1fc62889816a6' + - 'c3dfdd231ed38907504900000000005049000000000000765edf01000000000750490000000000765edf01000000000750' + - '49000000000000000053419a81ab0101010001020101000568656c6c6f0105776f726c6400' - ) - const decoded = Serializer.decode({data, type: 'all_types', abi}) - assert.deepStrictEqual(JSON.parse(JSON.stringify(decoded)), object) - }) - - test('coder metadata', function () { - @TypeAlias('endian_int64') - class EndianInt64 extends Int64 { - static fromABI(decoder: ABIDecoder) { - const 
bigEndian = decoder.metadata['endian'] === 'big' - const data = decoder.readArray(8) - const bn = new BN(data, undefined, bigEndian ? 'be' : 'le') - return new this(bn) - } - toABI(encoder: ABIEncoder) { - const bigEndian = encoder.metadata['endian'] === 'big' - const data = this.value.toArray(bigEndian ? 'be' : 'le', 8) - encoder.writeArray(data) - } - } - const bigData = Serializer.encode({ - object: 255, - type: EndianInt64, - metadata: {endian: 'big'}, - }) - const littleData = Serializer.encode({ - object: 255, - type: EndianInt64, - metadata: {endian: 'little'}, - }) - assert.equal(bigData.hexString, '00000000000000ff') - assert.equal(littleData.hexString, 'ff00000000000000') - const valueFromBig = Serializer.decode({ - data: bigData, - type: EndianInt64, - metadata: {endian: 'big'}, - }) - const valueFromLittle = Serializer.decode({ - data: littleData, - type: EndianInt64, - metadata: {endian: 'little'}, - }) - assert.equal(valueFromBig.toNumber(), 255) - assert.equal(valueFromLittle.toNumber(), 255) - }) - - test('object-only any coding', function () { - @Struct.type('my_struct') - class MyStruct extends Struct { - @Struct.field('any') declare foo: any - @Struct.field('any[]') declare bar: any[] - @Struct.field('any', {optional: true}) baz?: any - @Struct.field('name') declare account: Name - } - const decoded = Serializer.decode({ - object: { - foo: 'hello', - bar: [1, 'two', false], - account: 'foobar1234', - }, - type: MyStruct, - }) - assert.deepEqual(JSON.parse(JSON.stringify(decoded)), { - foo: 'hello', - bar: [1, 'two', false], - baz: null, - account: 'foobar1234', - }) - const abi = Serializer.synthesize(MyStruct) - const decoded2 = Serializer.decode({ - object: { - foo: {nested: 'obj'}, - bar: [], - baz: {b: {a: {z: 'zz'}}}, - account: 'foo', - }, - type: 'my_struct', - abi, - }) - assert.deepEqual(JSON.parse(JSON.stringify(decoded2)), { - foo: {nested: 'obj'}, - bar: [], - baz: {b: {a: {z: 'zz'}}}, - account: 'foo', - }) - assert.throws(() => { - Serializer.decode({data: 'beef', type: MyStruct}) - }) - assert.throws(() => { - Serializer.encode({object: decoded}) - }) - }) - - test('coding with type descriptors', function () { - const array = Serializer.decode({ - data: '020000ffff', - type: {type: UInt16, array: true}, - }) as UInt16[] - assert.deepEqual(array.map(Number), [0, 65535]) - const optional = Serializer.decode({ - data: '00', - type: {type: Transaction, optional: true}, - }) - assert.strictEqual(optional, null) - const obj = Serializer.decode({ - object: [false, true, false], - type: {type: 'bool', array: true}, - }) - assert.deepEqual(obj, [false, true, false]) - @Struct.type('my_struct') - class MyStruct extends Struct { - @Struct.field('uint16') declare foo: UInt16 - } - const encoded = Serializer.encode({ - object: [{foo: 0}, {foo: 65535}], - type: {type: MyStruct, array: true}, - }) - assert.equal(encoded.hexString, '020000ffff') - const decoded = Serializer.decode({ - data: '020000ffff', - type: {type: MyStruct, array: true}, - }) as MyStruct[] - assert.equal(decoded.length, 2) - assert.equal( - decoded.every((v) => v instanceof MyStruct), - true - ) - assert.deepEqual( - decoded.map((v) => Number(v.foo)), - [0, 65535] - ) - }) - - test('unicode', function () { - const data = Serializer.encode({object: '😷'}) - const text = Serializer.decode({data, type: 'string'}) - assert.strictEqual(text, '😷') - }) - - test('argument mutation', function () { - // should never mutate input values to 'from' methods - @Struct.type('test_obj') - class TestObj extends Struct 
{ - @Struct.field('asset') declare asset: Asset - @Struct.field('int32') int32!: Int32 - @Struct.field(PermissionLevel) declare auth: PermissionLevel - } - const object = {asset: '1.3 ROCKS', int32: 1234, auth: {actor: 'foo', permission: 'bar'}} - const original = JSON.parse(JSON.stringify(object)) - assert.deepStrictEqual(object, original) - TestObj.from(object) - assert.deepStrictEqual(object, original) - Serializer.decode({object, type: 'test_obj', customTypes: [TestObj]}) - assert.deepStrictEqual(object, original) - }) - - test('abi resolve all', function () { - const abi = ABI.from({ - types: [ - {new_type_name: 'a', type: 'a'}, - {new_type_name: 'b1', type: 'b2'}, - {new_type_name: 'b2', type: 'b1'}, - {new_type_name: 'c1', type: 'c2'}, - {new_type_name: 'c2', type: 'c3'}, - ], - structs: [ - {base: '', name: 'c3', fields: [{name: 'f', type: 'c4'}]}, - {base: '', name: 'c4', fields: [{name: 'f', type: 'c1'}]}, - {base: 'c4', name: 'c5', fields: [{name: 'f2', type: 'c5[]?'}]}, - ], - variants: [{name: 'c6', types: ['a', 'b1', 'c1', 'c5']}], - }) - const types = abi.resolveAll() - const allTypes = types.types.concat(types.structs).concat(types.variants) - const maxId = allTypes.reduce((p, v) => (v.id > p ? v.id : p), 0) - assert.equal(maxId, 9) - }) - - test('objectify', function () { - const tx = Transaction.from({ - ref_block_num: 123, - ref_block_prefix: 456, - expiration: 992, - actions: [ - { - account: 'eosio.token', - name: 'transfer', - authorization: [{actor: 'foo', permission: 'active'}], - data: '0000000000855c340000000000000e3da40100000000000001474d5a0000000007666f7220796f75', - }, - ], - }) - assert.deepStrictEqual(Serializer.objectify(tx), { - expiration: '1970-01-01T00:16:32', - ref_block_num: 123, - ref_block_prefix: 456, - max_net_usage_words: 0, - max_cpu_usage_ms: 0, - delay_sec: 0, - context_free_actions: [], - actions: [ - { - account: 'eosio.token', - name: 'transfer', - authorization: [{actor: 'foo', permission: 'active'}], - data: '0000000000855c340000000000000e3da40100000000000001474d5a0000000007666f7220796f75', - }, - ], - transaction_extensions: [], - }) - }) - - test('struct optional field', function () { - @Struct.type('test') - class Test extends Struct { - @Struct.field('string') declare a: string - @Struct.field('string?') b?: string - @Struct.field('string', {optional: true}) c?: string - @Struct.field('string[]?') d?: string - } - assert.doesNotThrow(() => { - Test.from({a: 'foo'}) - }) - assert.throws(() => { - Test.from({b: 'foo'}) - }, /encountered undefined for non-optional/) - }) - - test('abi def', function () { - const abi = ABI.from({ - types: [{new_type_name: 'b', type: 'a'}], - structs: [{base: '', name: 'a', fields: [{name: 'f', type: 'a'}]}], - tables: [ - {name: 't', type: 'a', index_type: 'i64', key_names: ['k'], key_types: ['i64']}, - ], - ricardian_clauses: [{id: 'foo', body: 'bar'}], - variants: [{name: 'v', types: ['a', 'b']}], - }) - const data = Serializer.encode({object: abi}) - assert.equal( - data.hexString, - '0e656f73696f3a3a6162692f312e310101620161010161000101660161000100000000000000c80369363401016b010369363401610103666f6f036261720000010176020161016200' - ) - const decoded = Serializer.objectify(Serializer.decode({data, type: ABI})) - assert.deepEqual(abi.types, decoded.types) - assert.deepEqual(abi.structs, decoded.structs) - assert.deepEqual(abi.tables, decoded.tables) - assert.deepEqual(abi.ricardian_clauses, decoded.ricardian_clauses) - assert.deepEqual(abi.variants, decoded.variants) - assert.ok(abi.equals(decoded)) - }) 
- - test('binary extensions', function () { - @Struct.type('info_pair') - class InfoPair extends Struct { - @Struct.field('string') declare key: string - @Struct.field('bytes') declare value: Bytes - } - @TypeAlias('super_int') - class SuperInt extends UInt8 { - static abiDefault() { - return SuperInt.from(42) - } - } - @Variant.type('jazz_variant', [SuperInt, 'string']) - class JazzVariant extends Variant {} - @Struct.type('many_extensions') - class ManyExtensions extends Struct { - @Struct.field('string') declare name: string - @Struct.field(InfoPair, {array: true, extension: true}) declare info: InfoPair[] - @Struct.field(InfoPair, {extension: true}) declare singleInfo: InfoPair - @Struct.field('uint32$') declare uint32: UInt32 - @Struct.field('asset$') declare asset: Asset - @Struct.field('checksum256$') declare checksum256: Checksum256 - @Struct.field(SuperInt, {extension: true}) declare superInt: SuperInt - @Struct.field(JazzVariant, {extension: true}) declare jazz: JazzVariant - @Struct.field(JazzVariant, {extension: true, optional: true}) - declare maybeJazz?: JazzVariant - @Struct.field('bool?$') declare dumbBool?: boolean - @Struct.field('bool$') declare bool: boolean - } - const res1 = Serializer.decode({ - data: '03666f6f', - type: ManyExtensions, - strictExtensions: true, - }) - assert.equal(res1.uint32.toNumber(), 0) - assert.equal(res1.asset.toString(), '0.0000 SYS') - assert.equal(res1.superInt.toNumber(), 42) - assert.equal(res1.jazz.value, 42) - assert.strictEqual(res1.maybeJazz, null) - assert.strictEqual(res1.dumbBool, null) - assert.strictEqual(res1.bool, false) - const res2 = Serializer.decode({ - object: {name: 'foo'}, - type: ManyExtensions, - strictExtensions: true, - }) - assert.ok(res1.equals(res2)) - const abi = Serializer.synthesize(ManyExtensions) - const res3 = Serializer.decode({ - object: {name: 'foo', dumbBool: false}, - abi, - type: 'many_extensions', - strictExtensions: true, - }) as any - assert.equal(res3.superInt.toNumber(), 0) // expected since we loose coupling to the SuperInt type implementation and it resolves to UInt8 instead - assert.equal(res3.jazz[0], 'super_int') - assert.strictEqual(res3.dumbBool, false) - assert.strictEqual(res3.bool, false) - const res4 = Serializer.decode({ - object: {name: 'foo', jazz: JazzVariant.from('hi'), maybeJazz: ['super_int', 22]}, - abi, - type: 'many_extensions', - customTypes: [SuperInt, JazzVariant], - strictExtensions: true, - }) as any - assert.equal(res4.superInt.toNumber(), 42) // coupling restored - assert.equal(res4.jazz.value, 'hi') - assert.equal(res4.maybeJazz.value, 22) - const OptimisticBool: any = { - // don't try this at home, just because you can doesn't mean you should - abiName: 'bool', - abiDefault: () => true, - from: (value: boolean): boolean => value, - } - const res5 = Serializer.decode({ - object: {name: 'foo'}, - abi, - type: 'many_extensions', - customTypes: [SuperInt, JazzVariant, OptimisticBool], - strictExtensions: true, - }) as any - assert.strictEqual(res5.bool, true) - - abi.structs[1].fields[1].type = 'many_extensions$' - assert.throws(() => { - Serializer.decode({ - data: '03666f6f', - abi, - type: 'many_extensions', - strictExtensions: true, - }) - }, /Circular type reference/) - }) - - test('action_results', function () { - const raw = { - ____comment: 'This file was generated with eosio-abigen. 
DO NOT EDIT ', - version: 'eosio::abi/1.2', - types: [], - structs: [ - { - name: 'Result', - base: '', - fields: [ - { - name: 'id', - type: 'uint32', - }, - ], - }, - { - name: 'test', - base: '', - fields: [ - { - name: 'eos_account', - type: 'name', - }, - ], - }, - ], - actions: [ - { - name: 'test', - type: 'test', - ricardian_contract: '', - }, - ], - tables: [], - ricardian_clauses: [], - variants: [], - action_results: [ - { - name: 'test', - result_type: 'Result', - }, - ], - } - const abi = ABI.from(raw) - const encoded = Serializer.encode({object: abi}) - const decoded = Serializer.decode({data: encoded, type: ABI}) - assert.isTrue(abi.equals(decoded)) - assert.lengthOf(decoded.action_results, 1) - assert.isTrue(Name.from(decoded.action_results[0].name).equals('test')) - assert.equal(decoded.action_results[0].result_type, 'Result') - }) - */ \ No newline at end of file + test('signature (wa)', function () { + const sig = + 'SIG_WA_2AAAuLJS3pLPgkQQPqLsehL6VeRBaAZS7NYM91UYRUrSAEfUvzKN7DCSwhjsDqe74cZNWKUU' + + 'GAHGG8ddSA7cvUxChbfKxLSrDCpwe6MVUqz4PDdyCt5tXhEJmKekxG1o1ucY3LVj8Vi9rRbzAkKPCzW' + + 'qC8cPcUtpLHNG8qUKkQrN4Xuwa9W8rsBiUKwZv1ToLyVhLrJe42pvHYBXicp4E8qec5E4m6SX11KuXE' + + 'RFcV48Mhiie2NyaxdtNtNzQ5XZ5hjBkxRujqejpF4SNHvdAGKRBbvhkiPLA25FD3xoCbrN26z72' + const data = + '0220d9132bbdb219e4e2d99af9c507e3597f86b615814f36672d501034861792bbcf21a46d1a2eb12bace4a29100b942f987494f3aefc8' + + 'efb2d5af4d4d8de3e0871525aa14905af60ca17a1bb80e0cf9c3b46908a0f14f72567a2f140c3a3bd2ef074c010000006d737b226f7269' + + '67696e223a2268747470733a2f2f6b656f73642e696e76616c6964222c2274797065223a22776562617574686e2e676574222c22636861' + + '6c6c656e6765223a226f69567235794848304a4336453962446675347142735a6a527a70416c5131505a50436e5974766850556b3d227d' + const object = Signature.from(sig) + const json = `"${sig}"` + + assert.equal(Serializer.encode({object}).hexString, data) + assert.equal(JSON.stringify(Serializer.decode({data, type: Signature})), json) + assert.equal(JSON.stringify(Serializer.decode({json, type: 'signature'})), json) + assert.equal(JSON.stringify(object), json) + }) + + test('time point', function () { + const data = 'f8b88a3cd5620400' + const object = TimePoint.from(1234567890123000) + const json = '"2009-02-13T23:31:30.123"' + + assert.equal(Serializer.encode({object}).hexString, data) + assert.equal(JSON.stringify(Serializer.decode({data, type: TimePoint})), json) + assert.equal(JSON.stringify(Serializer.decode({json, type: 'time_point'})), json) + assert.equal(JSON.stringify(object), json) + }) + + test('time point sec', function () { + const data = 'd2029649' + const object = TimePointSec.from(1234567890) + const json = '"2009-02-13T23:31:30"' + + assert.equal(Serializer.encode({object}).hexString, data) + assert.equal(JSON.stringify(Serializer.decode({data, type: TimePointSec})), json) + assert.equal(JSON.stringify(Serializer.decode({json, type: 'time_point_sec'})), json) + assert.equal(JSON.stringify(object), json) + }) + + test('optionals', function () { + assert.equal(Serializer.decode({data: '00', type: 'public_key?'}), null) + assert.equal(Serializer.decode({data: '0101', type: 'bool?'}), true) + assert.equal(Serializer.encode({object: null, type: 'signature?'}).hexString, '00') + assert.throws(() => { + Serializer.decode({object: null, type: 'bool'}) + }) + assert.throws(() => { + Serializer.encode({object: null, type: 'bool'}) + }) + }) + + test('api', function () { + assert.throws(() => { + Serializer.decode({json: '"foo"', type: 'santa'}) + }) + assert.throws(() => { + 
const BadType: any = {abiName: 'santa'} + Serializer.decode({json: '"foo"', type: BadType}) + }) + assert.throws(() => { + const BadType: any = {abiName: 'santa'} + Serializer.encode({object: 'foo', type: BadType}) + }) + assert.throws(() => { + Serializer.encode({object: 42 as any}) + }) + }) + + test('decoding errors', function () { + const abi = ABI.from({ + structs: [ + { + base: '', + name: 'type1', + fields: [{name: 'foo', type: 'type2?'}], + }, + { + base: '', + name: 'type2', + fields: [{name: 'bar', type: 'type3[]'}], + }, + { + base: '', + name: 'type3', + fields: [{name: 'baz', type: 'int8'}], + }, + ], + }) + try { + const object = {foo: {bar: [{baz: 'not int'}]}} + Serializer.decode({object, type: 'type1', abi}) + assert.fail() + } catch (error) { + assert.equal( + error.message, + 'Decoding error at root.foo.bar.0.baz: Invalid number' + ) + } + try { + const data = Bytes.from('beefbeef', 'utf8') + Serializer.decode({data, type: 'type1', abi}) + assert.fail() + } catch (error) { + assert.equal( + error.message, + 'Decoding error at root.foo.bar.6.baz: Read past end of buffer' + ) + } + }) + + test('variant', function () { + const abi = ABI.from({ + structs: [{base: '', name: 'struct', fields: [{name: 'field1', type: 'bool'}]}], + variants: [{name: 'foo', types: ['uint8', 'string[]', 'struct', 'struct?']}], + }) + assert.deepEqual( + Serializer.objectify(Serializer.decode({data: '00ff', abi, type: 'foo'})), + ['uint8', 255] + ) + assert.deepEqual( + Serializer.objectify(Serializer.decode({object: UInt8.from(255), abi, type: 'foo'})), + ['uint8', 255] + ) + assert.equal( + Serializer.encode({object: UInt8.from(255), abi, type: 'foo'}).hexString, + '00ff' + ) + assert.equal( + Serializer.encode({object: ['struct?', {field1: true}], abi, type: 'foo'}).hexString, + '030101' + ) + assert.throws(() => { + Serializer.decode({data: '04ff', abi, type: 'foo'}) + }) + assert.throws(() => { + Serializer.encode({object: UInt64.from(255), abi, type: 'foo'}) + }) + }) + + test('custom variant', function () { + @Struct.type('my_struct') + class MyStruct extends Struct { + @Struct.field('string?') foo?: string + } + @Variant.type('my_variant', [ + 'string', + 'bool', + 'string[]', + MyStruct, + {type: MyStruct, array: true}, + ]) + class MyVariant extends Variant { + declare value: string | boolean | string[] | MyStruct | MyStruct[] + } + assert.deepEqual(MyVariant.from('hello'), {value: 'hello', variantIdx: 0}) + assert.deepEqual(MyVariant.from(false), {value: false, variantIdx: 1}) + assert.deepEqual(MyVariant.from(['string[]', ['bool', 'booly']]), { + value: ['bool', 'booly'], + variantIdx: 2, + }) + assert.deepEqual(MyVariant.from(MyStruct.from({foo: 'bar'})), { + value: {foo: 'bar'}, + variantIdx: 3, + }) + assert.deepEqual(MyVariant.from(['my_struct', {foo: 'bar'}]), { + value: {foo: 'bar'}, + variantIdx: 3, + }) + assert.deepEqual(MyVariant.from(['my_struct[]', [{foo: 'bar'}]]), { + value: [{foo: 'bar'}], + variantIdx: 4, + }) + assert.equal(JSON.stringify(MyVariant.from('hello')), '["string","hello"]') + assert.equal(Serializer.encode({object: MyVariant.from(false)}).hexString, '0100') + assert.equal(Serializer.encode({object: false, type: MyVariant}).hexString, '0100') + assert.equal( + Serializer.encode({object: ['string', 'hello'], type: MyVariant}).hexString, + '000568656c6c6f' + ) + assert.deepEqual( + Serializer.decode({object: ['my_struct', {foo: 'bar'}], type: MyVariant}), + {value: {foo: 'bar'}, variantIdx: 3} + ) + assert.deepEqual(Serializer.decode({data: '0101', type: 
MyVariant}), { + value: true, + variantIdx: 1, + }) + assert.throws(() => { + MyVariant.from(Name.from('hello')) + }) + assert.throws(() => { + MyVariant.from(['not_my_struct', {foo: 'bar'}]) + }) + }) + + test('alias', function () { + const abi = ABI.from({ + types: [ + { + new_type_name: 'super_string', + type: 'string', + }, + { + new_type_name: 'super_foo', + type: 'foo', + }, + ], + structs: [ + { + base: '', + name: 'foo', + fields: [{name: 'bar', type: 'string'}], + }, + ], + }) + assert.equal( + Serializer.encode({object: 'foo', type: 'super_string', abi}).hexString, + '03666f6f' + ) + assert.equal(Serializer.decode({data: '03666f6f', type: 'super_string', abi}), 'foo') + assert.equal( + Serializer.encode({object: {bar: 'foo'}, type: 'super_foo', abi}).hexString, + '03666f6f' + ) + assert.deepEqual( + Serializer.decode({ + data: '03666f6f', + type: 'super_foo', + abi, + }), + { + bar: 'foo', + } + ) + assert.deepEqual( + Serializer.decode({ + object: {bar: 'foo'}, + type: 'super_foo', + abi, + }), + { + bar: 'foo', + } + ) + }) + + test('custom alias', function () { + @TypeAlias('super_int') + class SuperInt extends Int32 { + static from(value: Int32Type) { + if (typeof value === 'number' && value < 100) { + value *= 42 + } + return super.from(value) as SuperInt + } + didIt = false + doIt() { + this.didIt = true + } + } + assert.equal( + Serializer.encode({ + object: SuperInt.from(10), + }).hexString, + 'a4010000' + ) + assert.equal( + Serializer.encode({ + object: 10, + type: 'super_int', + customTypes: [SuperInt], + }).hexString, + 'a4010000' + ) + const decoded = Serializer.decode({ + data: 'a4010000', + type: 'super_int', + customTypes: [SuperInt], + }) + assert.equal(decoded instanceof SuperInt, true) + assert.equal(decoded instanceof Int32, true) + const sint = Serializer.decode({ + data: 'a4010000', + type: SuperInt, + }) + assert.strictEqual(sint.didIt, false) + sint.doIt() + assert.strictEqual(sint.didIt, true) + @Variant.type('my_variant', ['string', SuperInt]) + class MyVariant extends Variant {} + const v = MyVariant.from(['super_int', 1]) + assert.equal(v.value instanceof SuperInt, true) + const v2 = Serializer.decode({data: '01a4010000', type: MyVariant}) + assert.equal(v2.value instanceof SuperInt, true) + }) + + test('synthesize abi', function () { + @TypeAlias('my_transaction') + class MyTransaction extends Transaction {} + + @Variant.type('my_variant', ['string', MyTransaction]) + class MyVariant extends Variant {} + + assert.deepEqual(Serializer.synthesize(MyVariant), { + version: 'eosio::abi/1.1', + types: [{new_type_name: 'my_transaction', type: 'transaction'}], + variants: [{name: 'my_variant', types: ['string', 'my_transaction']}], + structs: [ + { + base: '', + name: 'permission_level', + fields: [ + {name: 'actor', type: 'name'}, + {name: 'permission', type: 'name'}, + ], + }, + { + base: '', + name: 'action', + fields: [ + {name: 'account', type: 'name'}, + {name: 'name', type: 'name'}, + {name: 'authorization', type: 'permission_level[]'}, + {name: 'data', type: 'bytes'}, + ], + }, + { + base: '', + name: 'transaction_extension', + fields: [ + {name: 'type', type: 'uint16'}, + {name: 'data', type: 'bytes'}, + ], + }, + { + base: '', + name: 'transaction_header', + fields: [ + {name: 'expiration', type: 'time_point_sec'}, + {name: 'ref_block_num', type: 'uint16'}, + {name: 'ref_block_prefix', type: 'uint32'}, + {name: 'max_net_usage_words', type: 'varuint32'}, + {name: 'max_cpu_usage_ms', type: 'uint8'}, + {name: 'delay_sec', type: 'varuint32'}, + ], + }, 
+ { + base: 'transaction_header', + name: 'transaction', + fields: [ + {name: 'context_free_actions', type: 'action[]'}, + {name: 'actions', type: 'action[]'}, + {name: 'transaction_extensions', type: 'transaction_extension[]'}, + ], + }, + ], + actions: [], + tables: [], + ricardian_clauses: [], + action_results: [], + }) + }) + + test('circular alias', function () { + const abi = ABI.from({ + types: [ + {new_type_name: 'a', type: 'a'}, + {new_type_name: 'b1', type: 'b2'}, + {new_type_name: 'b2', type: 'b1'}, + {new_type_name: 'c1', type: 'c2'}, + {new_type_name: 'c2', type: 'c3'}, + ], + structs: [ + {base: '', name: 'c3', fields: [{name: 'f', type: 'c4'}]}, + {base: '', name: 'c4', fields: [{name: 'f', type: 'c1'}]}, + ], + }) + assert.throws(() => { + Serializer.decode({data: 'beef', type: 'a', abi}) + }) + assert.throws(() => { + Serializer.decode({data: 'beef', type: 'b1', abi}) + }) + assert.throws(() => { + Serializer.decode({data: 'beef', type: 'c1', abi}) + }) + assert.throws(() => { + Serializer.encode({object: {f: {f: {}}}, type: 'c1', abi}) + }) + }) + + test('complex type', function () { + @TypeAlias('do_you_even') + class DoYouEven extends Int128 {} + @Variant.type('several_things', [{type: Transaction, array: true}, 'bool?', DoYouEven]) + class SeveralThings extends Variant {} + @Struct.type('complex') + class Complex extends Struct { + @Struct.field(SeveralThings) declare things: SeveralThings + @Struct.field(Complex, {optional: true}) self?: Complex + } + const object = Complex.from({ + things: [ + 'transaction[]', + [ + { + actions: [ + { + account: 'eosio.token', + name: 'transfer', + authorization: [{actor: 'foo', permission: 'active'}], + data: '000000000000285d000000000000ae39e80300000000000003454f53000000000b68656c6c6f207468657265', + }, + ], + context_free_actions: [], + delay_sec: 123, + expiration: '2018-02-15T00:00:00', + max_cpu_usage_ms: 99, + max_net_usage_words: 0, + ref_block_num: 0, + ref_block_prefix: 0, + transaction_extensions: [], + }, + ], + ], + self: { + things: ['do_you_even', 2], + self: { + things: ['do_you_even', '-170141183460469231731687303715884105727'], + }, + }, + }) + const recoded = Serializer.decode({data: Serializer.encode({object}), type: Complex}) + assert.deepStrictEqual(JSON.parse(JSON.stringify(recoded)), { + things: [ + 'transaction[]', + [ + { + delay_sec: 123, + expiration: '2018-02-15T00:00:00', + max_cpu_usage_ms: 99, + max_net_usage_words: 0, + ref_block_num: 0, + ref_block_prefix: 0, + context_free_actions: [], + actions: [ + { + account: 'eosio.token', + name: 'transfer', + authorization: [{actor: 'foo', permission: 'active'}], + data: '000000000000285d000000000000ae39e80300000000000003454f53000000000b68656c6c6f207468657265', + }, + ], + transaction_extensions: [], + }, + ], + ], + self: { + things: ['do_you_even', 2], + self: { + things: ['do_you_even', '-170141183460469231731687303715884105727'], + self: null, + }, + }, + }) + }) + + test('typestresser abi', function () { + // eslint-disable-next-line @typescript-eslint/no-var-requires + + const abi = typestresserAbi + // .readFileSync(__dirname + '/') + // .toString() + const object = { + bool: true, + int8: 127, + uint8: 255, + int16: 32767, + uint16: 65535, + int32: 2147483647, + uint32: 4294967295, + int64: '9223372036854775807', + uint64: '18446744073709551615', + int128: '170141183460469231731687303715884105727', + uint128: '340282366920938463463374607431768211455', + varint32: 2147483647, + varuint32: 4294967295, + float32: '3.1415925', + float64: 
'3.141592653589793', + float128: '0xbeefbeefbeefbeefbeefbeefbeefbeef', + time_point: '2020-02-02T02:02:02.222', + time_point_sec: '2020-02-02T02:02:02', + block_timestamp_type: '2020-02-02T02:02:02.500', + name: 'foobar', + bytes: 'beef', + string: 'hello', + checksum160: 'ffffffffffffffffffffffffffffffffffffffff', + checksum256: 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', + checksum512: + 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', + public_key: 'PUB_K1_5AHoNnWetuDhKWSDx3WUf8W7Dg5xjHCMc4yHmmSiaJCFvvAgnB', + signature: + 'SIG_K1_KfPLgpw35iX8nfDzhbcmSBCr7nEGNEYXgmmempQspDJYBCKuAEs5rm3s4ZuLJY428Ca8ZhvR2Dkwu118y3NAoMDxhicRj9', + symbol: '7,PI', + symbol_code: 'PI', + asset: '3.1415926 PI', + extended_asset: { + quantity: '3.1415926 PI', + contract: 'pi.token', + }, + alias1: true, + alias2: true, + alias3: { + bool: true, + }, + alias4: ['int8', 1], + alias5: [true, true], + alias6: null, + extension: { + message: 'hello', + extension: { + message: 'world', + extension: null, + }, + }, + } + + const data = Serializer.encode({object, type: 'all_types', abi}) + assert.equal( + data.hexString, + '017fffff7fffffffffff7fffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffff7fffff' + + 'fffffffffffffffffffffffffffffeffffff0fffffffff0fda0f4940182d4454fb210940beefbeefbeefbeefbeefbeefbe' + + 'efbeefb07d56318e9d05009a2d365e35d4914b000000005c73285d02beef0568656c6c6fffffffffffffffffffffffffff' + + 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff' + + 'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff' + + 'ffffffffff000223e0ae8aacb41b06dc74af1a56b2eb69133f07f7f75bd1d5e53316bff195edf400205150a67288c3b393' + + 'fdba9061b05019c54b12bdac295fc83bebad7cd63c7bb67d5cb8cc220564da006240a58419f64d06a5c6e1fc62889816a6' + + 'c3dfdd231ed38907504900000000005049000000000000765edf01000000000750490000000000765edf01000000000750' + + '49000000000000000053419a81ab0101010001020101000568656c6c6f0105776f726c6400' + ) + const decoded = Serializer.decode({data, type: 'all_types', abi}) + assert.deepStrictEqual(JSON.parse(JSON.stringify(decoded)), object) + }) + + test('coder metadata', function () { + @TypeAlias('endian_int64') + class EndianInt64 extends Int64 { + static fromABI(decoder: ABIDecoder) { + const bigEndian = decoder.metadata['endian'] === 'big' + const data = decoder.readArray(8) + const bn = new BN(data, undefined, bigEndian ? 'be' : 'le') + return new this(bn) + } + toABI(encoder: ABIEncoder) { + const bigEndian = encoder.metadata['endian'] === 'big' + const data = this.value.toArray(bigEndian ? 
'be' : 'le', 8)
+                encoder.writeArray(data)
+            }
+        }
+        const bigData = Serializer.encode({
+            object: 255,
+            type: EndianInt64,
+            metadata: {endian: 'big'},
+        })
+        const littleData = Serializer.encode({
+            object: 255,
+            type: EndianInt64,
+            metadata: {endian: 'little'},
+        })
+        assert.equal(bigData.hexString, '00000000000000ff')
+        assert.equal(littleData.hexString, 'ff00000000000000')
+        const valueFromBig = Serializer.decode({
+            data: bigData,
+            type: EndianInt64,
+            metadata: {endian: 'big'},
+        })
+        const valueFromLittle = Serializer.decode({
+            data: littleData,
+            type: EndianInt64,
+            metadata: {endian: 'little'},
+        })
+        assert.equal(valueFromBig.toNumber(), 255)
+        assert.equal(valueFromLittle.toNumber(), 255)
+    })
+
+    test('object-only any coding', function () {
+        @Struct.type('my_struct')
+        class MyStruct extends Struct {
+            @Struct.field('any') declare foo: any
+            @Struct.field('any[]') declare bar: any[]
+            @Struct.field('any', {optional: true}) baz?: any
+            @Struct.field('name') declare account: Name
+        }
+        const decoded = Serializer.decode({
+            object: {
+                foo: 'hello',
+                bar: [1, 'two', false],
+                account: 'foobar1234',
+            },
+            type: MyStruct,
+        })
+        assert.deepEqual(JSON.parse(JSON.stringify(decoded)), {
+            foo: 'hello',
+            bar: [1, 'two', false],
+            baz: null,
+            account: 'foobar1234',
+        })
+        const abi = Serializer.synthesize(MyStruct)
+        const decoded2 = Serializer.decode({
+            object: {
+                foo: {nested: 'obj'},
+                bar: [],
+                baz: {b: {a: {z: 'zz'}}},
+                account: 'foo',
+            },
+            type: 'my_struct',
+            abi,
+        })
+        assert.deepEqual(JSON.parse(JSON.stringify(decoded2)), {
+            foo: {nested: 'obj'},
+            bar: [],
+            baz: {b: {a: {z: 'zz'}}},
+            account: 'foo',
+        })
+        assert.throws(() => {
+            Serializer.decode({data: 'beef', type: MyStruct})
+        })
+        assert.throws(() => {
+            Serializer.encode({object: decoded})
+        })
+    })
+
+    test('coding with type descriptors', function () {
+        const array = Serializer.decode({
+            data: '020000ffff',
+            type: {type: UInt16, array: true},
+        }) as UInt16[]
+        assert.deepEqual(array.map(Number), [0, 65535])
+        const optional = Serializer.decode({
+            data: '00',
+            type: {type: Transaction, optional: true},
+        })
+        assert.strictEqual(optional, null)
+        const obj = Serializer.decode({
+            object: [false, true, false],
+            type: {type: 'bool', array: true},
+        })
+        assert.deepEqual(obj, [false, true, false])
+        @Struct.type('my_struct')
+        class MyStruct extends Struct {
+            @Struct.field('uint16') declare foo: UInt16
+        }
+        const encoded = Serializer.encode({
+            object: [{foo: 0}, {foo: 65535}],
+            type: {type: MyStruct, array: true},
+        })
+        assert.equal(encoded.hexString, '020000ffff')
+        const decoded = Serializer.decode({
+            data: '020000ffff',
+            type: {type: MyStruct, array: true},
+        }) as MyStruct[]
+        assert.equal(decoded.length, 2)
+        assert.equal(
+            decoded.every((v) => v instanceof MyStruct),
+            true
+        )
+        assert.deepEqual(
+            decoded.map((v) => Number(v.foo)),
+            [0, 65535]
+        )
+    })
+
+    test('unicode', function () {
+        const data = Serializer.encode({object: '😷'})
+        const text = Serializer.decode({data, type: 'string'})
+        assert.strictEqual(text, '😷')
+    })
+
+    test('argument mutation', function () {
+        // should never mutate input values to 'from' methods
+        @Struct.type('test_obj')
+        class TestObj extends Struct {
+            @Struct.field('asset') declare asset: Asset
+            @Struct.field('int32') int32!: Int32
+            @Struct.field(PermissionLevel) declare auth: PermissionLevel
+        }
+        const object = {asset: '1.3 ROCKS', int32: 1234, auth: {actor: 'foo', permission: 'bar'}}
+        const original = JSON.parse(JSON.stringify(object))
+        assert.deepStrictEqual(object, original)
+        TestObj.from(object)
+        assert.deepStrictEqual(object, original)
+        Serializer.decode({object, type: 'test_obj', customTypes: [TestObj]})
+        assert.deepStrictEqual(object, original)
+    })
+
+    test('abi resolve all', function () {
+        const abi = ABI.from({
+            types: [
+                {new_type_name: 'a', type: 'a'},
+                {new_type_name: 'b1', type: 'b2'},
+                {new_type_name: 'b2', type: 'b1'},
+                {new_type_name: 'c1', type: 'c2'},
+                {new_type_name: 'c2', type: 'c3'},
+            ],
+            structs: [
+                {base: '', name: 'c3', fields: [{name: 'f', type: 'c4'}]},
+                {base: '', name: 'c4', fields: [{name: 'f', type: 'c1'}]},
+                {base: 'c4', name: 'c5', fields: [{name: 'f2', type: 'c5[]?'}]},
+            ],
+            variants: [{name: 'c6', types: ['a', 'b1', 'c1', 'c5']}],
+        })
+        const types = abi.resolveAll()
+        const allTypes = types.types.concat(types.structs).concat(types.variants)
+        const maxId = allTypes.reduce((p, v) => (v.id > p ? v.id : p), 0)
+        assert.equal(maxId, 9)
+    })
+
+    test('objectify', function () {
+        const tx = Transaction.from({
+            ref_block_num: 123,
+            ref_block_prefix: 456,
+            expiration: 992,
+            actions: [
+                {
+                    account: 'eosio.token',
+                    name: 'transfer',
+                    authorization: [{actor: 'foo', permission: 'active'}],
+                    data: '0000000000855c340000000000000e3da40100000000000001474d5a0000000007666f7220796f75',
+                },
+            ],
+        })
+        assert.deepStrictEqual(Serializer.objectify(tx), {
+            expiration: '1970-01-01T00:16:32',
+            ref_block_num: 123,
+            ref_block_prefix: 456,
+            max_net_usage_words: 0,
+            max_cpu_usage_ms: 0,
+            delay_sec: 0,
+            context_free_actions: [],
+            actions: [
+                {
+                    account: 'eosio.token',
+                    name: 'transfer',
+                    authorization: [{actor: 'foo', permission: 'active'}],
+                    data: '0000000000855c340000000000000e3da40100000000000001474d5a0000000007666f7220796f75',
+                },
+            ],
+            transaction_extensions: [],
+        })
+    })
+
+    test('struct optional field', function () {
+        @Struct.type('test')
+        class Test extends Struct {
+            @Struct.field('string') declare a: string
+            @Struct.field('string?') b?: string
+            @Struct.field('string', {optional: true}) c?: string
+            @Struct.field('string[]?') d?: string
+        }
+        assert.doesNotThrow(() => {
+            Test.from({a: 'foo'})
+        })
+        assert.throws(() => {
+            Test.from({b: 'foo'})
+        }, /encountered undefined for non-optional/)
+    })
+
+    test('abi def', function () {
+        const abi = ABI.from({
+            types: [{new_type_name: 'b', type: 'a'}],
+            structs: [{base: '', name: 'a', fields: [{name: 'f', type: 'a'}]}],
+            tables: [
+                {name: 't', type: 'a', index_type: 'i64', key_names: ['k'], key_types: ['i64']},
+            ],
+            ricardian_clauses: [{id: 'foo', body: 'bar'}],
+            variants: [{name: 'v', types: ['a', 'b']}],
+        })
+        const data = Serializer.encode({object: abi})
+        assert.equal(
+            data.hexString,
+            '0e656f73696f3a3a6162692f312e310101620161010161000101660161000100000000000000c80369363401016b010369363401610103666f6f036261720000010176020161016200'
+        )
+        const decoded = Serializer.objectify(Serializer.decode({data, type: ABI}))
+        assert.deepEqual(abi.types, decoded.types)
+        assert.deepEqual(abi.structs, decoded.structs)
+        assert.deepEqual(abi.tables, decoded.tables)
+        assert.deepEqual(abi.ricardian_clauses, decoded.ricardian_clauses)
+        assert.deepEqual(abi.variants, decoded.variants)
+        assert.ok(abi.equals(decoded))
+    })
+
+    test('binary extensions', function () {
+        @Struct.type('info_pair')
+        class InfoPair extends Struct {
+            @Struct.field('string') declare key: string
+            @Struct.field('bytes') declare value: Bytes
+        }
+        @TypeAlias('super_int')
+        class SuperInt extends UInt8 {
+            static abiDefault() {
+                return SuperInt.from(42)
+            }
+        }
+        @Variant.type('jazz_variant', [SuperInt, 'string'])
+        class JazzVariant extends Variant {}
+        @Struct.type('many_extensions')
+        class ManyExtensions extends Struct {
+            @Struct.field('string') declare name: string
+            @Struct.field(InfoPair, {array: true, extension: true}) declare info: InfoPair[]
+            @Struct.field(InfoPair, {extension: true}) declare singleInfo: InfoPair
+            @Struct.field('uint32$') declare uint32: UInt32
+            @Struct.field('asset$') declare asset: Asset
+            @Struct.field('checksum256$') declare checksum256: Checksum256
+            @Struct.field(SuperInt, {extension: true}) declare superInt: SuperInt
+            @Struct.field(JazzVariant, {extension: true}) declare jazz: JazzVariant
+            @Struct.field(JazzVariant, {extension: true, optional: true})
+            declare maybeJazz?: JazzVariant
+            @Struct.field('bool?$') declare dumbBool?: boolean
+            @Struct.field('bool$') declare bool: boolean
+        }
+        const res1 = Serializer.decode({
+            data: '03666f6f',
+            type: ManyExtensions,
+            strictExtensions: true,
+        })
+        assert.equal(res1.uint32.toNumber(), 0)
+        assert.equal(res1.asset.toString(), '0.0000 SYS')
+        assert.equal(res1.superInt.toNumber(), 42)
+        assert.equal(res1.jazz.value, 42)
+        assert.strictEqual(res1.maybeJazz, null)
+        assert.strictEqual(res1.dumbBool, null)
+        assert.strictEqual(res1.bool, false)
+        const res2 = Serializer.decode({
+            object: {name: 'foo'},
+            type: ManyExtensions,
+            strictExtensions: true,
+        })
+        assert.ok(res1.equals(res2))
+        const abi = Serializer.synthesize(ManyExtensions)
+        const res3 = Serializer.decode({
+            object: {name: 'foo', dumbBool: false},
+            abi,
+            type: 'many_extensions',
+            strictExtensions: true,
+        }) as any
+        assert.equal(res3.superInt.toNumber(), 0) // expected since we lose coupling to the SuperInt type implementation and it resolves to UInt8 instead
+        assert.equal(res3.jazz[0], 'super_int')
+        assert.strictEqual(res3.dumbBool, false)
+        assert.strictEqual(res3.bool, false)
+        const res4 = Serializer.decode({
+            object: {name: 'foo', jazz: JazzVariant.from('hi'), maybeJazz: ['super_int', 22]},
+            abi,
+            type: 'many_extensions',
+            customTypes: [SuperInt, JazzVariant],
+            strictExtensions: true,
+        }) as any
+        assert.equal(res4.superInt.toNumber(), 42) // coupling restored
+        assert.equal(res4.jazz.value, 'hi')
+        assert.equal(res4.maybeJazz.value, 22)
+        const OptimisticBool: any = {
+            // don't try this at home, just because you can doesn't mean you should
+            abiName: 'bool',
+            abiDefault: () => true,
+            from: (value: boolean): boolean => value,
+        }
+        const res5 = Serializer.decode({
+            object: {name: 'foo'},
+            abi,
+            type: 'many_extensions',
+            customTypes: [SuperInt, JazzVariant, OptimisticBool],
+            strictExtensions: true,
+        }) as any
+        assert.strictEqual(res5.bool, true)
+
+        abi.structs[1].fields[1].type = 'many_extensions$'
+        assert.throws(() => {
+            Serializer.decode({
+                data: '03666f6f',
+                abi,
+                type: 'many_extensions',
+                strictExtensions: true,
+            })
+        }, /Circular type reference/)
+    })
+
+    test('action_results', function () {
+        const raw = {
+            ____comment: 'This file was generated with eosio-abigen. DO NOT EDIT ',
+            version: 'eosio::abi/1.2',
+            types: [],
+            structs: [
+                {
+                    name: 'Result',
+                    base: '',
+                    fields: [
+                        {
+                            name: 'id',
+                            type: 'uint32',
+                        },
+                    ],
+                },
+                {
+                    name: 'test',
+                    base: '',
+                    fields: [
+                        {
+                            name: 'eos_account',
+                            type: 'name',
+                        },
+                    ],
+                },
+            ],
+            actions: [
+                {
+                    name: 'test',
+                    type: 'test',
+                    ricardian_contract: '',
+                },
+            ],
+            tables: [],
+            ricardian_clauses: [],
+            variants: [],
+            action_results: [
+                {
+                    name: 'test',
+                    result_type: 'Result',
+                },
+            ],
+        }
+        const abi = ABI.from(raw)
+        const encoded = Serializer.encode({object: abi})
+        const decoded = Serializer.decode({data: encoded, type: ABI})
+        assert.isTrue(abi.equals(decoded))
+        assert.lengthOf(decoded.action_results, 1)
+        assert.isTrue(Name.from(decoded.action_results[0].name).equals('test'))
+        assert.equal(decoded.action_results[0].result_type, 'Result')
+    })
+*/
diff --git a/crates/antelope/tests/utils/mock_provider.rs b/crates/antelope/tests/utils/mock_provider.rs
index 0e91d32..ca96162 100644
--- a/crates/antelope/tests/utils/mock_provider.rs
+++ b/crates/antelope/tests/utils/mock_provider.rs
@@ -1,22 +1,26 @@
-use std::fs;
-use std::path::PathBuf;
 use antelope::api::client::{HTTPMethod, Provider};
 use antelope::api::v1::structs::GetInfoResponse;
+use antelope::chain::action::{Action, PermissionLevel};
 use antelope::chain::asset::Asset;
 use antelope::chain::checksum::Checksum160;
 use antelope::chain::name::Name;
 use antelope::chain::private_key::PrivateKey;
 use antelope::chain::transaction::{SignedTransaction, Transaction};
-use antelope::chain::{Packer, Encoder, Decoder};
-use antelope::chain::action::{Action, PermissionLevel};
+use antelope::chain::{Decoder, Encoder, Packer};
 use antelope::name;
 use antelope_macros::StructPacker;
+use std::fs;
+use std::path::PathBuf;

-pub struct MockProvider {
-}
+pub struct MockProvider {}

 impl MockProvider {
-    fn call(&self, method: HTTPMethod, path: String, body: Option) -> Result {
+    fn call(
+        &self,
+        method: HTTPMethod,
+        path: String,
+        body: Option,
+    ) -> Result {
         let mut to_hash = method.to_string() + &path;
         if body.is_some() {
             to_hash += body.unwrap().as_str();
@@ -40,7 +44,7 @@ impl Provider for MockProvider {
     }
 }

-pub fn make_mock_transaction(info: &GetInfoResponse) -> Transaction {
+pub fn make_mock_transaction(info: &GetInfoResponse, asset_to_transfer: Asset) -> Transaction {
     let trx_header = info.get_transaction_header(90);

     #[derive(Clone, Eq, PartialEq, Default, StructPacker)]
@@ -48,13 +52,13 @@ pub fn make_mock_transaction(info: &GetInfoResponse) -> Transaction {
         from: Name,
         to: Name,
         quantity: Asset,
-        memo: String
+        memo: String,
     }

     let transfer_data = Transfer {
         from: name!("corecorecore"),
         to: name!("teamgreymass"),
-        quantity: Asset::from_string("0.0420 TLOS"),
+        quantity: asset_to_transfer,
         memo: String::from("Testing antelope-rs"),
     };

@@ -62,7 +66,7 @@ pub fn make_mock_transaction(info: &GetInfoResponse) -> Transaction {
         name!("eosio.token"),
         name!("transfer"),
         vec![PermissionLevel::new(name!("corecorecore"), name!("active"))],
-        &transfer_data
+        &transfer_data,
     );

     Transaction {
@@ -74,11 +78,12 @@ pub fn make_mock_transaction(info: &GetInfoResponse) -> Transaction {

 pub fn sign_mock_transaction(trx: &Transaction, info: &GetInfoResponse) -> SignedTransaction {
-    let private_key = PrivateKey::from_str("5JW71y3njNNVf9fiGaufq8Up5XiGk68jZ5tYhKpy69yyU9cr7n9", false).unwrap();
+    let private_key =
+        PrivateKey::from_str("5JW71y3njNNVf9fiGaufq8Up5XiGk68jZ5tYhKpy69yyU9cr7n9", false).unwrap();
     let sign_data = trx.signing_data(&info.chain_id.data.to_vec());

     SignedTransaction {
         transaction: trx.clone(),
         signatures: vec![private_key.sign_message(&sign_data)],
         context_free_data: vec![],
     }
-}
\ No newline at end of file
+}
diff --git a/crates/antelope/tests/utils/mock_provider_data/d188458060901bf0f84748ca127d8e392c6b7db6.json b/crates/antelope/tests/utils/mock_provider_data/d188458060901bf0f84748ca127d8e392c6b7db6.json
new file mode 100644
index 0000000..956f039
--- /dev/null
+++ b/crates/antelope/tests/utils/mock_provider_data/d188458060901bf0f84748ca127d8e392c6b7db6.json
@@ -0,0 +1,23 @@
+{
+  "code": 500,
+  "message": "Internal Service Error",
+  "error": {
+    "code": 3050003,
+    "name": "eosio_assert_message_exception",
+    "what": "eosio_assert_message assertion failure",
+    "details": [
+      {
+        "message": "assertion failure with message: unable to find key",
+        "file": "cf_system.cpp",
+        "line_number": 14,
+        "method": "eosio_assert"
+      },
+      {
+        "message": "pending console output: ",
+        "file": "apply_context.cpp",
+        "line_number": 124,
+        "method": "exec_one"
+      }
+    ]
+  }
+}
\ No newline at end of file
diff --git a/crates/antelope/tests/utils/mod.rs b/crates/antelope/tests/utils/mod.rs
index 87d50be..eab80f3 100644
--- a/crates/antelope/tests/utils/mod.rs
+++ b/crates/antelope/tests/utils/mod.rs
@@ -2,4 +2,4 @@
 //pub mod base58;
 //pub mod crypto;
 //pub mod serializer;
-pub mod mock_provider;
\ No newline at end of file
+pub mod mock_provider;
diff --git a/crates/antelope_macros/lib.rs b/crates/antelope_macros/lib.rs
index aeb99a6..bc3dc1a 100644
--- a/crates/antelope_macros/lib.rs
+++ b/crates/antelope_macros/lib.rs
@@ -1,4 +1,4 @@
-use proc_macro::{TokenStream};
+use proc_macro::TokenStream;
 use quote::quote;
 use syn::{parse_macro_input, DeriveInput, Fields};

@@ -64,4 +64,4 @@ pub fn your_macro_name_derive(input: TokenStream) -> TokenStream {

     // Return the generated implementation
     TokenStream::from(expanded)
-}
\ No newline at end of file
+}
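For context, a minimal integration-test sketch (hypothetical, not part of this diff) showing how MockProvider, make_mock_transaction, sign_mock_transaction, and APIClient::custom_provider compose. The test name, the module wiring, and the assumption that this particular request body hashes to the 500-error fixture added above are illustrative only.

mod utils; // assumes this sketch lives next to tests/utils/mod.rs

use antelope::api::client::APIClient;
use antelope::chain::asset::Asset;
use crate::utils::mock_provider::{make_mock_transaction, sign_mock_transaction, MockProvider};

#[test]
fn send_transaction_error_path() {
    // MockProvider replays canned JSON from tests/utils/mock_provider_data, keyed by a
    // hash of method + path + body, so no network access is involved.
    let Ok(client) = APIClient::custom_provider(Box::new(MockProvider {})) else {
        panic!("failed to construct APIClient");
    };
    let Ok(info) = client.v1_chain.get_info() else {
        panic!("get_info should be answered by a canned response");
    };
    // Assumption: this quantity yields a request body whose hash resolves to the
    // 500 "eosio_assert_message_exception" fixture added in this change.
    let transaction = make_mock_transaction(&info, Asset::from_string("0.0420 TLOS"));
    let signed = sign_mock_transaction(&transaction, &info);
    let result = client.v1_chain.send_transaction(signed);
    assert!(result.is_err(), "the canned 500 response should surface as Err");
}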