diff --git a/adm/Cargo.toml b/adm/Cargo.toml index a9b43f347d..e037a84f55 100644 --- a/adm/Cargo.toml +++ b/adm/Cargo.toml @@ -33,6 +33,7 @@ sawtooth-sdk = "0.3" serde = "1.0" serde_derive = "1.0" serde_yaml = "0.8" +thiserror = "1.0.36" [build-dependencies] glob = "0.3" diff --git a/adm/src/blockstore.rs b/adm/src/blockstore.rs index da0ce2c280..4ebdb29958 100644 --- a/adm/src/blockstore.rs +++ b/adm/src/blockstore.rs @@ -34,12 +34,8 @@ impl<'a> Blockstore<'a> { let reader = self.db.reader()?; let packed = reader .get(block_id.as_bytes()) - .ok_or_else(|| DatabaseError::NotFoundError(format!("Block not found: {block_id}")))?; - let block: Block = Message::parse_from_bytes(&packed).map_err(|err| { - DatabaseError::CorruptionError(format!( - "Could not interpret stored data as a block: {err}" - )) - })?; + .ok_or_else(|| DatabaseError::NotFound(format!("Block not found: {block_id}")))?; + let block: Block = Message::parse_from_bytes(&packed)?; Ok(block) } @@ -49,18 +45,13 @@ impl<'a> Blockstore<'a> { let block_id = reader .index_get("index_block_num", block_num.as_bytes()) .and_then(|block_id| { - block_id.ok_or_else(|| { - DatabaseError::NotFoundError(format!("Block not found: {height}")) - }) + block_id + .ok_or_else(|| DatabaseError::NotFound(format!("Block not found: {height}"))) })?; - let packed = reader.get(&block_id).ok_or_else(|| { - DatabaseError::CorruptionError(format!("Block not found: {block_id:?}")) - })?; - let block: Block = Message::parse_from_bytes(&packed).map_err(|err| { - DatabaseError::CorruptionError(format!( - "Could not interpret stored data as a block: {err}" - )) - })?; + let packed = reader + .get(&block_id) + .ok_or_else(|| DatabaseError::NotFound(format!("Block not found: {block_id:?}")))?; + let block: Block = Message::parse_from_bytes(&packed)?; Ok(block) } @@ -69,18 +60,13 @@ impl<'a> Blockstore<'a> { let block_id = reader .index_get("index_batch", batch_id.as_bytes()) .and_then(|block_id| { - block_id.ok_or_else(|| { - DatabaseError::NotFoundError(format!("Batch not found: {batch_id}")) - }) + block_id + .ok_or_else(|| DatabaseError::NotFound(format!("Batch not found: {batch_id}"))) })?; - let packed = reader.get(&block_id).ok_or_else(|| { - DatabaseError::CorruptionError(format!("Block not found: {block_id:?}")) - })?; - let block: Block = Message::parse_from_bytes(&packed).map_err(|err| { - DatabaseError::CorruptionError(format!( - "Could not interpret stored data as a block: {err}" - )) - })?; + let packed = reader + .get(&block_id) + .ok_or_else(|| DatabaseError::NotFound(format!("Block not found: {block_id:?}")))?; + let block: Block = Message::parse_from_bytes(&packed)?; Ok(block) } @@ -90,30 +76,21 @@ impl<'a> Blockstore<'a> { .index_get("index_transaction", transaction_id.as_bytes()) .and_then(|block_id| { block_id.ok_or_else(|| { - DatabaseError::NotFoundError(format!("Transaction not found: {transaction_id}")) + DatabaseError::NotFound(format!("Transaction not found: {transaction_id}")) }) })?; - let packed = reader.get(&block_id).ok_or_else(|| { - DatabaseError::CorruptionError(format!("Block not found: {block_id:?}")) - })?; - let block: Block = Message::parse_from_bytes(&packed).map_err(|err| { - DatabaseError::CorruptionError(format!( - "Could not interpret stored data as a block: {err}" - )) - })?; + let packed = reader + .get(&block_id) + .ok_or_else(|| DatabaseError::NotFound(format!("Block not found: {block_id:?}")))?; + let block: Block = Message::parse_from_bytes(&packed)?; Ok(block) } pub fn put(&self, block: &Block) -> Result<(), 
DatabaseError> { - let block_header: BlockHeader = - Message::parse_from_bytes(&block.header).map_err(|err| { - DatabaseError::CorruptionError(format!("Invalid block header: {err}")) - })?; + let block_header: BlockHeader = Message::parse_from_bytes(&block.header)?; let mut writer = self.db.writer()?; // Add block to main db - let packed = block.write_to_bytes().map_err(|err| { - DatabaseError::WriterError(format!("Failed to serialize block: {err}")) - })?; + let packed = block.write_to_bytes()?; writer.put(block.header_signature.as_bytes(), &packed)?; // Add block to block num index @@ -149,10 +126,8 @@ impl<'a> Blockstore<'a> { pub fn delete(&self, block_id: &str) -> Result<(), DatabaseError> { let block = self.get(block_id)?; let block_id = &block.header_signature; - let block_header: BlockHeader = - Message::parse_from_bytes(&block.header).map_err(|err| { - DatabaseError::CorruptionError(format!("Invalid block header: {err}")) - })?; + let block_header: BlockHeader = Message::parse_from_bytes(&block.header)?; + // Delete block from main db let mut writer = self.db.writer()?; writer.delete(block_id.as_bytes())?; @@ -181,10 +156,8 @@ impl<'a> Blockstore<'a> { let mut cursor = reader.index_cursor("index_block_num")?; let (_, val) = cursor .last() - .ok_or_else(|| DatabaseError::NotFoundError("No chain head".into()))?; - String::from_utf8(val).map_err(|err| { - DatabaseError::CorruptionError(format!("Chain head block id is corrupt: {err}")) - }) + .ok_or_else(|| DatabaseError::NotFound("No chain head".into()))?; + Ok(String::from_utf8(val)?) } // Get the number of blocks @@ -231,23 +204,19 @@ mod tests { /// deleting, and looking up blocks), making assertions about the /// blockstore contents at each step. #[test] - fn test_blockstore() { + fn test_blockstore() -> Result<(), DatabaseError> { let path_config = config::get_path_config(); let blockstore_path = &path_config.data_dir.join(config::get_blockstore_filename()); // Set the file size to 10MB, so as to support file systems that do // not support sparse files. - let ctx = LmdbContext::new(blockstore_path, 3, Some(10 * 1024 * 1024)) - .map_err(|err| DatabaseError::InitError(format!("{err}"))) - .unwrap(); + let ctx = LmdbContext::new(blockstore_path, 3, Some(10 * 1024 * 1024))?; let database = LmdbDatabase::new( &ctx, &["index_batch", "index_transaction", "index_block_num"], - ) - .map_err(|err| DatabaseError::InitError(format!("{err}"))) - .unwrap(); + )?; let blockstore = Blockstore::new(database); @@ -260,9 +229,9 @@ mod tests { block.set_header_signature(format!("block-{i}")); let mut header = BlockHeader::new(); header.set_block_num(i); - block.set_header(header.write_to_bytes().unwrap()); + block.set_header(header.write_to_bytes()?); - blockstore.put(&block).unwrap(); + blockstore.put(&block)?; assert_current_height(i as usize + 1, &blockstore); assert_chain_head(format!("block-{i}"), &blockstore); @@ -272,13 +241,13 @@ mod tests { // Check that the blocks are in the right order. for i in 0..5 { - let block = blockstore.get_by_height(i).unwrap(); + let block = blockstore.get_by_height(i)?; assert_header_signature(block, format!("block-{i}")); } // Get a block. 
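The wholesale removal of map_err closures above works because the reworked DatabaseError (adm/src/database/error.rs, later in this patch) derives thiserror::Error and gains #[from] conversions, so `?` turns a protobuf::ProtobufError or FromUtf8Error into a DatabaseError automatically. A minimal standalone sketch of that pattern, using illustrative names rather than the crate's actual types:

    use protobuf::Message;
    use thiserror::Error;

    #[derive(Error, Debug)]
    enum StoreError {
        // `#[from]` generates `impl From<protobuf::ProtobufError> for StoreError`,
        // which is what lets `parse_from_bytes(..)?` compile without a map_err.
        #[error("could not interpret stored data: {0}")]
        Protobuf(#[from] protobuf::ProtobufError),
    }

    // Decode any protobuf message, letting `?` perform the error conversion.
    fn decode<M: Message>(packed: &[u8]) -> Result<M, StoreError> {
        Ok(M::parse_from_bytes(packed)?)
    }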
- let get_block = blockstore.get("block-2").unwrap(); + let get_block = blockstore.get("block-2")?; assert_header_signature(get_block, String::from("block-2")); @@ -290,32 +259,34 @@ mod tests { batch.set_header_signature(String::from("batch")); batch.set_transactions(protobuf::RepeatedField::from_vec(vec![transaction])); let batch_header = BatchHeader::new(); - batch.set_header(batch_header.write_to_bytes().unwrap()); + batch.set_header(batch_header.write_to_bytes()?); let mut block = Block::new(); block.set_header_signature(String::from("block-with-batch")); let mut block_header = BlockHeader::new(); block_header.set_block_num(6); - block.set_header(block_header.write_to_bytes().unwrap()); + block.set_header(block_header.write_to_bytes()?); block.set_batches(protobuf::RepeatedField::from_vec(vec![batch])); - blockstore.put(&block).unwrap(); + blockstore.put(&block)?; assert_current_height(6, &blockstore); assert_chain_head(String::from("block-with-batch"), &blockstore); - let get_by_batch = blockstore.get_by_batch("batch").unwrap(); + let get_by_batch = blockstore.get_by_batch("batch")?; assert_header_signature(get_by_batch, String::from("block-with-batch")); - let get_by_transaction = blockstore.get_by_transaction("transaction").unwrap(); + let get_by_transaction = blockstore.get_by_transaction("transaction")?; assert_header_signature(get_by_transaction, String::from("block-with-batch")); // Delete a block. - blockstore.delete("block-with-batch").unwrap(); + blockstore.delete("block-with-batch")?; assert_current_height(5, &blockstore); assert_chain_head(String::from("block-4"), &blockstore); + + Ok(()) } } diff --git a/adm/src/commands/blockstore.rs b/adm/src/commands/blockstore.rs index 977ac69709..d585056949 100644 --- a/adm/src/commands/blockstore.rs +++ b/adm/src/commands/blockstore.rs @@ -59,24 +59,24 @@ fn run_backup_command(args: &ArgMatches) -> Result<(), CliError> { let filepath = args .value_of("output") - .ok_or_else(|| CliError::ArgumentError("No output file".into()))?; - let mut file = File::create(filepath) - .map_err(|err| CliError::EnvironmentError(format!("Failed to create file: {err}")))?; + .ok_or_else(|| CliError::Argument("No output file".into()))?; + + let mut file = File::create(filepath)?; let mut current = match args.value_of("start") { None => blockstore .get_chain_head() - .map_err(|err| CliError::EnvironmentError(format!("unable to read chain head: {err}"))), + .map_err(|err| CliError::Environment(format!("unable to read chain head: {err}"))), Some(sig) => Ok(sig.into()), }?; while current != NULL_BLOCK_IDENTIFIER { let block = blockstore.get(¤t).map_err(|err| { - CliError::EnvironmentError(format!("Block in chain missing from blockstore: {err}")) + CliError::Environment(format!("Block in chain missing from blockstore: {err}")) })?; backup_block(&block, &mut file)?; let block_header: BlockHeader = Message::parse_from_bytes(&block.header) - .map_err(|err| CliError::ParseError(format!("Unable to read block header: {err}")))?; + .map_err(|err| CliError::Parse(format!("Unable to read block header: {err}")))?; current = block_header.previous_block_id } Ok(()) @@ -88,16 +88,16 @@ fn run_restore_command(args: &ArgMatches) -> Result<(), CliError> { let filepath = args .value_of("input") - .ok_or_else(|| CliError::ArgumentError("No input file".into()))?; + .ok_or_else(|| CliError::Argument("No input file".into()))?; let mut file = File::open(filepath) - .map_err(|err| CliError::EnvironmentError(format!("Failed to open file: {err}")))?; + .map_err(|err| 
CliError::Environment(format!("Failed to open file: {err}")))?; let mut source = protobuf::CodedInputStream::new(&mut file); while let Some(block) = restore_block(&mut source)? { blockstore .put(&block) - .map_err(|err| CliError::EnvironmentError(format!("Failed to put block: {err}")))?; + .map_err(|err| CliError::Environment(format!("Failed to put block: {err}")))?; } Ok(()) } @@ -114,9 +114,9 @@ fn run_list_command(args: &ArgMatches) -> Result<(), CliError> { // Get the chain head let head_sig = match args.value_of("start") { - None => blockstore.get_chain_head().map_err(|err| { - CliError::EnvironmentError(format!("failed to get chain head id: {err}")) - }), + None => blockstore + .get_chain_head() + .map_err(|err| CliError::Environment(format!("failed to get chain head id: {err}"))), Some(sig) => Ok(sig.into()), }?; @@ -126,11 +126,11 @@ fn run_list_command(args: &ArgMatches) -> Result<(), CliError> { while block_id != NULL_BLOCK_IDENTIFIER && count > 0 { let block = blockstore.get(&block_id).map_err(|err| { - CliError::EnvironmentError(format!("failed to read block {block_id}: {err}")) + CliError::Environment(format!("failed to read block {block_id}: {err}")) })?; let block_header: BlockHeader = Message::parse_from_bytes(&block.header).map_err(|err| { - CliError::ParseError(format!( + CliError::Parse(format!( "failed to parse header for block {block_id}: {err}" )) })?; @@ -184,38 +184,37 @@ fn run_show_command(args: &ArgMatches) -> Result<(), CliError> { if args.is_present("block") { let block = args .value_of("block") - .ok_or_else(|| CliError::ArgumentError("No block".into()))?; + .ok_or_else(|| CliError::Argument("No block".into()))?; blockstore.get(block) } else if args.is_present("batch") { let batch = args .value_of("batch") - .ok_or_else(|| CliError::ArgumentError("No batch".into()))?; + .ok_or_else(|| CliError::Argument("No batch".into()))?; blockstore.get_by_batch(batch) } else if args.is_present("transaction") { let transaction = args .value_of("transaction") - .ok_or_else(|| CliError::ArgumentError("No transaction".into()))?; + .ok_or_else(|| CliError::Argument("No transaction".into()))?; blockstore.get_by_transaction(transaction) } else if args.is_present("blocknum") { let blocknum = args .value_of("blocknum") - .ok_or_else(|| CliError::ArgumentError("No block num".into()))?; + .ok_or_else(|| CliError::Argument("No block num".into()))?; let height: u64 = blocknum .parse() - .map_err(|err| CliError::ArgumentError(format!("Invalid block num: {err}")))?; + .map_err(|err| CliError::Argument(format!("Invalid block num: {err}")))?; blockstore.get_by_height(height) } else { - return Err(CliError::ArgumentError("No identifier specified".into())); + return Err(CliError::Argument("No identifier specified".into())); } } - .map_err(|err| CliError::ArgumentError(format!("Error getting block: {err}")))?; + .map_err(|err| CliError::Argument(format!("Error getting block: {err}")))?; - let block_wrapper = BlockWrapper::try_from(block).map_err(|err| { - CliError::EnvironmentError(format!("failed to create block wrapper: {err}")) - })?; + let block_wrapper = BlockWrapper::try_from(block) + .map_err(|err| CliError::Environment(format!("failed to create block wrapper: {err}")))?; let block_yaml = serde_yaml::to_string(&block_wrapper).map_err(|err| { - CliError::EnvironmentError(format!("failed to serialize block wrapper: {err}")) + CliError::Environment(format!("failed to serialize block wrapper: {err}")) })?; println!("{block_yaml}"); @@ -228,26 +227,26 @@ fn run_prune_command(args: 
&ArgMatches) -> Result<(), CliError> { let block_id = args .value_of("block") - .ok_or_else(|| CliError::ArgumentError("No block id".into()))?; + .ok_or_else(|| CliError::Argument("No block id".into()))?; blockstore .get(block_id) - .map_err(|_| CliError::ArgumentError(format!("Block not found: {block_id}")))?; + .map_err(|_| CliError::Argument(format!("Block not found: {block_id}")))?; // Get the chain head let chain_head = blockstore .get_chain_head() - .map_err(|err| CliError::EnvironmentError(format!("failed to get chain head id: {err}")))?; + .map_err(|err| CliError::Environment(format!("failed to get chain head id: {err}")))?; let mut current = blockstore.get(&chain_head).map_err(|err| { - CliError::EnvironmentError(format!("failed to get chain head ({chain_head}): {err}")) + CliError::Environment(format!("failed to get chain head ({chain_head}): {err}")) })?; loop { blockstore .delete(¤t.header_signature) .map_err(|err| { - CliError::EnvironmentError(format!( + CliError::Environment(format!( "failed to delete block {}: {}", current.header_signature, err )) @@ -256,14 +255,14 @@ fn run_prune_command(args: &ArgMatches) -> Result<(), CliError> { break; } let header: BlockHeader = Message::parse_from_bytes(¤t.header).map_err(|err| { - CliError::ParseError(format!( + CliError::Parse(format!( "failed to parse block_header for block {}: {}", current.header_signature, err )) })?; current = blockstore.get(&header.previous_block_id).map_err(|err| { - CliError::EnvironmentError(format!( + CliError::Environment(format!( "failed to read block {}: {}", header.previous_block_id, err )) @@ -278,30 +277,25 @@ fn run_export_command(args: &ArgMatches) -> Result<(), CliError> { let block_id = args .value_of("block") - .ok_or_else(|| CliError::ArgumentError("No block id".into()))?; + .ok_or_else(|| CliError::Argument("No block id".into()))?; let block = blockstore .get(block_id) - .map_err(|_| CliError::ArgumentError(format!("Block not found: {block_id}")))?; + .map_err(|_| CliError::Argument(format!("Block not found: {block_id}")))?; match args.value_of("output") { Some(filepath) => { - let mut file = File::create(filepath).map_err(|err| { - CliError::EnvironmentError(format!("Failed to create file: {err}")) - })?; + let mut file = File::create(filepath) + .map_err(|err| CliError::Environment(format!("Failed to create file: {err}")))?; block.write_to_writer(&mut file).map_err(|err| { - CliError::EnvironmentError(format!( - "failed to write {block_id} to {filepath}: {err}" - )) + CliError::Environment(format!("failed to write {block_id} to {filepath}: {err}")) }) } None => { let stdout = io::stdout(); let mut handle = stdout.lock(); block.write_to_writer(&mut handle).map_err(|err| { - CliError::EnvironmentError(format!( - "failed to write block {block_id} to stdout: {err}" - )) + CliError::Environment(format!("failed to write block {block_id} to stdout: {err}")) }) } } @@ -313,39 +307,39 @@ fn run_import_command(args: &ArgMatches) -> Result<(), CliError> { let filepath = args .value_of("blockfile") - .ok_or_else(|| CliError::ArgumentError("No file".into()))?; + .ok_or_else(|| CliError::Argument("No file".into()))?; let mut file = File::open(filepath) - .map_err(|err| CliError::EnvironmentError(format!("Failed to open file: {err}")))?; + .map_err(|err| CliError::Environment(format!("Failed to open file: {err}")))?; let mut packed = Vec::new(); file.read_to_end(&mut packed) - .map_err(|err| CliError::EnvironmentError(format!("Failed to read file: {err}")))?; + .map_err(|err| 
CliError::Environment(format!("Failed to read file: {err}")))?; let block: Block = - Message::parse_from_bytes(&packed).map_err(|err| CliError::ParseError(format!("{err}")))?; + Message::parse_from_bytes(&packed).map_err(|err| CliError::Parse(format!("{err}")))?; let block_header: BlockHeader = Message::parse_from_bytes(&block.header) - .map_err(|err| CliError::ParseError(format!("{err}")))?; + .map_err(|err| CliError::Parse(format!("{err}")))?; let block_id = block.header_signature.clone(); // Ensure this block is an immediate child of the current chain head match blockstore.get_chain_head() { Ok(chain_head) => { if block_header.previous_block_id != chain_head { - return Err(CliError::ArgumentError(format!( + return Err(CliError::Argument(format!( "New block must be an immediate child of the current chain head: {chain_head}" ))); } } - Err(DatabaseError::NotFoundError(_)) => (), + Err(DatabaseError::NotFound(_)) => (), Err(err) => { - return Err(CliError::EnvironmentError(format!( + return Err(CliError::Environment(format!( "failed to read chain head id: {err}" ))); } } - blockstore.put(&block).map_err(|err| { - CliError::ArgumentError(format!("Failed to put block into database: {err}")) - })?; + blockstore + .put(&block) + .map_err(|err| CliError::Argument(format!("Failed to put block into database: {err}")))?; println!("Block {block_id} added"); Ok(()) @@ -357,29 +351,29 @@ fn run_stats_command(args: &ArgMatches) -> Result<(), CliError> { let block_count = blockstore .get_current_height() - .map_err(|err| CliError::EnvironmentError(format!("failed to read block count: {err}")))?; + .map_err(|err| CliError::Environment(format!("failed to read block count: {err}")))?; let batch_count = blockstore .get_batch_count() - .map_err(|err| CliError::EnvironmentError(format!("failed to read batch count: {err}")))?; - let txn_count = blockstore.get_transaction_count().map_err(|err| { - CliError::EnvironmentError(format!("failed to read transaction count: {err}")) - })?; + .map_err(|err| CliError::Environment(format!("failed to read batch count: {err}")))?; + let txn_count = blockstore + .get_transaction_count() + .map_err(|err| CliError::Environment(format!("failed to read transaction count: {err}")))?; if args.is_present("extended") { let mut txn_family_counts = HashMap::new(); - let chain_head = blockstore.get_chain_head().map_err(|err| { - CliError::EnvironmentError(format!("failed to get chain head id: {err}")) - })?; - let mut block = blockstore.get(&chain_head).map_err(|err| { - CliError::EnvironmentError(format!("failed to read chain head: {err}")) - })?; + let chain_head = blockstore + .get_chain_head() + .map_err(|err| CliError::Environment(format!("failed to get chain head id: {err}")))?; + let mut block = blockstore + .get(&chain_head) + .map_err(|err| CliError::Environment(format!("failed to read chain head: {err}")))?; loop { for batch in &block.batches { for txn in &batch.transactions { let txn_header: TransactionHeader = Message::parse_from_bytes(&txn.header) .map_err(|err| { - CliError::ParseError(format!( + CliError::Parse(format!( "failed to parse header for transaction {}: {}", txn.header_signature, err )) @@ -389,7 +383,7 @@ fn run_stats_command(args: &ArgMatches) -> Result<(), CliError> { } } let header: BlockHeader = Message::parse_from_bytes(&block.header).map_err(|err| { - CliError::ParseError(format!( + CliError::Parse(format!( "failed to parse header for block {}: {}", block.header_signature, err )) @@ -398,7 +392,7 @@ fn run_stats_command(args: &ArgMatches) -> Result<(), 
CliError> { break; } block = blockstore.get(&header.previous_block_id).map_err(|err| { - CliError::EnvironmentError(format!( + CliError::Environment(format!( "failed to read block {}: {}", header.previous_block_id, err )) @@ -425,7 +419,7 @@ fn create_context() -> Result { let blockstore_path = &path_config.data_dir.join(config::get_blockstore_filename()); lmdb::LmdbContext::new(blockstore_path, 3, None).map_err(|err| { - CliError::EnvironmentError(format!("failed to create block store context: {err}")) + CliError::Environment(format!("failed to create block store context: {err}")) }) } @@ -434,7 +428,7 @@ fn open_blockstore(ctx: &lmdb::LmdbContext) -> Result { ctx, &["index_batch", "index_transaction", "index_block_num"], ) - .map_err(|err| CliError::EnvironmentError(format!("failed to open block store DB: {err}")))?; + .map_err(|err| CliError::Environment(format!("failed to open block store DB: {err}")))?; Ok(Blockstore::new(blockstore_db)) } @@ -442,13 +436,13 @@ fn open_blockstore(ctx: &lmdb::LmdbContext) -> Result { fn backup_block(block: &Block, writer: &mut W) -> Result<(), CliError> { block .write_length_delimited_to_writer(writer) - .map_err(|err| CliError::EnvironmentError(format!("{err}"))) + .map_err(|err| CliError::Environment(format!("{err}"))) } fn restore_block(source: &mut protobuf::CodedInputStream) -> Result, CliError> { let eof = source .eof() - .map_err(|err| CliError::EnvironmentError(format!("Failed to check EOF: {err}")))?; + .map_err(|err| CliError::Environment(format!("Failed to check EOF: {err}")))?; if eof { return Ok(None); } @@ -456,7 +450,7 @@ fn restore_block(source: &mut protobuf::CodedInputStream) -> Result Result<(), CliError> { let genesis_file_path = if args.is_present("output") { args.value_of("output") - .ok_or_else(|| CliError::ArgumentError("Failed to read `output` arg".into())) + .ok_or_else(|| CliError::Argument("Failed to read `output` arg".into())) .map(|pathstr| Path::new(pathstr).to_path_buf()) } else { Ok(config::get_path_config().data_dir.join("genesis.batch")) }?; if genesis_file_path.exists() { - return Err(CliError::EnvironmentError(format!( + return Err(CliError::Environment(format!( "File already exists: {genesis_file_path:?}" ))); } let input_files = args .values_of("input_file") - .ok_or_else(|| CliError::ArgumentError("No input files passed".into()))?; + .ok_or_else(|| CliError::Argument("No input files passed".into()))?; let batch_lists = input_files .map(|filepath| { let mut file = File::open(filepath) - .map_err(|err| CliError::EnvironmentError(format!("Failed to open file: {err}")))?; + .map_err(|err| CliError::Environment(format!("Failed to open file: {err}")))?; let mut packed = Vec::new(); file.read_to_end(&mut packed) - .map_err(|err| CliError::EnvironmentError(format!("Failed to read file: {err}")))?; - let batch_list: BatchList = Message::parse_from_bytes(&packed).map_err(|err| { - CliError::ArgumentError(format!("Unable to read {filepath}: {err}")) - })?; + .map_err(|err| CliError::Environment(format!("Failed to read file: {err}")))?; + let batch_list: BatchList = Message::parse_from_bytes(&packed) + .map_err(|err| CliError::Argument(format!("Unable to read {filepath}: {err}")))?; Ok(batch_list) }) .collect::, CliError>>()?; @@ -81,7 +80,7 @@ pub fn run(args: &ArgMatches) -> Result<(), CliError> { genesis_data.set_batches(protobuf::RepeatedField::from_vec(batches)); let buf = genesis_data.write_to_bytes().map_err(|err| { - CliError::ArgumentError(format!( + CliError::Argument(format!( "Failed to convert BatchLists to 
GenesisData: {err}" )) })?; @@ -91,11 +90,11 @@ pub fn run(args: &ArgMatches) -> Result<(), CliError> { .create(true) .mode(0o640) .open(genesis_file_path.as_path()) - .map_err(|err| CliError::EnvironmentError(format!("{err}")))?; + .map_err(|err| CliError::Environment(format!("{err}")))?; genesis_data_file .write(&buf) - .map_err(|err| CliError::EnvironmentError(format!("{err}")))?; + .map_err(|err| CliError::Environment(format!("{err}")))?; Ok(()) } @@ -106,14 +105,14 @@ fn validate_depedencies(batches: &[Batch]) -> Result<(), CliError> { for txn in batch.transactions.iter() { let header: TransactionHeader = Message::parse_from_bytes(&txn.header).map_err(|err| { - CliError::ArgumentError(format!( + CliError::Argument(format!( "Invalid transaction header for txn {}: {}", &txn.header_signature, err )) })?; for dep in header.dependencies.iter() { if !txn_ids.contains(dep) { - return Err(CliError::ArgumentError(format!( + return Err(CliError::Argument(format!( "Unsatisfied dependency in given transaction {}: {}", &txn.header_signature, dep ))); @@ -135,7 +134,7 @@ fn check_required_settings(batches: &[Batch]) -> Result<(), CliError> { for txn in batch.transactions.iter() { let txn_header: TransactionHeader = Message::parse_from_bytes(&txn.header).map_err(|err| { - CliError::ArgumentError(format!( + CliError::Argument(format!( "Invalid transaction header for txn {}: {}", &txn.header_signature, err )) @@ -143,7 +142,7 @@ fn check_required_settings(batches: &[Batch]) -> Result<(), CliError> { if txn_header.family_name == "sawtooth_settings" { let settings_payload: SettingsPayload = Message::parse_from_bytes(&txn.payload) .map_err(|err| { - CliError::ArgumentError(format!( + CliError::Argument(format!( "Invalid payload for settings txn {}: {}", &txn.header_signature, err )) @@ -151,7 +150,7 @@ fn check_required_settings(batches: &[Batch]) -> Result<(), CliError> { if let SettingsPayload_Action::PROPOSE = settings_payload.action { let proposal: SettingProposal = Message::parse_from_bytes(&settings_payload.data).map_err(|err| { - CliError::ArgumentError(format!( + CliError::Argument(format!( "Invalid proposal for settings payload: {err}" )) })?; @@ -162,7 +161,7 @@ fn check_required_settings(batches: &[Batch]) -> Result<(), CliError> { } if !required_settings.is_empty() { - Err(CliError::ArgumentError(format!( + Err(CliError::Argument(format!( "The following setting(s) are required at genesis, but were not included in the \ genesis batches: {required_settings:?}" ))) diff --git a/adm/src/commands/keygen.rs b/adm/src/commands/keygen.rs index d24492109c..fc459f0dc2 100644 --- a/adm/src/commands/keygen.rs +++ b/adm/src/commands/keygen.rs @@ -37,7 +37,7 @@ pub fn run(args: &ArgMatches) -> Result<(), CliError> { let path_config = config::get_path_config(); let key_dir = &path_config.key_dir; if !key_dir.exists() { - return Err(CliError::EnvironmentError(format!( + return Err(CliError::Environment(format!( "Key directory does not exist: {key_dir:?}" ))); } @@ -48,29 +48,24 @@ pub fn run(args: &ArgMatches) -> Result<(), CliError> { if !args.is_present("force") { if private_key_path.exists() { - return Err(CliError::EnvironmentError(format!( + return Err(CliError::Environment(format!( "file exists: {private_key_path:?}" ))); } if public_key_path.exists() { - return Err(CliError::EnvironmentError(format!( + return Err(CliError::Environment(format!( "file exists: {public_key_path:?}" ))); } } - let context = signing::create_context("secp256k1") - .map_err(|err| 
CliError::EnvironmentError(format!("{err}")))?; + let context = signing::create_context("secp256k1")?; - let private_key = context - .new_random_private_key() - .map_err(|err| CliError::EnvironmentError(format!("{err}")))?; - let public_key = context - .get_public_key(&*private_key) - .map_err(|err| CliError::EnvironmentError(format!("{err}")))?; + let private_key = context.new_random_private_key()?; - let key_dir_info = - metadata(key_dir).map_err(|err| CliError::EnvironmentError(format!("{err}")))?; + let public_key = context.get_public_key(&*private_key)?; + + let key_dir_info = metadata(key_dir)?; #[cfg(not(target_os = "linux"))] let (key_dir_uid, key_dir_gid) = (key_dir_info.uid(), key_dir_info.gid()); @@ -87,12 +82,9 @@ pub fn run(args: &ArgMatches) -> Result<(), CliError> { .write(true) .create(true) .mode(0o640) - .open(private_key_path.as_path()) - .map_err(|err| CliError::EnvironmentError(format!("{err}")))?; + .open(private_key_path.as_path())?; - private_key_file - .write(private_key.as_hex().as_bytes()) - .map_err(|err| CliError::EnvironmentError(format!("{err}")))?; + private_key_file.write_all(private_key.as_hex().as_bytes())?; } { @@ -105,12 +97,9 @@ pub fn run(args: &ArgMatches) -> Result<(), CliError> { .write(true) .create(true) .mode(0o644) - .open(public_key_path.as_path()) - .map_err(|err| CliError::EnvironmentError(format!("{err}")))?; + .open(public_key_path.as_path())?; - public_key_file - .write(public_key.as_hex().as_bytes()) - .map_err(|err| CliError::EnvironmentError(format!("{err}")))?; + public_key_file.write_all(public_key.as_hex().as_bytes())?; } chown(private_key_path.as_path(), key_dir_uid, key_dir_gid)?; @@ -121,13 +110,12 @@ pub fn run(args: &ArgMatches) -> Result<(), CliError> { fn chown(path: &Path, uid: u32, gid: u32) -> Result<(), CliError> { let pathstr = path .to_str() - .ok_or_else(|| CliError::EnvironmentError(format!("Invalid path: {path:?}")))?; - let cpath = - CString::new(pathstr).map_err(|err| CliError::EnvironmentError(format!("{err}")))?; + .ok_or_else(|| CliError::Environment(format!("Invalid path: {path:?}")))?; + let cpath = CString::new(pathstr)?; let result = unsafe { libc::chown(cpath.as_ptr(), uid, gid) }; match result { 0 => Ok(()), - code => Err(CliError::EnvironmentError(format!( + code => Err(CliError::Environment(format!( "Error chowning file {pathstr}: {code}" ))), } diff --git a/adm/src/database/error.rs b/adm/src/database/error.rs index 3ba1458a7d..c124ff1b9e 100644 --- a/adm/src/database/error.rs +++ b/adm/src/database/error.rs @@ -15,26 +15,30 @@ * ------------------------------------------------------------------------------ */ -use std; +use std::string::FromUtf8Error; -#[allow(clippy::enum_variant_names)] -#[derive(Debug)] +use thiserror::Error; + +#[derive(Error, Debug)] pub enum DatabaseError { - InitError(String), - ReaderError(String), - WriterError(String), - CorruptionError(String), - NotFoundError(String), -} + #[error("Init error: {0}")] + Init(String), + + #[error("LmDb error: {0}")] + Lmdb(#[from] lmdb_zero::error::Error), + + #[error("Could not interpret stored data as a block: {0}")] + Protobuf(#[from] protobuf::ProtobufError), + + #[error("Reader error: {0}")] + Reader(String), + + #[error("Writer error: {0}")] + Writer(String), + + #[error("Chain head block id is corrupt: {0}")] + Corruption(#[from] FromUtf8Error), -impl std::fmt::Display for DatabaseError { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match *self { - DatabaseError::InitError(ref msg) => write!(f, "InitError: 
{msg}"), - DatabaseError::ReaderError(ref msg) => write!(f, "ReaderError: {msg}"), - DatabaseError::WriterError(ref msg) => write!(f, "WriterError: {msg}"), - DatabaseError::CorruptionError(ref msg) => write!(f, "CorruptionError: {msg}"), - DatabaseError::NotFoundError(ref msg) => write!(f, "NotFoundError: {msg}"), - } - } + #[error("Unable to read chain head: {0}")] + NotFound(String), } diff --git a/adm/src/database/lmdb.rs b/adm/src/database/lmdb.rs index 54e9b5204b..c85635274d 100644 --- a/adm/src/database/lmdb.rs +++ b/adm/src/database/lmdb.rs @@ -34,23 +34,16 @@ impl LmdbContext { let filepath_str = filepath .to_str() - .ok_or_else(|| DatabaseError::InitError(format!("Invalid filepath: {filepath:?}")))?; - - let mut builder = lmdb::EnvBuilder::new().map_err(|err| { - DatabaseError::InitError(format!("Failed to initialize environment: {err}")) - })?; - builder - .set_maxdbs(indexes + 1) - .map_err(|err| DatabaseError::InitError(format!("Failed to set MAX_DBS: {err}")))?; - builder - .set_mapsize(size.unwrap_or(DEFAULT_SIZE)) - .map_err(|err| DatabaseError::InitError(format!("Failed to set MAP_SIZE: {err}")))?; - - let env = unsafe { - builder - .open(filepath_str, flags, 0o600) - .map_err(|err| DatabaseError::InitError(format!("Database not found: {err}"))) - }?; + .ok_or_else(|| DatabaseError::Init(format!("Invalid filepath: {filepath:?}")))?; + + let mut builder = lmdb::EnvBuilder::new()?; + + builder.set_maxdbs(indexes + 1)?; + + builder.set_mapsize(size.unwrap_or(DEFAULT_SIZE))?; + + let env = unsafe { builder.open(filepath_str, flags, 0o600) }?; + Ok(LmdbContext { env }) } } @@ -67,8 +60,7 @@ impl<'e> LmdbDatabase<'e> { &ctx.env, Some("main"), &lmdb::DatabaseOptions::new(lmdb::db::CREATE), - ) - .map_err(|err| DatabaseError::InitError(format!("Failed to open database: {err:?}")))?; + )?; let mut index_dbs = HashMap::with_capacity(indexes.len()); for name in indexes { @@ -76,8 +68,7 @@ impl<'e> LmdbDatabase<'e> { &ctx.env, Some(name), &lmdb::DatabaseOptions::new(lmdb::db::CREATE), - ) - .map_err(|err| DatabaseError::InitError(format!("Failed to open database: {err:?}")))?; + )?; index_dbs.insert(String::from(*name), db); } Ok(LmdbDatabase { @@ -88,14 +79,12 @@ impl<'e> LmdbDatabase<'e> { } pub fn reader(&self) -> Result { - let txn = lmdb::ReadTransaction::new(&self.ctx.env) - .map_err(|err| DatabaseError::ReaderError(format!("Failed to create reader: {err}")))?; + let txn = lmdb::ReadTransaction::new(&self.ctx.env)?; Ok(LmdbDatabaseReader { db: self, txn }) } pub fn writer(&self) -> Result { - let txn = lmdb::WriteTransaction::new(&self.ctx.env) - .map_err(|err| DatabaseError::WriterError(format!("Failed to create writer: {err}")))?; + let txn = lmdb::WriteTransaction::new(&self.ctx.env)?; Ok(LmdbDatabaseWriter { db: self, txn }) } } @@ -117,7 +106,7 @@ impl<'a> LmdbDatabaseReader<'a> { .db .indexes .get(index) - .ok_or_else(|| DatabaseError::ReaderError(format!("Not an index: {index}")))?; + .ok_or_else(|| DatabaseError::Reader(format!("Not an index: {index}")))?; let access = self.txn.access(); let val: Result<&[u8], _> = access.get(index, key); Ok(val.ok().map(Vec::from)) @@ -128,22 +117,15 @@ impl<'a> LmdbDatabaseReader<'a> { .db .indexes .get(index) - .ok_or_else(|| DatabaseError::ReaderError(format!("Not an index: {index}")))?; - let cursor = self - .txn - .cursor(index) - .map_err(|err| DatabaseError::ReaderError(format!("{err}")))?; + .ok_or_else(|| DatabaseError::Reader(format!("Not an index: {index}")))?; + let cursor = self.txn.cursor(index)?; let access = 
self.txn.access(); Ok(LmdbDatabaseReaderCursor { access, cursor }) } pub fn count(&self) -> Result { - self.txn - .db_stat(&self.db.main) - .map_err(|err| { - DatabaseError::CorruptionError(format!("Failed to get database stats: {err}")) - }) - .map(|stat| stat.entries) + let stat = self.txn.db_stat(&self.db.main)?; + Ok(stat.entries) } pub fn index_count(&self, index: &str) -> Result { @@ -151,13 +133,9 @@ impl<'a> LmdbDatabaseReader<'a> { .db .indexes .get(index) - .ok_or_else(|| DatabaseError::ReaderError(format!("Not an index: {index}")))?; - self.txn - .db_stat(index) - .map_err(|err| { - DatabaseError::CorruptionError(format!("Failed to get database stats: {err}")) - }) - .map(|stat| stat.entries) + .ok_or_else(|| DatabaseError::Reader(format!("Not an index: {index}")))?; + let stat = self.txn.db_stat(index)?; + Ok(stat.entries) } } @@ -182,17 +160,14 @@ pub struct LmdbDatabaseWriter<'a> { impl<'a> LmdbDatabaseWriter<'a> { pub fn put(&mut self, key: &[u8], value: &[u8]) -> Result<(), DatabaseError> { - self.txn + Ok(self + .txn .access() - .put(&self.db.main, key, value, lmdb::put::Flags::empty()) - .map_err(|err| DatabaseError::WriterError(format!("{err}"))) + .put(&self.db.main, key, value, lmdb::put::Flags::empty())?) } pub fn delete(&mut self, key: &[u8]) -> Result<(), DatabaseError> { - self.txn - .access() - .del_key(&self.db.main, key) - .map_err(|err| DatabaseError::WriterError(format!("{err}"))) + Ok(self.txn.access().del_key(&self.db.main, key)?) } pub fn index_put( @@ -205,11 +180,11 @@ impl<'a> LmdbDatabaseWriter<'a> { .db .indexes .get(index) - .ok_or_else(|| DatabaseError::WriterError(format!("Not an index: {index}")))?; - self.txn + .ok_or_else(|| DatabaseError::Writer(format!("Not an index: {index}")))?; + Ok(self + .txn .access() - .put(index, key, value, lmdb::put::Flags::empty()) - .map_err(|err| DatabaseError::WriterError(format!("{err}"))) + .put(index, key, value, lmdb::put::Flags::empty())?) } pub fn index_delete(&mut self, index: &str, key: &[u8]) -> Result<(), DatabaseError> { @@ -217,17 +192,12 @@ impl<'a> LmdbDatabaseWriter<'a> { .db .indexes .get(index) - .ok_or_else(|| DatabaseError::WriterError(format!("Not an index: {index}")))?; - self.txn - .access() - .del_key(index, key) - .map_err(|err| DatabaseError::WriterError(format!("{err}"))) + .ok_or_else(|| DatabaseError::Writer(format!("Not an index: {index}")))?; + Ok(self.txn.access().del_key(index, key)?) } pub fn commit(self) -> Result<(), DatabaseError> { - self.txn - .commit() - .map_err(|err| DatabaseError::WriterError(format!("{err}"))) + Ok(self.txn.commit()?) } } @@ -282,51 +252,47 @@ mod tests { /// (adding keys, deleting keys, etc), making assertions about the /// database contents at each step. 
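The tests below switch from .unwrap() calls to returning a Result. This works because a #[test] function may return any type implementing std::process::Termination; Result<(), E> qualifies when E implements Debug, so an Err value fails the test and `?` can replace the unwraps. A self-contained sketch of the pattern (SetupError is an illustrative stand-in, not a type from this crate):

    #[derive(Debug)]
    struct SetupError;

    // An Err return fails the test much like a panic from unwrap() would,
    // while intermediate steps get to use `?` instead of .unwrap().
    #[test]
    fn fallible_test_sketch() -> Result<(), SetupError> {
        let value: Result<u32, SetupError> = Ok(7);
        assert_eq!(7, value?);
        Ok(())
    }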
#[test] - fn test_lmdb() { + fn test_lmdb() -> Result<(), DatabaseError> { let path_config = config::get_path_config(); let blockstore_path = &path_config.data_dir.join(String::from("unit-lmdb.lmdb")); - let ctx = LmdbContext::new(blockstore_path, 3, None) - .map_err(|err| DatabaseError::InitError(format!("{err}"))) - .unwrap(); + let ctx = LmdbContext::new(blockstore_path, 3, None)?; - let database = LmdbDatabase::new(&ctx, &["a", "b"]) - .map_err(|err| DatabaseError::InitError(format!("{err}"))) - .unwrap(); + let database = LmdbDatabase::new(&ctx, &["a", "b"])?; assert_database_count(0, &database); assert_not_in_database(3, &database); assert_not_in_database(5, &database); // Add {3: 4} - let mut writer = database.writer().unwrap(); - writer.put(&[3], &[4]).unwrap(); + let mut writer = database.writer()?; + writer.put(&[3], &[4])?; assert_database_count(0, &database); assert_not_in_database(3, &database); - writer.commit().unwrap(); + writer.commit()?; assert_database_count(1, &database); assert_key_value(3, 4, &database); // Add {5: 6} - let mut writer = database.writer().unwrap(); - writer.put(&[5], &[6]).unwrap(); - writer.commit().unwrap(); + let mut writer = database.writer()?; + writer.put(&[5], &[6])?; + writer.commit()?; assert_database_count(2, &database); assert_key_value(5, 6, &database); assert_key_value(3, 4, &database); // Delete {3: 4} - let mut writer = database.writer().unwrap(); - writer.delete(&[3]).unwrap(); + let mut writer = database.writer()?; + writer.delete(&[3])?; assert_database_count(2, &database); - writer.commit().unwrap(); + writer.commit()?; assert_database_count(1, &database); assert_key_value(5, 6, &database); @@ -338,15 +304,15 @@ mod tests { assert_not_in_index("a", 5, &database); assert_not_in_index("b", 5, &database); - let mut writer = database.writer().unwrap(); - writer.index_put("a", &[55], &[5]).unwrap(); + let mut writer = database.writer()?; + writer.index_put("a", &[55], &[5])?; assert_index_count("a", 0, &database); assert_index_count("b", 0, &database); assert_not_in_index("a", 5, &database); assert_not_in_index("b", 5, &database); - writer.commit().unwrap(); + writer.commit()?; assert_index_count("a", 1, &database); assert_index_count("b", 0, &database); @@ -357,15 +323,15 @@ mod tests { assert_not_in_database(3, &database); // Delete {55: 5} in "a" - let mut writer = database.writer().unwrap(); - writer.index_delete("a", &[55]).unwrap(); + let mut writer = database.writer()?; + writer.index_delete("a", &[55])?; assert_index_count("a", 1, &database); assert_index_count("b", 0, &database); assert_index_key_value("a", 55, 5, &database); assert_not_in_index("b", 5, &database); - writer.commit().unwrap(); + writer.commit()?; assert_index_count("a", 0, &database); assert_index_count("b", 0, &database); @@ -374,5 +340,7 @@ mod tests { assert_database_count(1, &database); assert_key_value(5, 6, &database); assert_not_in_database(3, &database); + + Ok(()) } } diff --git a/adm/src/database/mod.rs b/adm/src/database/mod.rs index 059ac4a262..0bcf264584 100644 --- a/adm/src/database/mod.rs +++ b/adm/src/database/mod.rs @@ -15,5 +15,5 @@ * ------------------------------------------------------------------------------ */ -pub mod error; +pub(crate) mod error; pub mod lmdb; diff --git a/adm/src/err.rs b/adm/src/err.rs index 0d65ee9928..ed2dbdbc42 100644 --- a/adm/src/err.rs +++ b/adm/src/err.rs @@ -15,20 +15,30 @@ * ------------------------------------------------------------------------------ */ -#[allow(clippy::enum_variant_names)] -#[derive(Debug)] 
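The hand-written Display impl that err.rs loses in this hunk is subsumed by the thiserror derive introduced just below: each #[error("...")] attribute becomes an arm of a generated Display impl, and each #[from] field additionally gets a From impl, which is what the `?` conversions throughout this patch rely on. A rough hand-expansion for one variant of a simplified enum (illustrative only; the code thiserror actually generates differs in detail):

    #[derive(Debug)]
    enum MiniError {
        Io(std::io::Error),
    }

    // Roughly what `#[error("Io error: {0}")]` expands to:
    impl std::fmt::Display for MiniError {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            match self {
                MiniError::Io(err) => write!(f, "Io error: {err}"),
            }
        }
    }

    // Roughly what `#[from]` on the Io field expands to:
    impl From<std::io::Error> for MiniError {
        fn from(err: std::io::Error) -> Self {
            MiniError::Io(err)
        }
    }

    // thiserror also emits a std::error::Error impl (wiring source() to the
    // #[from] field); this empty impl is the simplified stand-in.
    impl std::error::Error for MiniError {}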
+use thiserror::Error; + +use crate::database; + +#[derive(Error, Debug)] pub enum CliError { - ArgumentError(String), - EnvironmentError(String), - ParseError(String), -} + #[error("Argument error: {0}")] + Argument(String), + + #[error("Database error: {0}")] + Database(#[from] database::error::DatabaseError), + + #[error("Environment error: {0}")] + Environment(String), + + #[error("Io error: {0}")] + Io(#[from] std::io::Error), + + #[error("Nul error: {0}")] + Nul(#[from] std::ffi::NulError), + + #[error("Parse error: {0}")] + Parse(String), -impl std::fmt::Display for CliError { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match *self { - CliError::ArgumentError(ref msg) => write!(f, "ArgumentError: {msg}"), - CliError::EnvironmentError(ref msg) => write!(f, "EnvironmentError: {msg}"), - CliError::ParseError(ref msg) => write!(f, "ParseError: {msg}"), - } - } + #[error("Signing error: {0}")] + Signing(#[from] sawtooth_sdk::signing::Error), } diff --git a/adm/src/main.rs b/adm/src/main.rs index 3ba53f910d..e8cb8cfa08 100644 --- a/adm/src/main.rs +++ b/adm/src/main.rs @@ -30,7 +30,7 @@ mod blockstore; mod commands; mod config; mod database; -mod err; +pub(crate) mod err; mod proto; mod wrappers; diff --git a/adm/src/wrappers.rs b/adm/src/wrappers.rs index ad992b3ac2..383d912b92 100644 --- a/adm/src/wrappers.rs +++ b/adm/src/wrappers.rs @@ -15,20 +15,14 @@ * ------------------------------------------------------------------------------ */ use protobuf::Message; +use thiserror::Error; use crate::proto; -#[derive(Debug)] -pub enum Error { - ParseError(String), -} - -impl std::fmt::Display for Error { - fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - match *self { - Error::ParseError(ref msg) => write!(f, "ParseError: {msg}"), - } - } +#[derive(Error, Debug)] +pub enum WrappersError { + #[error("Protobuf error: {0}")] + Protobuf(#[from] protobuf::ProtobufError), } #[derive(Serialize)] @@ -43,28 +37,21 @@ pub struct Block { } impl Block { - pub fn try_from(block: proto::block::Block) -> Result { - Message::parse_from_bytes(&block.header) - .map_err(|err| { - Error::ParseError(format!( - "Invalid BlockHeader {}: {}", - block.header_signature, err - )) - }) - .and_then(|block_header: proto::block::BlockHeader| { - block - .get_batches() - .iter() - .map(|batch| Batch::try_from(batch.clone())) - .collect::, Error>>() - .map(move |batches| Block { - batches, - block_num: block_header.get_block_num(), - consensus: Vec::from(block_header.get_consensus()), - header_signature: String::from(block.get_header_signature()), - previous_block_id: String::from(block_header.get_previous_block_id()), - state_root_hash: String::from(block_header.get_state_root_hash()), - }) + pub fn try_from(block: proto::block::Block) -> Result { + let block_header: proto::block::BlockHeader = Message::parse_from_bytes(&block.header)?; + + block + .get_batches() + .iter() + .map(|batch| Batch::try_from(batch.clone())) + .collect::, WrappersError>>() + .map(move |batches| Block { + batches, + block_num: block_header.get_block_num(), + consensus: Vec::from(block_header.get_consensus()), + header_signature: String::from(block.get_header_signature()), + previous_block_id: String::from(block_header.get_previous_block_id()), + state_root_hash: String::from(block_header.get_state_root_hash()), }) } } @@ -77,25 +64,17 @@ pub struct Batch { } impl Batch { - pub fn try_from(batch: proto::batch::Batch) -> Result { - Message::parse_from_bytes(&batch.header) - .map_err(|err| { - 
Error::ParseError(format!( - "Invalid BatchHeader {}: {}", - batch.header_signature, err - )) - }) - .and_then(|batch_header: proto::batch::BatchHeader| { - batch - .get_transactions() - .iter() - .map(|transaction| Transaction::try_from(&transaction.clone())) - .collect::<Result<Vec<Transaction>, Error>>() - .map(move |transactions| Batch { - header_signature: String::from(batch.get_header_signature()), - signer_public_key: String::from(batch_header.get_signer_public_key()), - transactions, - }) + pub fn try_from(batch: proto::batch::Batch) -> Result<Batch, WrappersError> { + let batch_header: proto::batch::BatchHeader = Message::parse_from_bytes(&batch.header)?; + batch + .get_transactions() + .iter() + .map(|transaction| Transaction::try_from(&transaction.clone())) + .collect::<Result<Vec<Transaction>, WrappersError>>() + .map(move |transactions| Batch { + header_signature: String::from(batch.get_header_signature()), + signer_public_key: String::from(batch_header.get_signer_public_key()), + transactions, }) } } @@ -117,28 +96,21 @@ pub struct Transaction { } impl Transaction { - pub fn try_from(transaction: &proto::transaction::Transaction) -> Result<Transaction, Error> { - Message::parse_from_bytes(&transaction.header) - .map_err(|err| { - Error::ParseError(format!( - "Invalid TransactionHeader {}: {}", - transaction.header_signature, err - )) - }) - .map( - |transaction_header: proto::transaction::TransactionHeader| Transaction { - batcher_public_key: String::from(transaction_header.get_batcher_public_key()), - dependencies: transaction_header.get_dependencies().to_vec(), - family_name: String::from(transaction_header.get_family_name()), - family_version: String::from(transaction_header.get_family_version()), - header_signature: String::from(transaction.get_header_signature()), - inputs: transaction_header.get_inputs().to_vec(), - nonce: String::from(transaction_header.get_nonce()), - outputs: transaction_header.get_outputs().to_vec(), - payload: Vec::from(transaction.get_payload()), - payload_sha512: String::from(transaction_header.get_payload_sha512()), - signer_public_key: String::from(transaction_header.get_signer_public_key()), - }, - ) + pub fn try_from(transaction: &proto::transaction::Transaction) -> Result<Transaction, WrappersError> { + let transaction_header: proto::transaction::TransactionHeader = + Message::parse_from_bytes(&transaction.header)?; + Ok(Transaction { + batcher_public_key: String::from(transaction_header.get_batcher_public_key()), + dependencies: transaction_header.get_dependencies().to_vec(), + family_name: String::from(transaction_header.get_family_name()), + family_version: String::from(transaction_header.get_family_version()), + header_signature: String::from(transaction.get_header_signature()), + inputs: transaction_header.get_inputs().to_vec(), + nonce: String::from(transaction_header.get_nonce()), + outputs: transaction_header.get_outputs().to_vec(), + payload: Vec::from(transaction.get_payload()), + payload_sha512: String::from(transaction_header.get_payload_sha512()), + signer_public_key: String::from(transaction_header.get_signer_public_key()), + }) } }
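Taken together, the two derived enums let failures flow upward through `?`: lmdb, protobuf, and UTF-8 errors become DatabaseError variants via #[from], and DatabaseError, std::io::Error, NulError, and sawtooth_sdk signing errors become CliError variants the same way. A hypothetical command-level helper illustrating the chain (store_block_from_file is not part of this patch; it only exercises types the patch defines):

    use std::fs::File;
    use std::io::Read;

    use protobuf::Message;

    use crate::blockstore::Blockstore;
    use crate::err::CliError;
    use crate::proto::block::Block;

    // Reads a serialized Block from disk and stores it. The generated From impls
    // handle the conversions: std::io::Error -> CliError::Io at the file calls and
    // DatabaseError -> CliError::Database at the blockstore call. The protobuf
    // parse keeps an explicit map_err because CliError has no #[from] for
    // ProtobufError; only DatabaseError does.
    fn store_block_from_file(blockstore: &Blockstore<'_>, path: &str) -> Result<(), CliError> {
        let mut packed = Vec::new();
        File::open(path)?.read_to_end(&mut packed)?;
        let block: Block = Message::parse_from_bytes(&packed)
            .map_err(|err| CliError::Parse(format!("invalid block file {path}: {err}")))?;
        blockstore.put(&block)?;
        Ok(())
    }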