diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 66c1a690d..d1e6b0cf8 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -24,6 +24,7 @@ jobs: DO_BENCH: true AS_DEPENDENCY: true DO_NO_STD: true + DO_DOCS: true - rust: 1.29.0 env: AS_DEPENDENCY: true @@ -82,20 +83,6 @@ jobs: - name: run cross test run: cross test --target s390x-unknown-linux-gnu - Docs: - name: Docs - runs-on: ubuntu-latest - steps: - - name: Checkout Crate - uses: actions/checkout@v2 - - name: Checkout Toolchain - uses: actions-rs/toolchain@v1 - with: - profile: minimal - toolchain: stable - override: true - - name: Create Doc - run: cargo doc Embedded: runs-on: ubuntu-latest steps: diff --git a/CHANGELOG.md b/CHANGELOG.md index 683d456b5..bbc269bf4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,7 +13,7 @@ The previous release changed the behavior of `Display` for `ChildNumber`, assuming that any correct usage would not be affected. [Issue 608](https://github.com/rust-bitcoin/rust-bitcoin/issues/608) goes into the details of why this isn't -the case and how we broke both `rust-miniscript` and BDK. +the case and how we broke both `rust-miniscript` and BDK. # 0.26.1 - 2021-06-06 (yanked, see explanation above) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index eca42753e..367588024 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -128,7 +128,7 @@ PR authors may also find it useful to run the following script locally in order to check that each of the commits within the PR satisfies the requirements above, before submitting the PR to review: ```shell script -BITCOIN_MSRV=1.29.0 ./contrib/ci.sh +BITCOIN_MSRV=1.29.0 ./contrib/test.sh ``` Please replace the value in `BITCOIN_MSRV=1.29.0` with the current MSRV from [README.md]. 
diff --git a/Cargo.toml b/Cargo.toml index 7b4dc33fc..0f9050d62 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "bitcoin" -version = "0.28.0-rc.1" +version = "0.28.0-rc.2" authors = ["Andrew Poelstra "] license = "CC0-1.0" homepage = "https://github.com/rust-bitcoin/rust-bitcoin/" @@ -36,7 +36,7 @@ rustdoc-args = ["--cfg", "docsrs"] [dependencies] bech32 = { version = "0.8.1", default-features = false } bitcoin_hashes = { version = "0.10.0", default-features = false } -secp256k1 = { version = "0.21.2", default-features = false } +secp256k1 = { version = "0.22.0", default-features = false } core2 = { version = "0.3.0", optional = true, default-features = false } base64-compat = { version = "1.0.0", optional = true } @@ -47,7 +47,7 @@ hashbrown = { version = "0.8", optional = true } [dev-dependencies] serde_json = "<1.0.45" serde_test = "1" -secp256k1 = { version = "0.21.2", features = [ "recovery", "rand-std" ] } +secp256k1 = { version = "0.22.0", features = [ "recovery", "rand-std" ] } bincode = "1.3.1" # We need to pin ryu (transitive dep from serde_json) to stay compatible with Rust 1.22.0 ryu = "<1.0.5" diff --git a/README.md b/README.md index 17f97e3a5..f3c0a458c 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,23 @@ -![Continuous integration](https://github.com/rust-bitcoin/rust-bitcoin/workflows/Continuous%20integration/badge.svg) -[![Safety Dance](https://img.shields.io/badge/unsafe-forbidden-success.svg)](https://github.com/rust-secure-code/safety-dance/) +
+

Rust Bitcoin

-# Rust Bitcoin Library + Rust Bitcoin logo by Hunter Trujillo, see license and source files under /logo + +

Library with support for de/serialization, parsing and executing on data-structures + and network messages related to Bitcoin. +

+ +

+ Crate Info + MIT or Apache-2.0 Licensed + CI Status + API Docs + Rustc Version 1.29+ + Chat on IRC + Lines of code +

+
-Library with support for de/serialization, parsing and executing on data -structures and network messages related to Bitcoin. [Documentation](https://docs.rs/bitcoin/) @@ -104,7 +117,7 @@ You can run tests with: cargo test ``` -Please refer to the [`cargo` documentation](https://doc.rust-lang.org/stable/cargo/) for more detailed instructions. +Please refer to the [`cargo` documentation](https://doc.rust-lang.org/stable/cargo/) for more detailed instructions. ## Pull Requests diff --git a/contrib/test.sh b/contrib/test.sh index b6da35fc4..41503bc99 100755 --- a/contrib/test.sh +++ b/contrib/test.sh @@ -37,27 +37,27 @@ cargo test --verbose if [ "$DO_NO_STD" = true ] then -echo "********* Testing no-std build *************" - # Build no_std, to make sure that cfg(test) doesn't hide any issues - cargo build --verbose --features="no-std" --no-default-features + echo "********* Testing no-std build *************" + # Build no_std, to make sure that cfg(test) doesn't hide any issues + cargo build --verbose --features="no-std" --no-default-features - # Build std + no_std, to make sure they are not incompatible - cargo build --verbose --features="no-std" + # Build std + no_std, to make sure they are not incompatible + cargo build --verbose --features="no-std" - # Test no_std - cargo test --verbose --features="no-std" --no-default-features + # Test no_std + cargo test --verbose --features="no-std" --no-default-features - # Build all features - cargo build --verbose --features="no-std $FEATURES" --no-default-features + # Build all features + cargo build --verbose --features="no-std $FEATURES" --no-default-features - # Build specific features - for feature in ${FEATURES} - do - cargo build --verbose --features="no-std $feature" - done + # Build specific features + for feature in ${FEATURES} + do + cargo build --verbose --features="no-std $feature" + done - cargo run --example bip32 7934c09359b234e076b9fa5a1abfd38e3dc2a9939745b7cc3c22a48d831d14bd - cargo run 
--no-default-features --features no-std --example bip32 7934c09359b234e076b9fa5a1abfd38e3dc2a9939745b7cc3c22a48d831d14bd + cargo run --example bip32 7934c09359b234e076b9fa5a1abfd38e3dc2a9939745b7cc3c22a48d831d14bd + cargo run --no-default-features --features no-std --example bip32 7934c09359b234e076b9fa5a1abfd38e3dc2a9939745b7cc3c22a48d831d14bd fi # Test each feature @@ -67,6 +67,11 @@ do cargo test --verbose --features="$feature" done +# Build the docs if told to (this only works with the nightly toolchain) +if [ "$DO_DOCS" = true ]; then + RUSTDOCFLAGS="--cfg docsrs" cargo doc --all --features="$FEATURES" +fi + # Fuzz if told to if [ "$DO_FUZZ" = true ] then diff --git a/logo/README.md b/logo/README.md new file mode 100644 index 000000000..71a563dba --- /dev/null +++ b/logo/README.md @@ -0,0 +1,32 @@ +# Rust Bitcoin Logo + +## Files + +Included are: + +- [rust-bitcoin-inkscape.svg](./rust-bitcoin-inkscape.svg) - The Inkscape source file with the things used to make the logo in case adjustments are desired +- [rust-bitcoin-optimized.svg](./rust-bitcoin-optimized.svg) - An optimized SVG for embedding or rendering at any size desired +- [rust-bitcoin.png](./rust-bitcoin.png) - The PNG logo rendered at 300px used by this project +- [rust-bitcoin-large.png](./rust-bitcoin-large.png) - A larger size 1024px x 1024px for convenience for embedding in presentations + +## Author + +Hunter Trujillo, @cryptoquick on [Twitter](https://twitter.com/cryptoquick), [GitHub](https://github.com/cryptoquick), and Telegram. 
+ +## License + +Licensed in the public domain under [CC0 1.0 Universal Public Domain Dedication](https://creativecommons.org/publicdomain/zero/1.0/), and the author of this work rescinds all claims to copyright or coercion or acts of force from any nation state over this work for any purpose + +Bitcoin Logo is licensed under the CC Public Domain Dedication: and + +Rust Logo is licensed under CC-BY, which allows reuse and modifications for any purpose, as long as distributors give appropriate credit and indicate changes have been made. See here: + +## Acknowledgements + +Acknowledgement for the runners up in this PR: https://github.com/rust-bitcoin/rust-bitcoin/pull/891#issuecomment-1074476858 + +In particular, the Rust Bitcoin Wizard gear was an incredibly inspired piece of art. Also, the meshed gears design was beloved by some but not all. + +Thank you to the Rust Bitcoin maintainers and community, your timely responses and guidance was appreciated. + +Also, thank you to the voters on the Rust in Bitcoin Telegram group: . 
diff --git a/logo/rust-bitcoin-inkscape.svg b/logo/rust-bitcoin-inkscape.svg new file mode 100644 index 000000000..9b504a344 --- /dev/null +++ b/logo/rust-bitcoin-inkscape.svg @@ -0,0 +1,244 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/logo/rust-bitcoin-large.png b/logo/rust-bitcoin-large.png new file mode 100644 index 000000000..f0cce3403 Binary files /dev/null and b/logo/rust-bitcoin-large.png differ diff --git a/logo/rust-bitcoin-optimized.svg b/logo/rust-bitcoin-optimized.svg new file mode 100644 index 000000000..31cf374ba --- /dev/null +++ b/logo/rust-bitcoin-optimized.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/logo/rust-bitcoin.png b/logo/rust-bitcoin.png new file mode 100644 index 000000000..0f59dd168 Binary files /dev/null and b/logo/rust-bitcoin.png differ diff --git a/src/blockdata/block.rs b/src/blockdata/block.rs index 28aeda599..8fe78118c 100644 --- a/src/blockdata/block.rs +++ b/src/blockdata/block.rs @@ -44,30 +44,30 @@ use VarInt; pub struct BlockHeader { /// The protocol version. Should always be 1. pub version: i32, - /// Reference to the previous block in the chain + /// Reference to the previous block in the chain. pub prev_blockhash: BlockHash, - /// The root hash of the merkle tree of transactions in the block + /// The root hash of the merkle tree of transactions in the block. pub merkle_root: TxMerkleNode, - /// The timestamp of the block, as claimed by the miner + /// The timestamp of the block, as claimed by the miner. pub time: u32, /// The target value below which the blockhash must lie, encoded as a - /// a float (with well-defined rounding, of course) + /// a float (with well-defined rounding, of course). pub bits: u32, - /// The nonce, selected to obtain a low enough blockhash + /// The nonce, selected to obtain a low enough blockhash. 
pub nonce: u32, } impl_consensus_encoding!(BlockHeader, version, prev_blockhash, merkle_root, time, bits, nonce); impl BlockHeader { - /// Return the block hash. + /// Returns the block hash. pub fn block_hash(&self) -> BlockHash { let mut engine = BlockHash::engine(); self.consensus_encode(&mut engine).expect("engines don't error"); BlockHash::from_engine(engine) } - /// Computes the target [0, T] that a blockhash must land in to be valid + /// Computes the target [0, T] that a blockhash must land in to be valid. pub fn target(&self) -> Uint256 { Self::u256_from_compact_target(self.bits) } @@ -125,7 +125,7 @@ impl BlockHeader { compact | (size << 24) as u32 } - /// Compute the popular "difficulty" measure for mining + /// Computes the popular "difficulty" measure for mining. pub fn difficulty(&self, network: Network) -> u64 { (max_target(network) / self.target()).low_u64() } @@ -143,7 +143,7 @@ impl BlockHeader { if hash <= target { Ok(block_hash) } else { Err(BlockBadProofOfWork) } } - /// Returns the total work of the block + /// Returns the total work of the block. pub fn work(&self) -> Uint256 { // 2**256 / (target + 1) == ~target / (target+1) + 1 (eqn shamelessly stolen from bitcoind) let mut ret = !self.target(); @@ -169,50 +169,54 @@ pub struct Block { impl_consensus_encoding!(Block, header, txdata); impl Block { - /// Return the block hash. + /// Returns the block hash. pub fn block_hash(&self) -> BlockHash { self.header.block_hash() } /// check if merkle root of header matches merkle root of the transaction list - pub fn check_merkle_root (&self) -> bool { + pub fn check_merkle_root(&self) -> bool { match self.compute_merkle_root() { Some(merkle_root) => self.header.merkle_root == merkle_root, None => false, } } - /// check if witness commitment in coinbase is matching the transaction list + /// Checks if witness commitment in coinbase matches the transaction list. 
pub fn check_witness_commitment(&self) -> bool { - - // witness commitment is optional if there are no transactions using SegWit in the block + const MAGIC: [u8; 6] = [0x6a, 0x24, 0xaa, 0x21, 0xa9, 0xed]; + // Witness commitment is optional if there are no transactions using SegWit in the block. if self.txdata.iter().all(|t| t.input.iter().all(|i| i.witness.is_empty())) { return true; } - if !self.txdata.is_empty() { - let coinbase = &self.txdata[0]; - if coinbase.is_coin_base() { - // commitment is in the last output that starts with below magic - if let Some(pos) = coinbase.output.iter() - .rposition(|o| { - o.script_pubkey.len () >= 38 - && o.script_pubkey[0..6] == [0x6a, 0x24, 0xaa, 0x21, 0xa9, 0xed] }) { - let commitment = WitnessCommitment::from_slice(&coinbase.output[pos].script_pubkey.as_bytes()[6..38]).unwrap(); - // witness reserved value is in coinbase input witness - let witness_vec: Vec<_> = coinbase.input[0].witness.iter().collect(); - if witness_vec.len() == 1 && witness_vec[0].len() == 32 { - match self.witness_root() { - Some(witness_root) => return commitment == Self::compute_witness_commitment(&witness_root, witness_vec[0]), - None => return false, - } - } + + if self.txdata.is_empty() { + return false; + } + + let coinbase = &self.txdata[0]; + if !coinbase.is_coin_base() { + return false; + } + + // Commitment is in the last output that starts with magic bytes. + if let Some(pos) = coinbase.output.iter() + .rposition(|o| o.script_pubkey.len () >= 38 && o.script_pubkey[0..6] == MAGIC) + { + let commitment = WitnessCommitment::from_slice(&coinbase.output[pos].script_pubkey.as_bytes()[6..38]).unwrap(); + // Witness reserved value is in coinbase input witness. 
+ let witness_vec: Vec<_> = coinbase.input[0].witness.iter().collect(); + if witness_vec.len() == 1 && witness_vec[0].len() == 32 { + if let Some(witness_root) = self.witness_root() { + return commitment == Self::compute_witness_commitment(&witness_root, witness_vec[0]); } } } + false } - /// Compute the transaction merkle root. + /// Computes the transaction merkle root. pub fn compute_merkle_root(&self) -> Option { let hashes = self.txdata.iter().map(|obj| obj.txid().as_hash()); bitcoin_merkle_root(hashes).map(|h| h.into()) @@ -224,59 +228,77 @@ impl Block { self.compute_merkle_root() } - /// compute witness commitment for the transaction list - pub fn compute_witness_commitment (witness_root: &WitnessMerkleNode, witness_reserved_value: &[u8]) -> WitnessCommitment { + /// Computes the witness commitment for the block's transaction list. + pub fn compute_witness_commitment(witness_root: &WitnessMerkleNode, witness_reserved_value: &[u8]) -> WitnessCommitment { let mut encoder = WitnessCommitment::engine(); witness_root.consensus_encode(&mut encoder).expect("engines don't error"); encoder.input(witness_reserved_value); WitnessCommitment::from_engine(encoder) } - /// Merkle root of transactions hashed for witness + /// Computes the merkle root of transactions hashed for witness. pub fn witness_root(&self) -> Option { - let hashes = self.txdata.iter().enumerate().map(|(i, t)| + let hashes = self.txdata.iter().enumerate().map(|(i, t)| { if i == 0 { // Replace the first hash with zeroes. Wtxid::default().as_hash() } else { t.wtxid().as_hash() } - ); + }); bitcoin_merkle_root(hashes).map(|h| h.into()) } - /// The size of the header + the size of the varint with the tx count + the txs themselves - #[inline] - fn get_base_size(&self) -> usize { + /// base_size == size of header + size of encoded transaction count. + fn base_size(&self) -> usize { 80 + VarInt(self.txdata.len() as u64).len() } - /// Get the size of the block + /// Returns the size of the block. 
+    #[deprecated(since = "0.28.0", note = "Please use `block::size` instead.")] pub fn get_size(&self) -> usize { - let txs_size: usize = self.txdata.iter().map(Transaction::get_size).sum(); - self.get_base_size() + txs_size + self.size() } - /// Get the strippedsize of the block + /// Returns the size of the block. + /// + /// size == size of header + size of encoded transaction count + total size of transactions. + pub fn size(&self) -> usize { + let txs_size: usize = self.txdata.iter().map(Transaction::size).sum(); + self.base_size() + txs_size + } + + /// Returns the strippedsize of the block. + #[deprecated(since = "0.28.0", note = "Please use `block::strippedsize` instead.")] pub fn get_strippedsize(&self) -> usize { - let txs_size: usize = self.txdata.iter().map(Transaction::get_strippedsize).sum(); - self.get_base_size() + txs_size + self.strippedsize() } - /// Get the weight of the block + /// Returns the strippedsize of the block. + pub fn strippedsize(&self) -> usize { + let txs_size: usize = self.txdata.iter().map(Transaction::strippedsize).sum(); + self.base_size() + txs_size + } + + /// Returns the weight of the block. + #[deprecated(since = "0.28.0", note = "Please use `block::weight` instead.")] pub fn get_weight(&self) -> usize { - let base_weight = WITNESS_SCALE_FACTOR * self.get_base_size(); - let txs_weight: usize = self.txdata.iter().map(Transaction::get_weight).sum(); + self.weight() + } + + /// Returns the weight of the block. + pub fn weight(&self) -> usize { + let base_weight = WITNESS_SCALE_FACTOR * self.base_size(); + let txs_weight: usize = self.txdata.iter().map(Transaction::weight).sum(); base_weight + txs_weight } - /// Get the coinbase transaction, if one is present. + /// Returns the coinbase transaction, if one is present. pub fn coinbase(&self) -> Option<&Transaction> { self.txdata.first() } - /// Get the block height as encoded into the coinbase according to BIP34. - /// Returns [None] if not present. 
+ /// Returns the block height, as encoded in the coinbase transaction according to BIP34. pub fn bip34_block_height(&self) -> Result { // Citing the spec: // Add height as the first item in the coinbase transaction's scriptSig, @@ -393,9 +415,9 @@ mod tests { assert_eq!(real_decode.header.difficulty(Network::Bitcoin), 1); // [test] TODO: check the transaction data - assert_eq!(real_decode.get_size(), some_block.len()); - assert_eq!(real_decode.get_strippedsize(), some_block.len()); - assert_eq!(real_decode.get_weight(), some_block.len() * 4); + assert_eq!(real_decode.size(), some_block.len()); + assert_eq!(real_decode.strippedsize(), some_block.len()); + assert_eq!(real_decode.weight(), some_block.len() * 4); // should be also ok for a non-witness block as commitment is optional in that case assert!(real_decode.check_witness_commitment()); @@ -428,9 +450,9 @@ mod tests { assert_eq!(real_decode.header.difficulty(Network::Testnet), 2456598); // [test] TODO: check the transaction data - assert_eq!(real_decode.get_size(), segwit_block.len()); - assert_eq!(real_decode.get_strippedsize(), 4283); - assert_eq!(real_decode.get_weight(), 17168); + assert_eq!(real_decode.size(), segwit_block.len()); + assert_eq!(real_decode.strippedsize(), 4283); + assert_eq!(real_decode.weight(), 17168); assert!(real_decode.check_witness_commitment()); diff --git a/src/blockdata/opcodes.rs b/src/blockdata/opcodes.rs index 8d5e767c9..41b4796d3 100644 --- a/src/blockdata/opcodes.rs +++ b/src/blockdata/opcodes.rs @@ -30,7 +30,7 @@ use core::{fmt, convert::From}; // opcode enum. If you want to check ranges of opcodes, etc., // write an #[inline] helper function which casts to u8s. -/// A script Opcode +/// A script Opcode. #[derive(Copy, Clone, PartialEq, Eq)] pub struct All { code: u8, @@ -40,522 +40,522 @@ pub mod all { //! Constants associated with All type use super::All; - /// Push an empty array onto the stack + /// Push an empty array onto the stack. 
pub const OP_PUSHBYTES_0: All = All {code: 0x00}; - /// Push the next byte as an array onto the stack + /// Push the next byte as an array onto the stack. pub const OP_PUSHBYTES_1: All = All {code: 0x01}; - /// Push the next 2 bytes as an array onto the stack + /// Push the next 2 bytes as an array onto the stack. pub const OP_PUSHBYTES_2: All = All {code: 0x02}; - /// Push the next 2 bytes as an array onto the stack + /// Push the next 2 bytes as an array onto the stack. pub const OP_PUSHBYTES_3: All = All {code: 0x03}; - /// Push the next 4 bytes as an array onto the stack + /// Push the next 4 bytes as an array onto the stack. pub const OP_PUSHBYTES_4: All = All {code: 0x04}; - /// Push the next 5 bytes as an array onto the stack + /// Push the next 5 bytes as an array onto the stack. pub const OP_PUSHBYTES_5: All = All {code: 0x05}; - /// Push the next 6 bytes as an array onto the stack + /// Push the next 6 bytes as an array onto the stack. pub const OP_PUSHBYTES_6: All = All {code: 0x06}; - /// Push the next 7 bytes as an array onto the stack + /// Push the next 7 bytes as an array onto the stack. pub const OP_PUSHBYTES_7: All = All {code: 0x07}; - /// Push the next 8 bytes as an array onto the stack + /// Push the next 8 bytes as an array onto the stack. pub const OP_PUSHBYTES_8: All = All {code: 0x08}; - /// Push the next 9 bytes as an array onto the stack + /// Push the next 9 bytes as an array onto the stack. pub const OP_PUSHBYTES_9: All = All {code: 0x09}; - /// Push the next 10 bytes as an array onto the stack + /// Push the next 10 bytes as an array onto the stack. pub const OP_PUSHBYTES_10: All = All {code: 0x0a}; - /// Push the next 11 bytes as an array onto the stack + /// Push the next 11 bytes as an array onto the stack. pub const OP_PUSHBYTES_11: All = All {code: 0x0b}; - /// Push the next 12 bytes as an array onto the stack + /// Push the next 12 bytes as an array onto the stack. 
pub const OP_PUSHBYTES_12: All = All {code: 0x0c}; - /// Push the next 13 bytes as an array onto the stack + /// Push the next 13 bytes as an array onto the stack. pub const OP_PUSHBYTES_13: All = All {code: 0x0d}; - /// Push the next 14 bytes as an array onto the stack + /// Push the next 14 bytes as an array onto the stack. pub const OP_PUSHBYTES_14: All = All {code: 0x0e}; - /// Push the next 15 bytes as an array onto the stack + /// Push the next 15 bytes as an array onto the stack. pub const OP_PUSHBYTES_15: All = All {code: 0x0f}; - /// Push the next 16 bytes as an array onto the stack + /// Push the next 16 bytes as an array onto the stack. pub const OP_PUSHBYTES_16: All = All {code: 0x10}; - /// Push the next 17 bytes as an array onto the stack + /// Push the next 17 bytes as an array onto the stack. pub const OP_PUSHBYTES_17: All = All {code: 0x11}; - /// Push the next 18 bytes as an array onto the stack + /// Push the next 18 bytes as an array onto the stack. pub const OP_PUSHBYTES_18: All = All {code: 0x12}; - /// Push the next 19 bytes as an array onto the stack + /// Push the next 19 bytes as an array onto the stack. pub const OP_PUSHBYTES_19: All = All {code: 0x13}; - /// Push the next 20 bytes as an array onto the stack + /// Push the next 20 bytes as an array onto the stack. pub const OP_PUSHBYTES_20: All = All {code: 0x14}; - /// Push the next 21 bytes as an array onto the stack + /// Push the next 21 bytes as an array onto the stack. pub const OP_PUSHBYTES_21: All = All {code: 0x15}; - /// Push the next 22 bytes as an array onto the stack + /// Push the next 22 bytes as an array onto the stack. pub const OP_PUSHBYTES_22: All = All {code: 0x16}; - /// Push the next 23 bytes as an array onto the stack + /// Push the next 23 bytes as an array onto the stack. pub const OP_PUSHBYTES_23: All = All {code: 0x17}; - /// Push the next 24 bytes as an array onto the stack + /// Push the next 24 bytes as an array onto the stack. 
pub const OP_PUSHBYTES_24: All = All {code: 0x18}; - /// Push the next 25 bytes as an array onto the stack + /// Push the next 25 bytes as an array onto the stack. pub const OP_PUSHBYTES_25: All = All {code: 0x19}; - /// Push the next 26 bytes as an array onto the stack + /// Push the next 26 bytes as an array onto the stack. pub const OP_PUSHBYTES_26: All = All {code: 0x1a}; - /// Push the next 27 bytes as an array onto the stack + /// Push the next 27 bytes as an array onto the stack. pub const OP_PUSHBYTES_27: All = All {code: 0x1b}; - /// Push the next 28 bytes as an array onto the stack + /// Push the next 28 bytes as an array onto the stack. pub const OP_PUSHBYTES_28: All = All {code: 0x1c}; - /// Push the next 29 bytes as an array onto the stack + /// Push the next 29 bytes as an array onto the stack. pub const OP_PUSHBYTES_29: All = All {code: 0x1d}; - /// Push the next 30 bytes as an array onto the stack + /// Push the next 30 bytes as an array onto the stack. pub const OP_PUSHBYTES_30: All = All {code: 0x1e}; - /// Push the next 31 bytes as an array onto the stack + /// Push the next 31 bytes as an array onto the stack. pub const OP_PUSHBYTES_31: All = All {code: 0x1f}; - /// Push the next 32 bytes as an array onto the stack + /// Push the next 32 bytes as an array onto the stack. pub const OP_PUSHBYTES_32: All = All {code: 0x20}; - /// Push the next 33 bytes as an array onto the stack + /// Push the next 33 bytes as an array onto the stack. pub const OP_PUSHBYTES_33: All = All {code: 0x21}; - /// Push the next 34 bytes as an array onto the stack + /// Push the next 34 bytes as an array onto the stack. pub const OP_PUSHBYTES_34: All = All {code: 0x22}; - /// Push the next 35 bytes as an array onto the stack + /// Push the next 35 bytes as an array onto the stack. pub const OP_PUSHBYTES_35: All = All {code: 0x23}; - /// Push the next 36 bytes as an array onto the stack + /// Push the next 36 bytes as an array onto the stack. 
pub const OP_PUSHBYTES_36: All = All {code: 0x24}; - /// Push the next 37 bytes as an array onto the stack + /// Push the next 37 bytes as an array onto the stack. pub const OP_PUSHBYTES_37: All = All {code: 0x25}; - /// Push the next 38 bytes as an array onto the stack + /// Push the next 38 bytes as an array onto the stack. pub const OP_PUSHBYTES_38: All = All {code: 0x26}; - /// Push the next 39 bytes as an array onto the stack + /// Push the next 39 bytes as an array onto the stack. pub const OP_PUSHBYTES_39: All = All {code: 0x27}; - /// Push the next 40 bytes as an array onto the stack + /// Push the next 40 bytes as an array onto the stack. pub const OP_PUSHBYTES_40: All = All {code: 0x28}; - /// Push the next 41 bytes as an array onto the stack + /// Push the next 41 bytes as an array onto the stack. pub const OP_PUSHBYTES_41: All = All {code: 0x29}; - /// Push the next 42 bytes as an array onto the stack + /// Push the next 42 bytes as an array onto the stack. pub const OP_PUSHBYTES_42: All = All {code: 0x2a}; - /// Push the next 43 bytes as an array onto the stack + /// Push the next 43 bytes as an array onto the stack. pub const OP_PUSHBYTES_43: All = All {code: 0x2b}; - /// Push the next 44 bytes as an array onto the stack + /// Push the next 44 bytes as an array onto the stack. pub const OP_PUSHBYTES_44: All = All {code: 0x2c}; - /// Push the next 45 bytes as an array onto the stack + /// Push the next 45 bytes as an array onto the stack. pub const OP_PUSHBYTES_45: All = All {code: 0x2d}; - /// Push the next 46 bytes as an array onto the stack + /// Push the next 46 bytes as an array onto the stack. pub const OP_PUSHBYTES_46: All = All {code: 0x2e}; - /// Push the next 47 bytes as an array onto the stack + /// Push the next 47 bytes as an array onto the stack. pub const OP_PUSHBYTES_47: All = All {code: 0x2f}; - /// Push the next 48 bytes as an array onto the stack + /// Push the next 48 bytes as an array onto the stack. 
pub const OP_PUSHBYTES_48: All = All {code: 0x30}; - /// Push the next 49 bytes as an array onto the stack + /// Push the next 49 bytes as an array onto the stack. pub const OP_PUSHBYTES_49: All = All {code: 0x31}; - /// Push the next 50 bytes as an array onto the stack + /// Push the next 50 bytes as an array onto the stack. pub const OP_PUSHBYTES_50: All = All {code: 0x32}; - /// Push the next 51 bytes as an array onto the stack + /// Push the next 51 bytes as an array onto the stack. pub const OP_PUSHBYTES_51: All = All {code: 0x33}; - /// Push the next 52 bytes as an array onto the stack + /// Push the next 52 bytes as an array onto the stack. pub const OP_PUSHBYTES_52: All = All {code: 0x34}; - /// Push the next 53 bytes as an array onto the stack + /// Push the next 53 bytes as an array onto the stack. pub const OP_PUSHBYTES_53: All = All {code: 0x35}; - /// Push the next 54 bytes as an array onto the stack + /// Push the next 54 bytes as an array onto the stack. pub const OP_PUSHBYTES_54: All = All {code: 0x36}; - /// Push the next 55 bytes as an array onto the stack + /// Push the next 55 bytes as an array onto the stack. pub const OP_PUSHBYTES_55: All = All {code: 0x37}; - /// Push the next 56 bytes as an array onto the stack + /// Push the next 56 bytes as an array onto the stack. pub const OP_PUSHBYTES_56: All = All {code: 0x38}; - /// Push the next 57 bytes as an array onto the stack + /// Push the next 57 bytes as an array onto the stack. pub const OP_PUSHBYTES_57: All = All {code: 0x39}; - /// Push the next 58 bytes as an array onto the stack + /// Push the next 58 bytes as an array onto the stack. pub const OP_PUSHBYTES_58: All = All {code: 0x3a}; - /// Push the next 59 bytes as an array onto the stack + /// Push the next 59 bytes as an array onto the stack. pub const OP_PUSHBYTES_59: All = All {code: 0x3b}; - /// Push the next 60 bytes as an array onto the stack + /// Push the next 60 bytes as an array onto the stack. 
pub const OP_PUSHBYTES_60: All = All {code: 0x3c}; - /// Push the next 61 bytes as an array onto the stack + /// Push the next 61 bytes as an array onto the stack. pub const OP_PUSHBYTES_61: All = All {code: 0x3d}; - /// Push the next 62 bytes as an array onto the stack + /// Push the next 62 bytes as an array onto the stack. pub const OP_PUSHBYTES_62: All = All {code: 0x3e}; - /// Push the next 63 bytes as an array onto the stack + /// Push the next 63 bytes as an array onto the stack. pub const OP_PUSHBYTES_63: All = All {code: 0x3f}; - /// Push the next 64 bytes as an array onto the stack + /// Push the next 64 bytes as an array onto the stack. pub const OP_PUSHBYTES_64: All = All {code: 0x40}; - /// Push the next 65 bytes as an array onto the stack + /// Push the next 65 bytes as an array onto the stack. pub const OP_PUSHBYTES_65: All = All {code: 0x41}; - /// Push the next 66 bytes as an array onto the stack + /// Push the next 66 bytes as an array onto the stack. pub const OP_PUSHBYTES_66: All = All {code: 0x42}; - /// Push the next 67 bytes as an array onto the stack + /// Push the next 67 bytes as an array onto the stack. pub const OP_PUSHBYTES_67: All = All {code: 0x43}; - /// Push the next 68 bytes as an array onto the stack + /// Push the next 68 bytes as an array onto the stack. pub const OP_PUSHBYTES_68: All = All {code: 0x44}; - /// Push the next 69 bytes as an array onto the stack + /// Push the next 69 bytes as an array onto the stack. pub const OP_PUSHBYTES_69: All = All {code: 0x45}; - /// Push the next 70 bytes as an array onto the stack + /// Push the next 70 bytes as an array onto the stack. pub const OP_PUSHBYTES_70: All = All {code: 0x46}; - /// Push the next 71 bytes as an array onto the stack + /// Push the next 71 bytes as an array onto the stack. pub const OP_PUSHBYTES_71: All = All {code: 0x47}; - /// Push the next 72 bytes as an array onto the stack + /// Push the next 72 bytes as an array onto the stack. 
pub const OP_PUSHBYTES_72: All = All {code: 0x48}; - /// Push the next 73 bytes as an array onto the stack + /// Push the next 73 bytes as an array onto the stack. pub const OP_PUSHBYTES_73: All = All {code: 0x49}; - /// Push the next 74 bytes as an array onto the stack + /// Push the next 74 bytes as an array onto the stack. pub const OP_PUSHBYTES_74: All = All {code: 0x4a}; - /// Push the next 75 bytes as an array onto the stack + /// Push the next 75 bytes as an array onto the stack. pub const OP_PUSHBYTES_75: All = All {code: 0x4b}; - /// Read the next byte as N; push the next N bytes as an array onto the stack + /// Read the next byte as N; push the next N bytes as an array onto the stack. pub const OP_PUSHDATA1: All = All {code: 0x4c}; - /// Read the next 2 bytes as N; push the next N bytes as an array onto the stack + /// Read the next 2 bytes as N; push the next N bytes as an array onto the stack. pub const OP_PUSHDATA2: All = All {code: 0x4d}; - /// Read the next 4 bytes as N; push the next N bytes as an array onto the stack + /// Read the next 4 bytes as N; push the next N bytes as an array onto the stack. pub const OP_PUSHDATA4: All = All {code: 0x4e}; - /// Push the array `0x81` onto the stack + /// Push the array `0x81` onto the stack. pub const OP_PUSHNUM_NEG1: All = All {code: 0x4f}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RESERVED: All = All {code: 0x50}; - /// Push the array `0x01` onto the stack + /// Push the array `0x01` onto the stack. pub const OP_PUSHNUM_1: All = All {code: 0x51}; - /// Push the array `0x02` onto the stack + /// Push the array `0x02` onto the stack. pub const OP_PUSHNUM_2: All = All {code: 0x52}; - /// Push the array `0x03` onto the stack + /// Push the array `0x03` onto the stack. pub const OP_PUSHNUM_3: All = All {code: 0x53}; - /// Push the array `0x04` onto the stack + /// Push the array `0x04` onto the stack. 
pub const OP_PUSHNUM_4: All = All {code: 0x54}; - /// Push the array `0x05` onto the stack + /// Push the array `0x05` onto the stack. pub const OP_PUSHNUM_5: All = All {code: 0x55}; - /// Push the array `0x06` onto the stack + /// Push the array `0x06` onto the stack. pub const OP_PUSHNUM_6: All = All {code: 0x56}; - /// Push the array `0x07` onto the stack + /// Push the array `0x07` onto the stack. pub const OP_PUSHNUM_7: All = All {code: 0x57}; - /// Push the array `0x08` onto the stack + /// Push the array `0x08` onto the stack. pub const OP_PUSHNUM_8: All = All {code: 0x58}; - /// Push the array `0x09` onto the stack + /// Push the array `0x09` onto the stack. pub const OP_PUSHNUM_9: All = All {code: 0x59}; - /// Push the array `0x0a` onto the stack + /// Push the array `0x0a` onto the stack. pub const OP_PUSHNUM_10: All = All {code: 0x5a}; - /// Push the array `0x0b` onto the stack + /// Push the array `0x0b` onto the stack. pub const OP_PUSHNUM_11: All = All {code: 0x5b}; - /// Push the array `0x0c` onto the stack + /// Push the array `0x0c` onto the stack. pub const OP_PUSHNUM_12: All = All {code: 0x5c}; - /// Push the array `0x0d` onto the stack + /// Push the array `0x0d` onto the stack. pub const OP_PUSHNUM_13: All = All {code: 0x5d}; - /// Push the array `0x0e` onto the stack + /// Push the array `0x0e` onto the stack. pub const OP_PUSHNUM_14: All = All {code: 0x5e}; - /// Push the array `0x0f` onto the stack + /// Push the array `0x0f` onto the stack. pub const OP_PUSHNUM_15: All = All {code: 0x5f}; - /// Push the array `0x10` onto the stack + /// Push the array `0x10` onto the stack. pub const OP_PUSHNUM_16: All = All {code: 0x60}; - /// Does nothing + /// Does nothing. pub const OP_NOP: All = All {code: 0x61}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. 
pub const OP_VER: All = All {code: 0x62}; - /// Pop and execute the next statements if a nonzero element was popped + /// Pop and execute the next statements if a nonzero element was popped. pub const OP_IF: All = All {code: 0x63}; - /// Pop and execute the next statements if a zero element was popped + /// Pop and execute the next statements if a zero element was popped. pub const OP_NOTIF: All = All {code: 0x64}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_VERIF: All = All {code: 0x65}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_VERNOTIF: All = All {code: 0x66}; /// Execute statements if those after the previous OP_IF were not, and vice-versa. /// If there is no previous OP_IF, this acts as a RETURN. pub const OP_ELSE: All = All {code: 0x67}; - /// Pop and execute the next statements if a zero element was popped + /// Pop and execute the next statements if a zero element was popped. pub const OP_ENDIF: All = All {code: 0x68}; - /// If the top value is zero or the stack is empty, fail; otherwise, pop the stack + /// If the top value is zero or the stack is empty, fail; otherwise, pop the stack. pub const OP_VERIFY: All = All {code: 0x69}; - /// Fail the script immediately. (Must be executed.) + /// Fail the script immediately. (Must be executed.). pub const OP_RETURN: All = All {code: 0x6a}; - /// Pop one element from the main stack onto the alt stack + /// Pop one element from the main stack onto the alt stack. pub const OP_TOALTSTACK: All = All {code: 0x6b}; - /// Pop one element from the alt stack onto the main stack + /// Pop one element from the alt stack onto the main stack. pub const OP_FROMALTSTACK: All = All {code: 0x6c}; - /// Drops the top two stack items + /// Drops the top two stack items. 
pub const OP_2DROP: All = All {code: 0x6d}; - /// Duplicates the top two stack items as AB -> ABAB + /// Duplicates the top two stack items as AB -> ABAB. pub const OP_2DUP: All = All {code: 0x6e}; - /// Duplicates the two three stack items as ABC -> ABCABC + /// Duplicates the two three stack items as ABC -> ABCABC. pub const OP_3DUP: All = All {code: 0x6f}; /// Copies the two stack items of items two spaces back to - /// the front, as xxAB -> ABxxAB + /// the front, as xxAB -> ABxxAB. pub const OP_2OVER: All = All {code: 0x70}; /// Moves the two stack items four spaces back to the front, - /// as xxxxAB -> ABxxxx + /// as xxxxAB -> ABxxxx. pub const OP_2ROT: All = All {code: 0x71}; - /// Swaps the top two pairs, as ABCD -> CDAB + /// Swaps the top two pairs, as ABCD -> CDAB. pub const OP_2SWAP: All = All {code: 0x72}; - /// Duplicate the top stack element unless it is zero + /// Duplicate the top stack element unless it is zero. pub const OP_IFDUP: All = All {code: 0x73}; - /// Push the current number of stack items onto the stack + /// Push the current number of stack items onto the stack. pub const OP_DEPTH: All = All {code: 0x74}; - /// Drops the top stack item + /// Drops the top stack item. pub const OP_DROP: All = All {code: 0x75}; - /// Duplicates the top stack item + /// Duplicates the top stack item. pub const OP_DUP: All = All {code: 0x76}; - /// Drops the second-to-top stack item + /// Drops the second-to-top stack item. pub const OP_NIP: All = All {code: 0x77}; - /// Copies the second-to-top stack item, as xA -> AxA + /// Copies the second-to-top stack item, as xA -> AxA. pub const OP_OVER: All = All {code: 0x78}; - /// Pop the top stack element as N. Copy the Nth stack element to the top + /// Pop the top stack element as N. Copy the Nth stack element to the top. pub const OP_PICK: All = All {code: 0x79}; - /// Pop the top stack element as N. Move the Nth stack element to the top + /// Pop the top stack element as N. 
Move the Nth stack element to the top. pub const OP_ROLL: All = All {code: 0x7a}; - /// Rotate the top three stack items, as [top next1 next2] -> [next2 top next1] + /// Rotate the top three stack items, as [top next1 next2] -> [next2 top next1]. pub const OP_ROT: All = All {code: 0x7b}; - /// Swap the top two stack items + /// Swap the top two stack items. pub const OP_SWAP: All = All {code: 0x7c}; - /// Copy the top stack item to before the second item, as [top next] -> [top next top] + /// Copy the top stack item to before the second item, as [top next] -> [top next top]. pub const OP_TUCK: All = All {code: 0x7d}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_CAT: All = All {code: 0x7e}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_SUBSTR: All = All {code: 0x7f}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_LEFT: All = All {code: 0x80}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_RIGHT: All = All {code: 0x81}; - /// Pushes the length of the top stack item onto the stack + /// Pushes the length of the top stack item onto the stack. pub const OP_SIZE: All = All {code: 0x82}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_INVERT: All = All {code: 0x83}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. 
pub const OP_AND: All = All {code: 0x84}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_OR: All = All {code: 0x85}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_XOR: All = All {code: 0x86}; - /// Pushes 1 if the inputs are exactly equal, 0 otherwise + /// Pushes 1 if the inputs are exactly equal, 0 otherwise. pub const OP_EQUAL: All = All {code: 0x87}; - /// Returns success if the inputs are exactly equal, failure otherwise + /// Returns success if the inputs are exactly equal, failure otherwise. pub const OP_EQUALVERIFY: All = All {code: 0x88}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RESERVED1: All = All {code: 0x89}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RESERVED2: All = All {code: 0x8a}; - /// Increment the top stack element in place + /// Increment the top stack element in place. pub const OP_1ADD: All = All {code: 0x8b}; - /// Decrement the top stack element in place + /// Decrement the top stack element in place. pub const OP_1SUB: All = All {code: 0x8c}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_2MUL: All = All {code: 0x8d}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_2DIV: All = All {code: 0x8e}; - /// Multiply the top stack item by -1 in place + /// Multiply the top stack item by -1 in place. pub const OP_NEGATE: All = All {code: 0x8f}; - /// Absolute value the top stack item in place + /// Absolute value the top stack item in place. 
pub const OP_ABS: All = All {code: 0x90}; - /// Map 0 to 1 and everything else to 0, in place + /// Map 0 to 1 and everything else to 0, in place. pub const OP_NOT: All = All {code: 0x91}; - /// Map 0 to 0 and everything else to 1, in place + /// Map 0 to 0 and everything else to 1, in place. pub const OP_0NOTEQUAL: All = All {code: 0x92}; - /// Pop two stack items and push their sum + /// Pop two stack items and push their sum. pub const OP_ADD: All = All {code: 0x93}; - /// Pop two stack items and push the second minus the top + /// Pop two stack items and push the second minus the top. pub const OP_SUB: All = All {code: 0x94}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_MUL: All = All {code: 0x95}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_DIV: All = All {code: 0x96}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_MOD: All = All {code: 0x97}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_LSHIFT: All = All {code: 0x98}; - /// Fail the script unconditionally, does not even need to be executed + /// Fail the script unconditionally, does not even need to be executed. pub const OP_RSHIFT: All = All {code: 0x99}; - /// Pop the top two stack items and push 1 if both are nonzero, else push 0 + /// Pop the top two stack items and push 1 if both are nonzero, else push 0. pub const OP_BOOLAND: All = All {code: 0x9a}; - /// Pop the top two stack items and push 1 if either is nonzero, else push 0 + /// Pop the top two stack items and push 1 if either is nonzero, else push 0. 
pub const OP_BOOLOR: All = All {code: 0x9b}; - /// Pop the top two stack items and push 1 if both are numerically equal, else push 0 + /// Pop the top two stack items and push 1 if both are numerically equal, else push 0. pub const OP_NUMEQUAL: All = All {code: 0x9c}; - /// Pop the top two stack items and return success if both are numerically equal, else return failure + /// Pop the top two stack items and return success if both are numerically equal, else return failure. pub const OP_NUMEQUALVERIFY: All = All {code: 0x9d}; - /// Pop the top two stack items and push 0 if both are numerically equal, else push 1 + /// Pop the top two stack items and push 0 if both are numerically equal, else push 1. pub const OP_NUMNOTEQUAL: All = All {code: 0x9e}; - /// Pop the top two items; push 1 if the second is less than the top, 0 otherwise + /// Pop the top two items; push 1 if the second is less than the top, 0 otherwise. pub const OP_LESSTHAN : All = All {code: 0x9f}; - /// Pop the top two items; push 1 if the second is greater than the top, 0 otherwise + /// Pop the top two items; push 1 if the second is greater than the top, 0 otherwise. pub const OP_GREATERTHAN : All = All {code: 0xa0}; - /// Pop the top two items; push 1 if the second is <= the top, 0 otherwise + /// Pop the top two items; push 1 if the second is <= the top, 0 otherwise. pub const OP_LESSTHANOREQUAL : All = All {code: 0xa1}; - /// Pop the top two items; push 1 if the second is >= the top, 0 otherwise + /// Pop the top two items; push 1 if the second is >= the top, 0 otherwise. pub const OP_GREATERTHANOREQUAL : All = All {code: 0xa2}; - /// Pop the top two items; push the smaller + /// Pop the top two items; push the smaller. pub const OP_MIN: All = All {code: 0xa3}; - /// Pop the top two items; push the larger + /// Pop the top two items; push the larger. 
pub const OP_MAX: All = All {code: 0xa4}; - /// Pop the top three items; if the top is >= the second and < the third, push 1, otherwise push 0 + /// Pop the top three items; if the top is >= the second and < the third, push 1, otherwise push 0. pub const OP_WITHIN: All = All {code: 0xa5}; - /// Pop the top stack item and push its RIPEMD160 hash + /// Pop the top stack item and push its RIPEMD160 hash. pub const OP_RIPEMD160: All = All {code: 0xa6}; - /// Pop the top stack item and push its SHA1 hash + /// Pop the top stack item and push its SHA1 hash. pub const OP_SHA1: All = All {code: 0xa7}; - /// Pop the top stack item and push its SHA256 hash + /// Pop the top stack item and push its SHA256 hash. pub const OP_SHA256: All = All {code: 0xa8}; - /// Pop the top stack item and push its RIPEMD(SHA256) hash + /// Pop the top stack item and push its RIPEMD(SHA256) hash. pub const OP_HASH160: All = All {code: 0xa9}; - /// Pop the top stack item and push its SHA256(SHA256) hash + /// Pop the top stack item and push its SHA256(SHA256) hash. pub const OP_HASH256: All = All {code: 0xaa}; - /// Ignore this and everything preceding when deciding what to sign when signature-checking + /// Ignore this and everything preceding when deciding what to sign when signature-checking. pub const OP_CODESEPARATOR: All = All {code: 0xab}; - /// pushing 1/0 for success/failure + /// pushing 1/0 for success/failure. pub const OP_CHECKSIG: All = All {code: 0xac}; - /// returning success/failure + /// returning success/failure. pub const OP_CHECKSIGVERIFY: All = All {code: 0xad}; /// Pop N, N pubkeys, M, M signatures, a dummy (due to bug in reference code), and verify that all M signatures are valid. - /// Push 1 for "all valid", 0 otherwise + /// Push 1 for "all valid", 0 otherwise. pub const OP_CHECKMULTISIG: All = All {code: 0xae}; - /// Like the above but return success/failure + /// Like the above but return success/failure. 
pub const OP_CHECKMULTISIGVERIFY: All = All {code: 0xaf}; - /// Does nothing + /// Does nothing. pub const OP_NOP1: All = All {code: 0xb0}; /// pub const OP_CLTV: All = All {code: 0xb1}; /// pub const OP_CSV: All = All {code: 0xb2}; - /// Does nothing + /// Does nothing. pub const OP_NOP4: All = All {code: 0xb3}; - /// Does nothing + /// Does nothing. pub const OP_NOP5: All = All {code: 0xb4}; - /// Does nothing + /// Does nothing. pub const OP_NOP6: All = All {code: 0xb5}; - /// Does nothing + /// Does nothing. pub const OP_NOP7: All = All {code: 0xb6}; - /// Does nothing + /// Does nothing. pub const OP_NOP8: All = All {code: 0xb7}; - /// Does nothing + /// Does nothing. pub const OP_NOP9: All = All {code: 0xb8}; - /// Does nothing + /// Does nothing. pub const OP_NOP10: All = All {code: 0xb9}; // Every other opcode acts as OP_RETURN - /// OP_CHECKSIGADD post tapscript + /// OP_CHECKSIGADD post tapscript. pub const OP_CHECKSIGADD: All = All {code: 0xba}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_187: All = All {code: 0xbb}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_188: All = All {code: 0xbc}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_189: All = All {code: 0xbd}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_190: All = All {code: 0xbe}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_191: All = All {code: 0xbf}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_192: All = All {code: 0xc0}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_193: All = All {code: 0xc1}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_194: All = All {code: 0xc2}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_195: All = All {code: 0xc3}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. 
pub const OP_RETURN_196: All = All {code: 0xc4}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_197: All = All {code: 0xc5}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_198: All = All {code: 0xc6}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_199: All = All {code: 0xc7}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_200: All = All {code: 0xc8}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_201: All = All {code: 0xc9}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_202: All = All {code: 0xca}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_203: All = All {code: 0xcb}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_204: All = All {code: 0xcc}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_205: All = All {code: 0xcd}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_206: All = All {code: 0xce}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_207: All = All {code: 0xcf}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_208: All = All {code: 0xd0}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_209: All = All {code: 0xd1}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_210: All = All {code: 0xd2}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_211: All = All {code: 0xd3}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_212: All = All {code: 0xd4}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_213: All = All {code: 0xd5}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. 
pub const OP_RETURN_214: All = All {code: 0xd6}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_215: All = All {code: 0xd7}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_216: All = All {code: 0xd8}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_217: All = All {code: 0xd9}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_218: All = All {code: 0xda}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_219: All = All {code: 0xdb}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_220: All = All {code: 0xdc}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_221: All = All {code: 0xdd}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_222: All = All {code: 0xde}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_223: All = All {code: 0xdf}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_224: All = All {code: 0xe0}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_225: All = All {code: 0xe1}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_226: All = All {code: 0xe2}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_227: All = All {code: 0xe3}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_228: All = All {code: 0xe4}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_229: All = All {code: 0xe5}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_230: All = All {code: 0xe6}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_231: All = All {code: 0xe7}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. 
pub const OP_RETURN_232: All = All {code: 0xe8}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_233: All = All {code: 0xe9}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_234: All = All {code: 0xea}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_235: All = All {code: 0xeb}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_236: All = All {code: 0xec}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_237: All = All {code: 0xed}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_238: All = All {code: 0xee}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_239: All = All {code: 0xef}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_240: All = All {code: 0xf0}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_241: All = All {code: 0xf1}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_242: All = All {code: 0xf2}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_243: All = All {code: 0xf3}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_244: All = All {code: 0xf4}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_245: All = All {code: 0xf5}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_246: All = All {code: 0xf6}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_247: All = All {code: 0xf7}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_248: All = All {code: 0xf8}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_249: All = All {code: 0xf9}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. 
pub const OP_RETURN_250: All = All {code: 0xfa}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_251: All = All {code: 0xfb}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_252: All = All {code: 0xfc}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_253: All = All {code: 0xfd}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_RETURN_254: All = All {code: 0xfe}; - /// Synonym for OP_RETURN + /// Synonym for OP_RETURN. pub const OP_INVALIDOPCODE: All = All {code: 0xff}; } @@ -632,10 +632,10 @@ impl fmt::Debug for All { all::OP_NUMEQUAL => write!(f, "NUMEQUAL"), all::OP_NUMEQUALVERIFY => write!(f, "NUMEQUALVERIFY"), all::OP_NUMNOTEQUAL => write!(f, "NUMNOTEQUAL"), - all::OP_LESSTHAN => write!(f, "LESSTHAN"), - all::OP_GREATERTHAN => write!(f, "GREATERTHAN"), - all::OP_LESSTHANOREQUAL => write!(f, "LESSTHANOREQUAL"), - all::OP_GREATERTHANOREQUAL => write!(f, "GREATERTHANOREQUAL"), + all::OP_LESSTHAN => write!(f, "LESSTHAN"), + all::OP_GREATERTHAN => write!(f, "GREATERTHAN"), + all::OP_LESSTHANOREQUAL => write!(f, "LESSTHANOREQUAL"), + all::OP_GREATERTHANOREQUAL => write!(f, "GREATERTHANOREQUAL"), all::OP_MIN => write!(f, "MIN"), all::OP_MAX => write!(f, "MAX"), all::OP_WITHIN => write!(f, "WITHIN"), @@ -659,19 +659,20 @@ impl fmt::Debug for All { } } - -/// Classification context for the opcode. Some opcodes like `OP_RESERVED` -/// abort the script in [`ClassifyContext::Legacy`] context, but will act as OP_SUCCESS in tapscript +/// Classification context for the opcode. +/// +/// Some opcodes like [`all::OP_RESERVED`] abort the script in `ClassifyContext::Legacy` context, +/// but will act as `OP_SUCCESSx` in `ClassifyContext::TapScript` (see BIP342 for full list). #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum ClassifyContext { - /// Opcode used in tapscript context + /// Opcode used in tapscript context. 
TapScript, - /// Opcode used in legacy context + /// Opcode used in legacy context. Legacy, } impl All { - /// Classifies an Opcode into a broad class + /// Classifies an Opcode into a broad class. #[inline] pub fn classify(self, ctx: ClassifyContext) -> Class { use self::all::*; @@ -747,7 +748,6 @@ impl From for All { } } - display_from_debug!(All); #[cfg(feature = "serde")] @@ -761,31 +761,31 @@ impl serde::Serialize for All { } } -/// Empty stack is also FALSE +/// Empty stack is also FALSE. pub static OP_FALSE: All = all::OP_PUSHBYTES_0; -/// Number 1 is also TRUE +/// Number 1 is also TRUE. pub static OP_TRUE: All = all::OP_PUSHNUM_1; -/// previously called OP_NOP2 +/// Previously called OP_NOP2. pub static OP_NOP2: All = all::OP_CLTV; -/// previously called OP_NOP3 +/// Previously called OP_NOP3. pub static OP_NOP3: All = all::OP_CSV; -/// Broad categories of opcodes with similar behavior +/// Broad categories of opcodes with similar behavior. #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum Class { - /// Pushes the given number onto the stack + /// Pushes the given number onto the stack. PushNum(i32), - /// Pushes the given number of bytes onto the stack + /// Pushes the given number of bytes onto the stack. PushBytes(u32), - /// Fails the script if executed + /// Fails the script if executed. ReturnOp, - /// Succeeds the script even if not executed + /// Succeeds the script even if not executed. SuccessOp, - /// Fails the script even if not executed + /// Fails the script even if not executed. IllegalOp, - /// Does nothing + /// Does nothing. NoOp, - /// Any opcode not covered above + /// Any opcode not covered above. Ordinary(Ordinary) } @@ -830,7 +830,7 @@ macro_rules! ordinary_opcode { ); } -// "Ordinary" opcodes -- should be 60 of these +// "Ordinary" opcodes -- should be 61 of these ordinary_opcode! 
{ // pushdata OP_PUSHDATA1, OP_PUSHDATA2, OP_PUSHDATA4, @@ -860,8 +860,8 @@ impl Ordinary { /// Encode as a byte #[inline] pub fn into_u8(self) -> u8 { - self as u8 - } + self as u8 + } } #[cfg(test)] @@ -1163,4 +1163,3 @@ mod tests { assert_eq!(unique.len(), 256); } } - diff --git a/src/blockdata/script.rs b/src/blockdata/script.rs index 81d7568b8..5d7343cd6 100644 --- a/src/blockdata/script.rs +++ b/src/blockdata/script.rs @@ -27,6 +27,7 @@ use prelude::*; use io; use core::{fmt, default::Default}; +use core::ops::Index; #[cfg(feature = "serde")] use serde; @@ -37,7 +38,7 @@ use hashes::{Hash, hex}; use policy::DUST_RELAY_TX_FEE; #[cfg(feature="bitcoinconsensus")] use bitcoinconsensus; #[cfg(feature="bitcoinconsensus")] use core::convert::From; -#[cfg(feature="bitcoinconsensus")] use OutPoint; +use OutPoint; use util::key::PublicKey; use util::address::WitnessVersion; @@ -49,6 +50,18 @@ use schnorr::{TapTweak, TweakedPublicKey, UntweakedPublicKey}; #[derive(Clone, Default, PartialOrd, Ord, PartialEq, Eq, Hash)] pub struct Script(Box<[u8]>); +impl Index for Script +where + [u8]: Index, +{ + type Output = <[u8] as Index>::Output; + + #[inline] + fn index(&self, index: I) -> &Self::Output { + &self.0[index] + } +} + impl AsRef<[u8]> for Script { fn as_ref(&self) -> &[u8] { &self.0 @@ -89,9 +102,8 @@ impl fmt::UpperHex for Script { impl hex::FromHex for Script { fn from_byte_iter(iter: I) -> Result - where I: Iterator> + - ExactSizeIterator + - DoubleEndedIterator, + where + I: Iterator> + ExactSizeIterator + DoubleEndedIterator, { Vec::from_byte_iter(iter).map(|v| Script(Box::<[u8]>::from(v))) } @@ -109,6 +121,18 @@ impl ::core::str::FromStr for Script { pub struct Builder(Vec, Option); display_from_debug!(Builder); +impl Index for Builder +where + Vec: Index, +{ + type Output = as Index>::Output; + + #[inline] + fn index(&self, index: I) -> &Self::Output { + &self.0[index] + } +} + /// Ways that a script might fail. 
Not everything is split up as /// much as it could be; patches welcome if more detailed errors /// would help you. @@ -122,30 +146,40 @@ pub enum Error { /// Tried to read an array off the stack as a number when it was more than 4 bytes NumericOverflow, /// Error validating the script with bitcoinconsensus library - #[cfg(feature = "bitcoinconsensus")] - #[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))] - BitcoinConsensus(bitcoinconsensus::Error), + BitcoinConsensus(BitcoinConsensusError), /// Can not find the spent output - #[cfg(feature = "bitcoinconsensus")] - #[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))] UnknownSpentOutput(OutPoint), /// Can not serialize the spending transaction - #[cfg(feature = "bitcoinconsensus")] - #[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))] SerializationError } +/// A [`bitcoinconsensus::Error`] alias. Exists to enable the compiler to ensure `bitcoinconsensus` +/// feature gating is correct. +#[cfg(feature = "bitcoinconsensus")] +#[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))] +pub type BitcoinConsensusError = bitcoinconsensus::Error; + +/// Dummy error type used when `bitcoinconsensus` feature is not enabled. 
+#[cfg(not(feature = "bitcoinconsensus"))] +#[cfg_attr(docsrs, doc(cfg(not(feature = "bitcoinconsensus"))))] +#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Clone, Copy)] +pub struct BitcoinConsensusError { + _uninhabited: Uninhabited, +} + +#[cfg(not(feature = "bitcoinconsensus"))] +#[cfg_attr(docsrs, doc(cfg(not(feature = "bitcoinconsensus"))))] +#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Clone, Copy)] +enum Uninhabited {} + impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let str = match *self { Error::NonMinimalPush => "non-minimal datapush", Error::EarlyEndOfScript => "unexpected end of script", Error::NumericOverflow => "numeric overflow (number on stack larger than 4 bytes)", - #[cfg(feature="bitcoinconsensus")] Error::BitcoinConsensus(ref _n) => "bitcoinconsensus verification failed", - #[cfg(feature="bitcoinconsensus")] Error::UnknownSpentOutput(ref _point) => "unknown spent output Transaction::verify()", - #[cfg(feature="bitcoinconsensus")] Error::SerializationError => "can not serialize the spending transaction in Transaction::verify()", }; f.write_str(str) @@ -172,13 +206,11 @@ impl From for Error { } } -#[cfg(feature="bitcoinconsensus")] +#[cfg(feature = "bitcoinconsensus")] #[doc(hidden)] impl From for Error { fn from(err: bitcoinconsensus::Error) -> Error { - match err { - _ => Error::BitcoinConsensus(err) - } + Error::BitcoinConsensus(err) } } /// Helper to encode an integer in script format @@ -284,10 +316,10 @@ fn read_uint_iter(data: &mut ::core::slice::Iter<'_, u8>, size: usize) -> Result } impl Script { - /// Creates a new empty script + /// Creates a new empty script. pub fn new() -> Script { Script(vec![].into_boxed_slice()) } - /// Generates P2PK-type of scriptPubkey + /// Generates P2PK-type of scriptPubkey. 
pub fn new_p2pk(pubkey: &PublicKey) -> Script { Builder::new() .push_key(pubkey) @@ -295,7 +327,7 @@ impl Script { .into_script() } - /// Generates P2PKH-type of scriptPubkey + /// Generates P2PKH-type of scriptPubkey. pub fn new_p2pkh(pubkey_hash: &PubkeyHash) -> Script { Builder::new() .push_opcode(opcodes::all::OP_DUP) @@ -306,7 +338,7 @@ impl Script { .into_script() } - /// Generates P2SH-type of scriptPubkey with a given hash of the redeem script + /// Generates P2SH-type of scriptPubkey with a given hash of the redeem script. pub fn new_p2sh(script_hash: &ScriptHash) -> Script { Builder::new() .push_opcode(opcodes::all::OP_HASH160) @@ -315,24 +347,24 @@ impl Script { .into_script() } - /// Generates P2WPKH-type of scriptPubkey + /// Generates P2WPKH-type of scriptPubkey. #[deprecated(since = "0.28.0", note = "use Script::new_v0_p2wpkh method instead")] pub fn new_v0_wpkh(pubkey_hash: &WPubkeyHash) -> Script { Script::new_v0_p2wpkh(pubkey_hash) } - /// Generates P2WPKH-type of scriptPubkey + /// Generates P2WPKH-type of scriptPubkey. pub fn new_v0_p2wpkh(pubkey_hash: &WPubkeyHash) -> Script { Script::new_witness_program(WitnessVersion::V0, &pubkey_hash[..]) } - /// Generates P2WSH-type of scriptPubkey with a given hash of the redeem script + /// Generates P2WSH-type of scriptPubkey with a given hash of the redeem script. #[deprecated(since = "0.28.0", note = "use Script::new_v0_p2wsh method instead")] pub fn new_v0_wsh(script_hash: &WScriptHash) -> Script { Script::new_v0_p2wsh(script_hash) } - /// Generates P2WSH-type of scriptPubkey with a given hash of the redeem script + /// Generates P2WSH-type of scriptPubkey with a given hash of the redeem script. 
pub fn new_v0_p2wsh(script_hash: &WScriptHash) -> Script { Script::new_witness_program(WitnessVersion::V0, &script_hash[..]) } @@ -349,7 +381,7 @@ impl Script { Script::new_witness_program(WitnessVersion::V1, &output_key.serialize()) } - /// Generates P2WSH-type of scriptPubkey with a given hash of the redeem script + /// Generates P2WSH-type of scriptPubkey with a given hash of the redeem script. pub fn new_witness_program(version: WitnessVersion, program: &[u8]) -> Script { Builder::new() .push_opcode(version.into()) @@ -357,7 +389,7 @@ impl Script { .into_script() } - /// Generates OP_RETURN-type of scriptPubkey for a given data + /// Generates OP_RETURN-type of scriptPubkey for the given data. pub fn new_op_return(data: &[u8]) -> Script { Builder::new() .push_opcode(opcodes::all::OP_RETURN) @@ -365,44 +397,44 @@ impl Script { .into_script() } - /// Returns 160-bit hash of the script + /// Returns 160-bit hash of the script. pub fn script_hash(&self) -> ScriptHash { ScriptHash::hash(self.as_bytes()) } - /// Returns 256-bit hash of the script for P2WSH outputs + /// Returns 256-bit hash of the script for P2WSH outputs. pub fn wscript_hash(&self) -> WScriptHash { WScriptHash::hash(self.as_bytes()) } - /// The length in bytes of the script + /// Returns the length in bytes of the script. pub fn len(&self) -> usize { self.0.len() } - /// Whether the script is the empty script + /// Returns whether the script is the empty script. pub fn is_empty(&self) -> bool { self.0.is_empty() } - /// Returns the script data + /// Returns the script data as a byte slice. pub fn as_bytes(&self) -> &[u8] { &*self.0 } - /// Returns a copy of the script data + /// Returns a copy of the script data. pub fn to_bytes(&self) -> Vec { self.0.clone().into_vec() } - /// Convert the script into a byte vector + /// Converts the script into a byte vector. 
pub fn into_bytes(self) -> Vec { self.0.into_vec() } - /// Compute the P2SH output corresponding to this redeem script + /// Computes the P2SH output corresponding to this redeem script. pub fn to_p2sh(&self) -> Script { Script::new_p2sh(&self.script_hash()) } - /// Compute the P2WSH output corresponding to this witnessScript (aka the "witness redeem - /// script") + /// Computes the P2WSH output corresponding to this witnessScript (aka the "witness redeem + /// script"). pub fn to_v0_p2wsh(&self) -> Script { Script::new_v0_p2wsh(&self.wscript_hash()) } - /// Compute P2TR output with a given internal key and a single script spending path equal to the - /// current script, assuming that the script is a Tapscript + /// Computes P2TR output with a given internal key and a single script spending path equal to + /// the current script, assuming that the script is a Tapscript. #[inline] pub fn to_v1_p2tr(&self, secp: &Secp256k1, internal_key: UntweakedPublicKey) -> Script { let leaf_hash = TapLeafHash::from_script(&self, LeafVersion::TapScript); @@ -410,12 +442,13 @@ impl Script { Script::new_v1_p2tr(&secp, internal_key, Some(merkle_root)) } + /// Returns witness version of the script, if any, assuming the script is a `scriptPubkey`. #[inline] - fn witness_version(&self) -> Option { + pub fn witness_version(&self) -> Option { self.0.get(0).and_then(|opcode| WitnessVersion::from_opcode(opcodes::All::from(*opcode)).ok()) } - /// Checks whether a script pubkey is a p2sh output + /// Checks whether a script pubkey is a P2SH output. #[inline] pub fn is_p2sh(&self) -> bool { self.0.len() == 23 @@ -424,7 +457,7 @@ impl Script { && self.0[22] == opcodes::all::OP_EQUAL.into_u8() } - /// Checks whether a script pubkey is a p2pkh output + /// Checks whether a script pubkey is a P2PKH output. 
#[inline] pub fn is_p2pkh(&self) -> bool { self.0.len() == 25 @@ -435,7 +468,7 @@ impl Script { && self.0[24] == opcodes::all::OP_CHECKSIG.into_u8() } - /// Checks whether a script pubkey is a p2pk output + /// Checks whether a script pubkey is a P2PK output. #[inline] pub fn is_p2pk(&self) -> bool { match self.len() { @@ -471,7 +504,7 @@ impl Script { && script_len - 2 == push_opbyte as usize } - /// Checks whether a script pubkey is a p2wsh output + /// Checks whether a script pubkey is a P2WSH output. #[inline] pub fn is_v0_p2wsh(&self) -> bool { self.0.len() == 34 @@ -479,7 +512,7 @@ impl Script { && self.0[1] == opcodes::all::OP_PUSHBYTES_32.into_u8() } - /// Checks whether a script pubkey is a p2wpkh output + /// Checks whether a script pubkey is a P2WPKH output. #[inline] pub fn is_v0_p2wpkh(&self) -> bool { self.0.len() == 22 @@ -487,7 +520,7 @@ impl Script { && self.0[1] == opcodes::all::OP_PUSHBYTES_20.into_u8() } - /// Checks whether a script pubkey is a P2TR output + /// Checks whether a script pubkey is a P2TR output. #[inline] pub fn is_v1_p2tr(&self) -> bool { self.0.len() == 34 @@ -495,7 +528,7 @@ impl Script { && self.0[1] == opcodes::all::OP_PUSHBYTES_32.into_u8() } - /// Check if this is an OP_RETURN output + /// Check if this is an OP_RETURN output. pub fn is_op_return (&self) -> bool { match self.0.first() { Some(b) => *b == opcodes::all::OP_RETURN.into_u8(), @@ -503,7 +536,7 @@ impl Script { } } - /// Whether a script can be proven to have no satisfying input + /// Checks whether a script can be proven to have no satisfying input. pub fn is_provably_unspendable(&self) -> bool { use blockdata::opcodes::Class::{ReturnOp, IllegalOp}; @@ -518,13 +551,12 @@ impl Script { } } - /// Gets the minimum value an output with this script should have in order to be - /// broadcastable on today's bitcoin network. + /// Returns the minimum value an output with this script should have in order to be + /// broadcastable on today's Bitcoin network. 
pub fn dust_value(&self) -> ::Amount { // This must never be lower than Bitcoin Core's GetDustThreshold() (as of v0.21) as it may - // otherwise allow users to create transactions which likely can never be - // broadcasted/confirmed. - let sats = DUST_RELAY_TX_FEE as u64 / 1000 * // The default dust relay fee is 3000 satoshi/kB (ie 3 sat/vByte) + // otherwise allow users to create transactions which likely can never be broadcast/confirmed. + let sats = DUST_RELAY_TX_FEE as u64 / 1000 * // The default dust relay fee is 3000 satoshi/kB (i.e. 3 sat/vByte) if self.is_op_return() { 0 } else if self.is_witness_program() { @@ -540,12 +572,14 @@ impl Script { ::Amount::from_sat(sats) } - /// Iterate over the script in the form of `Instruction`s, which are an enum covering - /// opcodes, datapushes and errors. At most one error will be returned and then the - /// iterator will end. To instead iterate over the script as sequence of bytes, treat - /// it as a slice using `script[..]` or convert it to a vector using `into_bytes()`. + /// Iterates over the script in the form of `Instruction`s, which are an enum covering opcodes, + /// datapushes and errors. + /// + /// At most one error will be returned and then the iterator will end. To instead iterate over + /// the script as sequence of bytes, treat it as a slice using `script[..]` or convert it to a + /// vector using `into_bytes()`. /// - /// To force minimal pushes, use [Self::instructions_minimal]. + /// To force minimal pushes, use [`Self::instructions_minimal`]. pub fn instructions(&self) -> Instructions { Instructions { data: &self.0[..], @@ -553,8 +587,7 @@ impl Script { } } - /// Iterate over the script in the form of `Instruction`s while enforcing - /// minimal pushes. + /// Iterates over the script in the form of `Instruction`s while enforcing minimal pushes. 
pub fn instructions_minimal(&self) -> Instructions { Instructions { data: &self.0[..], @@ -562,26 +595,27 @@ impl Script { } } - /// Shorthand for [Self::verify_with_flags] with flag [bitcoinconsensus::VERIFY_ALL] + /// Shorthand for [`Self::verify_with_flags`] with flag [bitcoinconsensus::VERIFY_ALL]. #[cfg(feature="bitcoinconsensus")] #[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))] pub fn verify (&self, index: usize, amount: ::Amount, spending: &[u8]) -> Result<(), Error> { self.verify_with_flags(index, amount, spending, ::bitcoinconsensus::VERIFY_ALL) } - /// Verify spend of an input script + /// Verifies spend of an input script. + /// /// # Parameters - /// * `index` - the input index in spending which is spending this transaction - /// * `amount` - the amount this script guards - /// * `spending` - the transaction that attempts to spend the output holding this script - /// * `flags` - verification flags, see [bitcoinconsensus::VERIFY_ALL] and similar + /// * `index` - The input index in spending which is spending this transaction. + /// * `amount` - The amount this script guards. + /// * `spending` - The transaction that attempts to spend the output holding this script. + /// * `flags` - Verification flags, see [`bitcoinconsensus::VERIFY_ALL`] and similar. #[cfg(feature="bitcoinconsensus")] #[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))] pub fn verify_with_flags>(&self, index: usize, amount: ::Amount, spending: &[u8], flags: F) -> Result<(), Error> { Ok(bitcoinconsensus::verify_with_flags (&self.0[..], amount.as_sat(), spending, index, flags.into())?) } - /// Write the assembly decoding of the script bytes to the formatter. + /// Writes the assembly decoding of the script bytes to the formatter. pub fn bytes_to_asm_fmt(script: &[u8], f: &mut dyn fmt::Write) -> fmt::Result { // This has to be a macro because it needs to break the loop macro_rules! 
read_push_data_len { @@ -661,41 +695,39 @@ impl Script { Ok(()) } - /// Write the assembly decoding of the script to the formatter. + /// Writes the assembly decoding of the script to the formatter. pub fn fmt_asm(&self, f: &mut dyn fmt::Write) -> fmt::Result { Script::bytes_to_asm_fmt(self.as_ref(), f) } - /// Create an assembly decoding of the script in the given byte slice. + /// Creates an assembly decoding of the script in the given byte slice. pub fn bytes_to_asm(script: &[u8]) -> String { let mut buf = String::new(); Script::bytes_to_asm_fmt(script, &mut buf).unwrap(); buf } - /// Get the assembly decoding of the script. + /// Returns the assembly decoding of the script. pub fn asm(&self) -> String { Script::bytes_to_asm(self.as_ref()) } } -/// Creates a new script from an existing vector +/// Creates a new script from an existing vector. impl From> for Script { fn from(v: Vec) -> Script { Script(v.into_boxed_slice()) } } -impl_index_newtype!(Script, u8); - -/// A "parsed opcode" which allows iterating over a Script in a more sensible way +/// A "parsed opcode" which allows iterating over a [`Script`] in a more sensible way. #[derive(Debug, PartialEq, Eq, Clone)] pub enum Instruction<'a> { - /// Push a bunch of data + /// Push a bunch of data. PushBytes(&'a [u8]), - /// Some non-push opcode + /// Some non-push opcode. Op(opcodes::All), } -/// Iterator over a script returning parsed opcodes +/// Iterator over a script returning parsed opcodes. 
pub struct Instructions<'a> { data: &'a [u8], enforce_minimal: bool, @@ -715,7 +747,7 @@ impl<'a> Iterator for Instructions<'a> { opcodes::Class::PushBytes(n) => { let n = n as usize; if self.data.len() < n + 1 { - self.data = &[]; // Kill iterator so that it does not return an infinite stream of errors + self.data = &[]; // Kill iterator so that it does not return an infinite stream of errors return Some(Err(Error::EarlyEndOfScript)); } if self.enforce_minimal { @@ -813,15 +845,15 @@ impl<'a> Iterator for Instructions<'a> { impl<'a> ::core::iter::FusedIterator for Instructions<'a> {} impl Builder { - /// Creates a new empty script + /// Creates a new empty script. pub fn new() -> Self { Builder(vec![], None) } - /// The length in bytes of the script + /// Returns the length in bytes of the script. pub fn len(&self) -> usize { self.0.len() } - /// Whether the script is the empty script + /// Checks whether the script is the empty script. pub fn is_empty(&self) -> bool { self.0.is_empty() } /// Adds instructions to push an integer onto the stack. Integers are @@ -849,7 +881,7 @@ impl Builder { self.push_slice(&build_scriptint(data)) } - /// Adds instructions to push some arbitrary data onto the stack + /// Adds instructions to push some arbitrary data onto the stack. pub fn push_slice(mut self, data: &[u8]) -> Builder { // Start with a PUSH opcode match data.len() as u64 { @@ -878,7 +910,7 @@ impl Builder { self } - /// Pushes a public key + /// Adds instructions to push a public key onto the stack. pub fn push_key(self, key: &PublicKey) -> Builder { if key.compressed { self.push_slice(&key.inner.serialize()[..]) @@ -887,7 +919,7 @@ impl Builder { } } - /// Adds a single opcode to the script + /// Adds a single opcode to the script. 
pub fn push_opcode(mut self, data: opcodes::All) -> Builder { self.0.push(data.into_u8()); self.1 = Some(data); @@ -896,7 +928,7 @@ impl Builder { /// Adds an `OP_VERIFY` to the script, unless the most-recently-added /// opcode has an alternate `VERIFY` form, in which case that opcode - /// is replaced. e.g. `OP_CHECKSIG` will become `OP_CHECKSIGVERIFY`. + /// is replaced e.g., `OP_CHECKSIG` will become `OP_CHECKSIGVERIFY`. pub fn push_verify(mut self) -> Builder { match self.1 { Some(opcodes::all::OP_EQUAL) => { @@ -919,18 +951,17 @@ impl Builder { } } - /// Converts the `Builder` into an unmodifiable `Script` + /// Converts the `Builder` into an unmodifiable `Script`. pub fn into_script(self) -> Script { Script(self.0.into_boxed_slice()) } } -/// Adds an individual opcode to the script impl Default for Builder { fn default() -> Builder { Builder::new() } } -/// Creates a new script from an existing vector +/// Creates a new builder from an existing vector. impl From> for Builder { fn from(v: Vec) -> Builder { let script = Script(v.into_boxed_slice()); @@ -942,13 +973,12 @@ impl From> for Builder { } } -impl_index_newtype!(Builder, u8); - #[cfg(feature = "serde")] #[cfg_attr(docsrs, doc(cfg(feature = "serde")))] impl<'de> serde::Deserialize<'de> for Script { fn deserialize(deserializer: D) -> Result - where D: serde::Deserializer<'de>, + where + D: serde::Deserializer<'de>, { use core::fmt::Formatter; use hashes::hex::FromHex; @@ -964,20 +994,23 @@ impl<'de> serde::Deserialize<'de> for Script { } fn visit_str(self, v: &str) -> Result - where E: serde::de::Error, + where + E: serde::de::Error, { let v = Vec::from_hex(v).map_err(E::custom)?; Ok(Script::from(v)) } fn visit_borrowed_str(self, v: &'de str) -> Result - where E: serde::de::Error, + where + E: serde::de::Error, { self.visit_str(v) } fn visit_string(self, v: String) -> Result - where E: serde::de::Error, + where + E: serde::de::Error, { self.visit_str(&v) } @@ -994,7 +1027,8 @@ impl<'de> 
serde::Deserialize<'de> for Script { } fn visit_bytes(self, v: &[u8]) -> Result - where E: serde::de::Error, + where + E: serde::de::Error, { Ok(Script::from(v.to_vec())) } @@ -1020,13 +1054,9 @@ impl serde::Serialize for Script { } } -// Network serialization impl Encodable for Script { #[inline] - fn consensus_encode( - &self, - s: S, - ) -> Result { + fn consensus_encode(&self, s: S) -> Result { self.0.consensus_encode(s) } } @@ -1354,38 +1384,19 @@ mod test { let slop_v_nonmin: Result, Error> = nonminimal.instructions().collect(); let slop_v_nonmin_alt: Result, Error> = nonminimal_alt.instructions().collect(); - assert_eq!( - v_zero.unwrap(), - vec![ - Instruction::PushBytes(&[]), - ] - ); - assert_eq!( - v_zeropush.unwrap(), - vec![ - Instruction::PushBytes(&[0]), - ] - ); + assert_eq!(v_zero.unwrap(), vec![Instruction::PushBytes(&[])]); + assert_eq!(v_zeropush.unwrap(), vec![Instruction::PushBytes(&[0])]); assert_eq!( v_min.clone().unwrap(), - vec![ - Instruction::PushBytes(&[105]), - Instruction::Op(opcodes::OP_NOP3), - ] + vec![Instruction::PushBytes(&[105]), Instruction::Op(opcodes::OP_NOP3)] ); - assert_eq!( - v_nonmin.err().unwrap(), - Error::NonMinimalPush - ); + assert_eq!(v_nonmin.err().unwrap(), Error::NonMinimalPush); assert_eq!( v_nonmin_alt.clone().unwrap(), - vec![ - Instruction::PushBytes(&[105, 0]), - Instruction::Op(opcodes::OP_NOP3), - ] + vec![Instruction::PushBytes(&[105, 0]), Instruction::Op(opcodes::OP_NOP3)] ); assert_eq!(v_min.clone().unwrap(), slop_v_min.unwrap()); @@ -1395,7 +1406,7 @@ mod test { #[test] fn script_ord() { - let script_1 = Builder::new().push_slice(&[1,2,3,4]).into_script(); + let script_1 = Builder::new().push_slice(&[1, 2, 3, 4]).into_script(); let script_2 = Builder::new().push_int(10).into_script(); let script_3 = Builder::new().push_int(15).into_script(); let script_4 = Builder::new().push_opcode(opcodes::all::OP_RETURN).into_script(); @@ -1413,7 +1424,7 @@ mod test { } #[test] - #[cfg(feature="bitcoinconsensus")] 
+ #[cfg(feature = "bitcoinconsensus")] fn test_bitcoinconsensus () { // a random segwit transaction from the blockchain using native segwit let spent = Builder::from(Vec::from_hex("0020701a8d401c84fb13e6baf169d59684e17abd9fa216c8cc5b9fc63d622ff8c58d").unwrap()).into_script(); diff --git a/src/blockdata/transaction.rs b/src/blockdata/transaction.rs index 244356387..8a34d05c0 100644 --- a/src/blockdata/transaction.rs +++ b/src/blockdata/transaction.rs @@ -42,30 +42,37 @@ use consensus::encode::MAX_VEC_SIZE; use hash_types::{SigHash, Txid, Wtxid}; use VarInt; -/// A reference to a transaction output +#[cfg(doc)] +use util::sighash::SchnorrSigHashType; + +/// Used for signature hash for invalid use of SIGHASH_SINGLE. +const UINT256_ONE: [u8; 32] = [ + 1, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0 +]; + +/// A reference to a transaction output. #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] pub struct OutPoint { - /// The referenced transaction's txid + /// The referenced transaction's txid. pub txid: Txid, - /// The index of the referenced output in its transaction's vout + /// The index of the referenced output in its transaction's vout. pub vout: u32, } serde_struct_human_string_impl!(OutPoint, "an OutPoint", txid, vout); impl OutPoint { - /// Create a new [OutPoint]. + /// Creates a new [`OutPoint`]. #[inline] pub fn new(txid: Txid, vout: u32) -> OutPoint { - OutPoint { - txid, - vout, - } + OutPoint { txid, vout } } /// Creates a "null" `OutPoint`. /// - /// This value is used for coinbase transactions because they don't have - /// any previous outputs. + /// This value is used for coinbase transactions because they don't have any previous outputs. #[inline] pub fn null() -> OutPoint { OutPoint { @@ -86,7 +93,7 @@ impl OutPoint { /// let tx = &block.txdata[0]; /// /// // Coinbase transactions don't have any previous output. 
- /// assert_eq!(tx.input[0].previous_output.is_null(), true); + /// assert!(tx.input[0].previous_output.is_null()); /// ``` #[inline] pub fn is_null(&self) -> bool { @@ -135,7 +142,7 @@ impl fmt::Display for ParseOutPointError { #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] -impl error::Error for ParseOutPointError { +impl error::Error for ParseOutPointError { fn cause(&self) -> Option<&dyn error::Error> { match *self { ParseOutPointError::Txid(ref e) => Some(e), @@ -146,7 +153,7 @@ impl error::Error for ParseOutPointError { } /// Parses a string-encoded transaction index (vout). -/// It does not permit leading zeroes or non-digit characters. +/// Does not permit leading zeroes or non-digit characters. fn parse_vout(s: &str) -> Result { if s.len() > 1 { let first = s.chars().next().unwrap(); @@ -183,10 +190,10 @@ impl ::core::str::FromStr for OutPoint { #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct TxIn { - /// The reference to the previous output that is being used an an input + /// The reference to the previous output that is being used an an input. pub previous_output: OutPoint, /// The script which pushes values on the stack which will cause - /// the referenced output's script to accept + /// the referenced output's script to be accepted. pub script_sig: Script, /// The sequence number, which suggests to miners which of two /// conflicting transactions should be preferred, or 0xFFFFFFFF @@ -216,13 +223,13 @@ impl Default for TxIn { #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct TxOut { - /// The value of the output, in satoshis + /// The value of the output, in satoshis. pub value: u64, - /// The script which must satisfy for the output to be spent + /// The script which must be satisfied for the output to be spent. 
pub script_pubkey: Script } -// This is used as a "null txout" in consensus signing code +// This is used as a "null txout" in consensus signing code. impl Default for TxOut { fn default() -> TxOut { TxOut { value: 0xffffffffffffffff, script_pubkey: Script::new() } @@ -264,12 +271,11 @@ impl Default for TxOut { pub struct Transaction { /// The protocol version, is currently expected to be 1 or 2 (BIP 68). pub version: i32, - /// Block number before which this transaction is valid, or 0 for - /// valid immediately. + /// Block number before which this transaction is valid, or 0 for valid immediately. pub lock_time: u32, - /// List of inputs + /// List of transaction inputs. pub input: Vec, - /// List of outputs + /// List of transaction outputs. pub output: Vec, } @@ -310,20 +316,27 @@ impl Transaction { } /// Encodes the signing data from which a signature hash for a given input index with a given - /// sighash flag can be computed. To actually produce a scriptSig, this hash needs to be run - /// through an ECDSA signer, the SigHashType appended to the resulting sig, and a script - /// written around this, but this is the general (and hard) part. + /// sighash flag can be computed. + /// + /// To actually produce a scriptSig, this hash needs to be run through an ECDSA signer, the + /// [`EcdsaSigHashType`] appended to the resulting sig, and a script written around this, but + /// this is the general (and hard) part. /// - /// The `sighash_type` supports arbitrary `u32` value, instead of just [`SigHashType`], - /// because internally 4 bytes are being hashed, even though only lowest byte - /// is appended to signature in a transaction. + /// The `sighash_type` supports an arbitrary `u32` value, instead of just [`EcdsaSigHashType`], + /// because internally 4 bytes are being hashed, even though only the lowest byte is appended to + /// signature in a transaction. /// - /// *Warning* This does NOT attempt to support OP_CODESEPARATOR. 
In general this would require - /// evaluating `script_pubkey` to determine which separators get evaluated and which don't, - /// which we don't have the information to determine. + /// # Warning + /// + /// - Does NOT attempt to support OP_CODESEPARATOR. In general this would require evaluating + /// `script_pubkey` to determine which separators get evaluated and which don't, which we don't + /// have the information to determine. + /// - Does NOT handle the sighash single bug, you should either handle that manually or use + /// [`Self::signature_hash()`] instead. /// /// # Panics - /// Panics if `input_index` is greater than or equal to `self.input.len()` + /// + /// If `input_index` is out of bounds (greater than or equal to `self.input.len()`). pub fn encode_signing_data_to>( &self, mut writer: Write, @@ -331,20 +344,20 @@ impl Transaction { script_pubkey: &Script, sighash_type: U, ) -> Result<(), encode::Error> { - let sighash_type : u32 = sighash_type.into(); + let sighash_type: u32 = sighash_type.into(); assert!(input_index < self.input.len()); // Panic on OOB - let (sighash, anyone_can_pay) = EcdsaSigHashType::from_u32_consensus(sighash_type).split_anyonecanpay_flag(); - - // Special-case sighash_single bug because this is easy enough. - if sighash == EcdsaSigHashType::Single && input_index >= self.output.len() { - writer.write_all(&[1, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0])?; - return Ok(()); + if self.is_invalid_use_of_sighash_single(sighash_type, input_index) { + // We cannot correctly handle the SIGHASH_SINGLE bug here because usage of this function + // will result in the data written to the writer being hashed, however the correct + // handling of the SIGHASH_SINGLE bug is to return the 'one array' - either implement + // this behaviour manually or use `signature_hash()`. 
+ writer.write(b"[not a transaction] SIGHASH_SINGLE bug")?; + return Ok(()) } + let (sighash, anyone_can_pay) = EcdsaSigHashType::from_consensus(sighash_type).split_anyonecanpay_flag(); + // Build tx to sign let mut tx = Transaction { version: self.version, @@ -392,61 +405,109 @@ impl Transaction { } /// Computes a signature hash for a given input index with a given sighash flag. - /// To actually produce a scriptSig, this hash needs to be run through an - /// ECDSA signer, the SigHashType appended to the resulting sig, and a - /// script written around this, but this is the general (and hard) part. /// - /// *Warning* This does NOT attempt to support OP_CODESEPARATOR. In general - /// this would require evaluating `script_pubkey` to determine which separators - /// get evaluated and which don't, which we don't have the information to - /// determine. + /// To actually produce a scriptSig, this hash needs to be run through an ECDSA signer, the + /// [`EcdsaSigHashType`] appended to the resulting sig, and a script written around this, but + /// this is the general (and hard) part. + /// + /// The `sighash_type` supports an arbitrary `u32` value, instead of just [`EcdsaSigHashType`], + /// because internally 4 bytes are being hashed, even though only the lowest byte is appended to + /// signature in a transaction. + /// + /// This function correctly handles the sighash single bug by returning the 'one array'. The + /// sighash single bug becomes exploitable when one tries to sign a transaction with + /// `SIGHASH_SINGLE` and there is not a corresponding output with the same index as the input. + /// + /// # Warning + /// + /// Does NOT attempt to support OP_CODESEPARATOR. In general this would require evaluating + /// `script_pubkey` to determine which separators get evaluated and which don't, which we don't + /// have the information to determine. 
/// /// # Panics - /// Panics if `input_index` is greater than or equal to `self.input.len()` /// + /// If `input_index` is out of bounds (greater than or equal to `self.input.len()`). pub fn signature_hash( &self, input_index: usize, script_pubkey: &Script, sighash_u32: u32 ) -> SigHash { + if self.is_invalid_use_of_sighash_single(sighash_u32, input_index) { + return SigHash::from_slice(&UINT256_ONE).expect("const-size array"); + } + let mut engine = SigHash::engine(); self.encode_signing_data_to(&mut engine, input_index, script_pubkey, sighash_u32) .expect("engines don't error"); SigHash::from_engine(engine) } - /// Gets the "weight" of this transaction, as defined by BIP141. For transactions with an empty - /// witness, this is simply the consensus-serialized size times 4. For transactions with a - /// witness, this is the non-witness consensus-serialized size multiplied by 3 plus the - /// with-witness consensus-serialized size. + fn is_invalid_use_of_sighash_single(&self, sighash: u32, input_index: usize) -> bool { + let ty = EcdsaSigHashType::from_consensus(sighash); + ty == EcdsaSigHashType::Single && input_index >= self.output.len() + } + + /// Returns the "weight" of this transaction, as defined by BIP141. #[inline] + #[deprecated(since = "0.28.0", note = "Please use `transaction::weight` instead.")] pub fn get_weight(&self) -> usize { - self.get_scaled_size(WITNESS_SCALE_FACTOR) + self.weight() } - /// Gets the regular byte-wise consensus-serialized size of this transaction. + /// Returns the "weight" of this transaction, as defined by BIP141. + /// + /// For transactions with an empty witness, this is simply the consensus-serialized size times + /// four. For transactions with a witness, this is the non-witness consensus-serialized size + /// multiplied by three plus the with-witness consensus-serialized size. 
#[inline] + pub fn weight(&self) -> usize { + self.scaled_size(WITNESS_SCALE_FACTOR) + } + + /// Returns the regular byte-wise consensus-serialized size of this transaction. + #[inline] + #[deprecated(since = "0.28.0", note = "Please use `transaction::size` instead.")] pub fn get_size(&self) -> usize { - self.get_scaled_size(1) + self.size() } - /// Gets the "vsize" of this transaction. Will be `ceil(weight / 4.0)`. - /// Note this implements the virtual size as per [`bip141`], which is different - /// to what is implemented in Bitcoin Core. The computation should be the same - /// for any remotely sane transaction, and a standardness-rule-correct version - /// is available in the [`policy`] module. + /// Returns the regular byte-wise consensus-serialized size of this transaction. + #[inline] + pub fn size(&self) -> usize { + self.scaled_size(1) + } + + /// Returns the "virtual size" (vsize) of this transaction. + #[inline] + #[deprecated(since = "0.28.0", note = "Please use `transaction::vsize` instead.")] + pub fn get_vsize(&self) -> usize { + self.vsize() + } + + /// Returns the "virtual size" (vsize) of this transaction. /// - /// [`bip141`]: https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki + /// Will be `ceil(weight / 4.0)`. Note this implements the virtual size as per [`BIP141`], which + /// is different to what is implemented in Bitcoin Core. The computation should be the same for + /// any remotely sane transaction, and a standardness-rule-correct version is available in the + /// [`policy`] module. + /// + /// [`BIP141`]: https://github.com/bitcoin/bips/blob/master/bip-0141.mediawiki /// [`policy`]: ../policy/mod.rs.html #[inline] - pub fn get_vsize(&self) -> usize { - let weight = self.get_weight(); + pub fn vsize(&self) -> usize { + let weight = self.weight(); (weight + WITNESS_SCALE_FACTOR - 1) / WITNESS_SCALE_FACTOR } - /// Gets the size of this transaction excluding the witness data. 
+ /// Returns the size of this transaction excluding the witness data. + #[deprecated(since = "0.28.0", note = "Please use `transaction::strippedsize` instead.")] pub fn get_strippedsize(&self) -> usize { + self.strippedsize() + } + + /// Returns the size of this transaction excluding the witness data. + pub fn strippedsize(&self) -> usize { let mut input_size = 0; for input in &self.input { input_size += 32 + 4 + 4 + // outpoint (32+4) + nSequence @@ -471,8 +532,8 @@ impl Transaction { non_input_size + input_size } - /// Internal utility function for get_{size,weight} - fn get_scaled_size(&self, scale_factor: usize) -> usize { + /// Internal utility function for size/weight functions. + fn scaled_size(&self, scale_factor: usize) -> usize { let mut input_weight = 0; let mut inputs_with_witnesses = 0; for input in &self.input { @@ -506,20 +567,25 @@ impl Transaction { } } - /// Shorthand for [Self::verify_with_flags] with flag [bitcoinconsensus::VERIFY_ALL] + /// Shorthand for [`Self::verify_with_flags`] with flag [`bitcoinconsensus::VERIFY_ALL`]. #[cfg(feature="bitcoinconsensus")] #[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))] pub fn verify(&self, spent: S) -> Result<(), script::Error> - where S: FnMut(&OutPoint) -> Option { + where + S: FnMut(&OutPoint) -> Option + { self.verify_with_flags(spent, ::bitcoinconsensus::VERIFY_ALL) } - /// Verify that this transaction is able to spend its inputs - /// The lambda spent should not return the same TxOut twice! + /// Verify that this transaction is able to spend its inputs. + /// The `spent` closure should not return the same [`TxOut`] twice! 
#[cfg(feature="bitcoinconsensus")] #[cfg_attr(docsrs, doc(cfg(feature = "bitcoinconsensus")))] pub fn verify_with_flags(&self, mut spent: S, flags: F) -> Result<(), script::Error> - where S: FnMut(&OutPoint) -> Option, F : Into { + where + S: FnMut(&OutPoint) -> Option, + F: Into + { let tx = encode::serialize(&*self); let flags: u32 = flags.into(); for (idx, input) in self.input.iter().enumerate() { @@ -548,10 +614,7 @@ impl Transaction { impl_consensus_encoding!(TxOut, value, script_pubkey); impl Encodable for OutPoint { - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let len = self.txid.consensus_encode(&mut s)?; Ok(len + self.vout.consensus_encode(s)?) } @@ -566,10 +629,7 @@ impl Decodable for OutPoint { } impl Encodable for TxIn { - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let mut len = 0; len += self.previous_output.consensus_encode(&mut s)?; len += self.script_sig.consensus_encode(&mut s)?; @@ -589,12 +649,11 @@ impl Decodable for TxIn { } impl Encodable for Transaction { - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let mut len = 0; len += self.version.consensus_encode(&mut s)?; + // To avoid serialization ambiguity, no inputs means we use BIP141 serialization (see + // `Transaction` docs for full explanation). 
let mut have_witness = self.input.is_empty(); for input in &self.input { if !input.witness.is_empty() { @@ -647,9 +706,7 @@ impl Decodable for Transaction { } } // We don't support anything else - x => { - Err(encode::Error::UnsupportedSegwitFlag(x)) - } + x => Err(encode::Error::UnsupportedSegwitFlag(x)), } // non-segwit } else { @@ -679,28 +736,29 @@ impl fmt::Display for NonStandardSigHashType { impl error::Error for NonStandardSigHashType {} /// Legacy Hashtype of an input's signature -#[deprecated(since="0.28.0", note="Please use [`EcdsaSigHashType`] instead")] +#[deprecated(since = "0.28.0", note = "Please use [`EcdsaSigHashType`] instead")] pub type SigHashType = EcdsaSigHashType; -/// Hashtype of an input's signature, encoded in the last byte of the signature -/// Fixed values so they can be casted as integer types for encoding -/// See also [`crate::SchnorrSigHashType`] +/// Hashtype of an input's signature, encoded in the last byte of the signature. +/// +/// Fixed values so they can be cast as integer types for encoding (see also +/// [`SchnorrSigHashType`]). #[derive(PartialEq, Eq, Debug, Copy, Clone)] pub enum EcdsaSigHashType { - /// 0x1: Sign all outputs + /// 0x1: Sign all outputs. All = 0x01, - /// 0x2: Sign no outputs --- anyone can choose the destination + /// 0x2: Sign no outputs --- anyone can choose the destination. None = 0x02, /// 0x3: Sign the output whose index matches this input's index. If none exists, /// sign the hash `0000000000000000000000000000000000000000000000000000000000000001`. /// (This rule is probably an unintentional C++ism, but it's consensus so we have /// to follow it.) Single = 0x03, - /// 0x81: Sign all outputs but only this input + /// 0x81: Sign all outputs but only this input. AllPlusAnyoneCanPay = 0x81, - /// 0x82: Sign no outputs and only this input + /// 0x82: Sign no outputs and only this input. 
NonePlusAnyoneCanPay = 0x82, - /// 0x83: Sign one output and only this input (see `Single` for what "one output" means) + /// 0x83: Sign one output and only this input (see `Single` for what "one output" means). SinglePlusAnyoneCanPay = 0x83 } serde_string_impl!(EcdsaSigHashType, "a EcdsaSigHashType data"); @@ -730,39 +788,40 @@ impl str::FromStr for EcdsaSigHashType { "SIGHASH_ALL|SIGHASH_ANYONECANPAY" => Ok(EcdsaSigHashType::AllPlusAnyoneCanPay), "SIGHASH_NONE|SIGHASH_ANYONECANPAY" => Ok(EcdsaSigHashType::NonePlusAnyoneCanPay), "SIGHASH_SINGLE|SIGHASH_ANYONECANPAY" => Ok(EcdsaSigHashType::SinglePlusAnyoneCanPay), - _ => Err(SigHashTypeParseError { string: s.to_owned() }), + _ => Err(SigHashTypeParseError { unrecognized: s.to_owned() }), } } } impl EcdsaSigHashType { - /// Break the sighash flag into the "real" sighash flag and the ANYONECANPAY boolean + /// Splits the sighash flag into the "real" sighash flag and the ANYONECANPAY boolean. pub(crate) fn split_anyonecanpay_flag(self) -> (EcdsaSigHashType, bool) { match self { - EcdsaSigHashType::All => (EcdsaSigHashType::All, false), - EcdsaSigHashType::None => (EcdsaSigHashType::None, false), - EcdsaSigHashType::Single => (EcdsaSigHashType::Single, false), - EcdsaSigHashType::AllPlusAnyoneCanPay => (EcdsaSigHashType::All, true), - EcdsaSigHashType::NonePlusAnyoneCanPay => (EcdsaSigHashType::None, true), - EcdsaSigHashType::SinglePlusAnyoneCanPay => (EcdsaSigHashType::Single, true) + EcdsaSigHashType::All => (EcdsaSigHashType::All, false), + EcdsaSigHashType::None => (EcdsaSigHashType::None, false), + EcdsaSigHashType::Single => (EcdsaSigHashType::Single, false), + EcdsaSigHashType::AllPlusAnyoneCanPay => (EcdsaSigHashType::All, true), + EcdsaSigHashType::NonePlusAnyoneCanPay => (EcdsaSigHashType::None, true), + EcdsaSigHashType::SinglePlusAnyoneCanPay => (EcdsaSigHashType::Single, true) } } - /// Reads a 4-byte uint32 as a sighash type. 
- #[deprecated(since="0.26.1", note="please use `from_u32_consensus` or `from_u32_standard` instead")] - pub fn from_u32(n: u32) -> EcdsaSigHashType { - Self::from_u32_consensus(n) + /// Creates a [`EcdsaSigHashType`] from a raw `u32`. + #[deprecated(since="0.28.0", note="please use `from_consensus`")] + pub fn from_u32_consensus(n: u32) -> EcdsaSigHashType { + EcdsaSigHashType::from_consensus(n) } - /// Reads a 4-byte uint32 as a sighash type. + /// Creates a [`EcdsaSigHashType`] from a raw `u32`. /// /// **Note**: this replicates consensus behaviour, for current standardness rules correctness - /// you probably want [Self::from_u32_standard]. + /// you probably want [`Self::from_standard`]. + /// /// This might cause unexpected behavior because it does not roundtrip. That is, - /// `EcdsaSigHashType::from_u32_consensus(n) as u32 != n` for non-standard values of - /// `n`. While verifying signatures, the user should retain the `n` and use it compute the - /// signature hash message. - pub fn from_u32_consensus(n: u32) -> EcdsaSigHashType { + /// `EcdsaSigHashType::from_consensus(n) as u32 != n` for non-standard values of `n`. While + /// verifying signatures, the user should retain the `n` and use it compute the signature hash + /// message. + pub fn from_consensus(n: u32) -> EcdsaSigHashType { // In Bitcoin Core, the SignatureHash function will mask the (int32) value with // 0x1f to (apparently) deactivate ACP when checking for SINGLE and NONE bits. // We however want to be matching also against on ACP-masked ALL, SINGLE, and NONE. @@ -782,9 +841,18 @@ impl EcdsaSigHashType { } } - /// Read a 4-byte uint32 as a standard sighash type, returning an error if the type - /// is non standard. + /// Creates a [`EcdsaSigHashType`] from a raw `u32`. + #[deprecated(since="0.28.0", note="please use `from_standard`")] pub fn from_u32_standard(n: u32) -> Result { + EcdsaSigHashType::from_standard(n) + } + + /// Creates a [`EcdsaSigHashType`] from a raw `u32`. 
+ /// + /// # Errors + /// + /// If `n` is a non-standard sighash value. + pub fn from_standard(n: u32) -> Result { match n { // Standard sighashes, see https://github.com/bitcoin/bitcoin/blob/b805dbb0b9c90dadef0424e5b3bf86ac308e103e/src/script/interpreter.cpp#L189-L198 0x01 => Ok(EcdsaSigHashType::All), @@ -797,14 +865,10 @@ impl EcdsaSigHashType { } } - /// Converts to a u32 - pub fn as_u32(self) -> u32 { self as u32 } -} - -impl From for u32 { - fn from(t: EcdsaSigHashType) -> u32 { - t.as_u32() - } + /// Converts [`EcdsaSigHashType`] to a `u32` sighash flag. + /// + /// The returned value is guaranteed to be a valid according to standardness rules. + pub fn to_u32(self) -> u32 { self as u32 } } /// Error returned when parsing `SigHashType` fails. @@ -812,12 +876,13 @@ impl From for u32 { /// This is currently returned for unrecognized sighash strings. #[derive(Debug, Clone)] pub struct SigHashTypeParseError { - string: String, + /// The unrecognized string we attempted to parse. + pub unrecognized: String, } impl fmt::Display for SigHashTypeParseError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "can't recognize SIGHASH string '{}'", self.string) + write!(f, "Unrecognized SIGHASH string '{}'", self.unrecognized) } } @@ -827,7 +892,7 @@ impl ::std::error::Error for SigHashTypeParseError {} #[cfg(test)] mod tests { - use super::{OutPoint, ParseOutPointError, Transaction, TxIn, NonStandardSigHashType}; + use super::*; use core::str::FromStr; use blockdata::constants::WITNESS_SCALE_FACTOR; @@ -925,10 +990,10 @@ mod tests { "a6eab3c14ab5272a58a5ba91505ba1a4b6d7a3a9fcbd187b6cd99a7b6d548cb7".to_string()); assert_eq!(format!("{:x}", realtx.wtxid()), "a6eab3c14ab5272a58a5ba91505ba1a4b6d7a3a9fcbd187b6cd99a7b6d548cb7".to_string()); - assert_eq!(realtx.get_weight(), tx_bytes.len()*WITNESS_SCALE_FACTOR); - assert_eq!(realtx.get_size(), tx_bytes.len()); - assert_eq!(realtx.get_vsize(), tx_bytes.len()); - assert_eq!(realtx.get_strippedsize(), 
tx_bytes.len()); + assert_eq!(realtx.weight(), tx_bytes.len()*WITNESS_SCALE_FACTOR); + assert_eq!(realtx.size(), tx_bytes.len()); + assert_eq!(realtx.vsize(), tx_bytes.len()); + assert_eq!(realtx.strippedsize(), tx_bytes.len()); } #[test] @@ -960,23 +1025,23 @@ mod tests { assert_eq!(format!("{:x}", realtx.wtxid()), "80b7d8a82d5d5bf92905b06f2014dd699e03837ca172e3a59d51426ebbe3e7f5".to_string()); const EXPECTED_WEIGHT: usize = 442; - assert_eq!(realtx.get_weight(), EXPECTED_WEIGHT); - assert_eq!(realtx.get_size(), tx_bytes.len()); - assert_eq!(realtx.get_vsize(), 111); + assert_eq!(realtx.weight(), EXPECTED_WEIGHT); + assert_eq!(realtx.size(), tx_bytes.len()); + assert_eq!(realtx.vsize(), 111); // Since // size = stripped_size + witness_size // weight = WITNESS_SCALE_FACTOR * stripped_size + witness_size // then, // stripped_size = (weight - size) / (WITNESS_SCALE_FACTOR - 1) let expected_strippedsize = (EXPECTED_WEIGHT - tx_bytes.len()) / (WITNESS_SCALE_FACTOR - 1); - assert_eq!(realtx.get_strippedsize(), expected_strippedsize); + assert_eq!(realtx.strippedsize(), expected_strippedsize); // Construct a transaction without the witness data. 
let mut tx_without_witness = realtx.clone(); tx_without_witness.input.iter_mut().for_each(|input| input.witness.clear()); - assert_eq!(tx_without_witness.get_weight(), expected_strippedsize*WITNESS_SCALE_FACTOR); - assert_eq!(tx_without_witness.get_size(), expected_strippedsize); - assert_eq!(tx_without_witness.get_vsize(), expected_strippedsize); - assert_eq!(tx_without_witness.get_strippedsize(), expected_strippedsize); + assert_eq!(tx_without_witness.weight(), expected_strippedsize*WITNESS_SCALE_FACTOR); + assert_eq!(tx_without_witness.size(), expected_strippedsize); + assert_eq!(tx_without_witness.vsize(), expected_strippedsize); + assert_eq!(tx_without_witness.strippedsize(), expected_strippedsize); } #[test] @@ -1060,7 +1125,7 @@ mod tests { assert_eq!(format!("{:x}", tx.wtxid()), "d6ac4a5e61657c4c604dcde855a1db74ec6b3e54f32695d72c5e11c7761ea1b4"); assert_eq!(format!("{:x}", tx.txid()), "9652aa62b0e748caeec40c4cb7bc17c6792435cc3dfe447dd1ca24f912a1c6ec"); - assert_eq!(tx.get_weight(), 2718); + assert_eq!(tx.weight(), 2718); // non-segwit tx from my mempool let tx_bytes = Vec::from_hex( @@ -1085,25 +1150,6 @@ mod tests { serde_round_trip!(tx); } - fn run_test_sighash(tx: &str, script: &str, input_index: usize, hash_type: i32, expected_result: &str) { - let tx: Transaction = deserialize(&Vec::from_hex(tx).unwrap()[..]).unwrap(); - let script = Script::from(Vec::from_hex(script).unwrap()); - let mut raw_expected = Vec::from_hex(expected_result).unwrap(); - raw_expected.reverse(); - let expected_result = SigHash::from_slice(&raw_expected[..]).unwrap(); - - let actual_result = if raw_expected[0] % 2 == 0 { - // tx.signature_hash and cache.legacy_signature_hash are the same, this if helps to test - // both the codepaths without repeating the test code - tx.signature_hash(input_index, &script, hash_type as u32) - } else { - let cache = SigHashCache::new(&tx); - cache.legacy_signature_hash(input_index, &script, hash_type as u32).unwrap() - }; - - 
assert_eq!(actual_result, expected_result); - } - // Test decoding transaction `4be105f158ea44aec57bf12c5817d073a712ab131df6f37786872cfc70734188` // from testnet, which is the first BIP144-encoded transaction I encountered. #[test] @@ -1111,7 +1157,7 @@ mod tests { fn test_segwit_tx_decode() { let tx_bytes = Vec::from_hex("010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff3603da1b0e00045503bd5704c7dd8a0d0ced13bb5785010800000000000a636b706f6f6c122f4e696e6a61506f6f6c2f5345475749542fffffffff02b4e5a212000000001976a914876fbb82ec05caa6af7a3b5e5a983aae6c6cc6d688ac0000000000000000266a24aa21a9edf91c46b49eb8a29089980f02ee6b57e7d63d33b18b4fddac2bcd7db2a39837040120000000000000000000000000000000000000000000000000000000000000000000000000").unwrap(); let tx: Transaction = deserialize(&tx_bytes).unwrap(); - assert_eq!(tx.get_weight(), 780); + assert_eq!(tx.weight(), 780); serde_round_trip!(tx); let consensus_encoded = serialize(&tx); @@ -1120,12 +1166,14 @@ mod tests { #[test] fn test_sighashtype_fromstr_display() { - let sighashtypes = vec![("SIGHASH_ALL", EcdsaSigHashType::All), + let sighashtypes = vec![ + ("SIGHASH_ALL", EcdsaSigHashType::All), ("SIGHASH_NONE", EcdsaSigHashType::None), ("SIGHASH_SINGLE", EcdsaSigHashType::Single), ("SIGHASH_ALL|SIGHASH_ANYONECANPAY", EcdsaSigHashType::AllPlusAnyoneCanPay), ("SIGHASH_NONE|SIGHASH_ANYONECANPAY", EcdsaSigHashType::NonePlusAnyoneCanPay), - ("SIGHASH_SINGLE|SIGHASH_ANYONECANPAY", EcdsaSigHashType::SinglePlusAnyoneCanPay)]; + ("SIGHASH_SINGLE|SIGHASH_ANYONECANPAY", EcdsaSigHashType::SinglePlusAnyoneCanPay) + ]; for (s, sht) in sighashtypes { assert_eq!(sht.to_string(), s); assert_eq!(EcdsaSigHashType::from_str(s).unwrap(), sht); @@ -1143,7 +1191,7 @@ mod tests { "SigHash_NONE", ]; for s in sht_mistakes { - assert_eq!(EcdsaSigHashType::from_str(s).unwrap_err().to_string(), format!("can't recognize SIGHASH string '{}'", s)); + assert_eq!(EcdsaSigHashType::from_str(s).unwrap_err().to_string(), 
format!("Unrecognized SIGHASH string '{}'", s)); } } @@ -1152,12 +1200,53 @@ mod tests { fn test_sighashtype_standard() { let nonstandard_hashtype = 0x04; // This type is not well defined, by consensus it becomes ALL - assert_eq!(EcdsaSigHashType::from_u32(nonstandard_hashtype), EcdsaSigHashType::All); assert_eq!(EcdsaSigHashType::from_u32_consensus(nonstandard_hashtype), EcdsaSigHashType::All); // But it's policy-invalid to use it! assert_eq!(EcdsaSigHashType::from_u32_standard(nonstandard_hashtype), Err(NonStandardSigHashType(0x04))); } + #[test] + fn sighash_single_bug() { + const SIGHASH_SINGLE: u32 = 3; + // We need a tx with more inputs than outputs. + let mut input = Vec::new(); + input.push(TxIn::default()); + input.push(TxIn::default()); + let mut output = Vec::new(); + output.push(TxOut::default()); + + let tx = Transaction { + version: 1, + lock_time: 0, + input: input, + output: output, // TODO: Use Vec::from([TxOut]) once we bump MSRV. + }; + let script = Script::new(); + let got = tx.signature_hash(1, &script, SIGHASH_SINGLE); + let want = SigHash::from_slice(&UINT256_ONE).unwrap(); + + assert_eq!(got, want) + } + + fn run_test_sighash(tx: &str, script: &str, input_index: usize, hash_type: i32, expected_result: &str) { + let tx: Transaction = deserialize(&Vec::from_hex(tx).unwrap()[..]).unwrap(); + let script = Script::from(Vec::from_hex(script).unwrap()); + let mut raw_expected = Vec::from_hex(expected_result).unwrap(); + raw_expected.reverse(); + let expected_result = SigHash::from_slice(&raw_expected[..]).unwrap(); + + let actual_result = if raw_expected[0] % 2 == 0 { + // tx.signature_hash and cache.legacy_signature_hash are the same, this if helps to test + // both the codepaths without repeating the test code + tx.signature_hash(input_index, &script, hash_type as u32) + } else { + let cache = SigHashCache::new(&tx); + cache.legacy_signature_hash(input_index, &script, hash_type as u32).unwrap() + }; + + assert_eq!(actual_result, expected_result); 
+ } + // These test vectors were stolen from libbtc, which is Copyright 2014 Jonas Schnelli MIT // They were transformed by replacing {...} with run_test_sighash(...), then the ones containing // OP_CODESEPARATOR in their pubkeys were removed @@ -1489,7 +1578,7 @@ mod tests { // test that we fail with repeated use of same input let mut double_spending = spending.clone(); let re_use = double_spending.input[0].clone(); - double_spending.input.push (re_use); + double_spending.input.push(re_use); assert!(double_spending.verify(|point: &OutPoint| { if let Some(tx) = spent2.remove(&point.txid) { @@ -1525,13 +1614,13 @@ mod benches { const SOME_TX: &'static str = "0100000001a15d57094aa7a21a28cb20b59aab8fc7d1149a3bdbcddba9c622e4f5f6a99ece010000006c493046022100f93bb0e7d8db7bd46e40132d1f8242026e045f03a0efe71bbb8e3f475e970d790221009337cd7f1f929f00cc6ff01f03729b069a7c21b59b1736ddfee5db5946c5da8c0121033b9b137ee87d5a812d6f506efdd37f0affa7ffc310711c06c7f3e097c9447c52ffffffff0100e1f505000000001976a9140389035a9225b3839e2bbf32d826a1e222031fd888ac00000000"; #[bench] - pub fn bench_transaction_get_size(bh: &mut Bencher) { + pub fn bench_transaction_size(bh: &mut Bencher) { let raw_tx = Vec::from_hex(SOME_TX).unwrap(); let mut tx: Transaction = deserialize(&raw_tx).unwrap(); bh.iter(|| { - black_box(black_box(&mut tx).get_size()); + black_box(black_box(&mut tx).size()); }); } diff --git a/src/blockdata/witness.rs b/src/blockdata/witness.rs index e31a7908b..3c700007d 100644 --- a/src/blockdata/witness.rs +++ b/src/blockdata/witness.rs @@ -202,10 +202,8 @@ impl Witness { self.last = self.content.len(); let element_len_varint = VarInt(new_element.len() as u64); let current_content_len = self.content.len(); - self.content.resize( - current_content_len + element_len_varint.len() + new_element.len(), - 0, - ); + self.content + .resize(current_content_len + element_len_varint.len() + new_element.len(), 0); let end_varint = current_content_len + element_len_varint.len(); element_len_varint 
.consensus_encode(&mut self.content[current_content_len..end_varint]) @@ -359,14 +357,9 @@ mod test { for (i, wit_el) in tx.input[0].witness.iter().enumerate() { assert_eq!(expected_wit[i], wit_el.to_hex()); } - assert_eq!( - expected_wit[1], - tx.input[0].witness.last().unwrap().to_hex() - ); - assert_eq!( - expected_wit[0], - tx.input[0].witness.second_to_last().unwrap().to_hex() - ); + assert_eq!(expected_wit[1], tx.input[0].witness.last().unwrap().to_hex()); + assert_eq!(expected_wit[0], tx.input[0].witness.second_to_last().unwrap().to_hex()); + let tx_bytes_back = serialize(&tx); assert_eq!(tx_bytes_back, tx_bytes); } diff --git a/src/consensus/encode.rs b/src/consensus/encode.rs index ec2aa198f..4c24748b8 100644 --- a/src/consensus/encode.rs +++ b/src/consensus/encode.rs @@ -61,7 +61,7 @@ pub enum Error { actual: u32, }, /// Tried to allocate an oversized vector - OversizedVectorAllocation{ + OversizedVectorAllocation { /// The capacity requested requested: usize, /// The maximum capacity @@ -164,9 +164,7 @@ pub fn deserialize(data: &[u8]) -> Result { /// Deserialize an object from a vector, but will not report an error if said deserialization /// doesn't consume the entire vector. -pub fn deserialize_partial( - data: &[u8], -) -> Result<(T, usize), Error> { +pub fn deserialize_partial(data: &[u8]) -> Result<(T, usize), Error> { let mut decoder = Cursor::new(data); let rv = Decodable::consensus_decode(&mut decoder)?; let consumed = decoder.position() as usize; @@ -333,8 +331,8 @@ pub struct VarInt(pub u64); pub struct CheckedData(pub Vec); // Primitive types -macro_rules! impl_int_encodable{ - ($ty:ident, $meth_dec:ident, $meth_enc:ident) => ( +macro_rules! impl_int_encodable { + ($ty:ident, $meth_dec:ident, $meth_enc:ident) => { impl Decodable for $ty { #[inline] fn consensus_decode(mut d: D) -> Result { @@ -343,15 +341,12 @@ macro_rules! 
impl_int_encodable{ } impl Encodable for $ty { #[inline] - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { s.$meth_enc(*self)?; Ok(mem::size_of::<$ty>()) } } - ) + } } impl_int_encodable!(u8, read_u8, emit_u8); @@ -439,7 +434,6 @@ impl Decodable for VarInt { } } - // Booleans impl Encodable for bool { #[inline] @@ -498,13 +492,10 @@ impl Decodable for Cow<'static, str> { // Arrays macro_rules! impl_array { - ( $size:expr ) => ( + ( $size:expr ) => { impl Encodable for [u8; $size] { #[inline] - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { s.emit_slice(&self[..])?; Ok(self.len()) } @@ -518,7 +509,7 @@ macro_rules! impl_array { Ok(ret) } } - ); + }; } impl_array!(2); @@ -554,10 +545,7 @@ macro_rules! impl_vec { ($type: ty) => { impl Encodable for Vec<$type> { #[inline] - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let mut len = 0; len += VarInt(self.len() as u64).consensus_encode(&mut s)?; for c in self.iter() { @@ -571,8 +559,8 @@ macro_rules! impl_vec { fn consensus_decode(mut d: D) -> Result { let len = VarInt::consensus_decode(&mut d)?.0; let byte_size = (len as usize) - .checked_mul(mem::size_of::<$type>()) - .ok_or(self::Error::ParseFailed("Invalid length"))?; + .checked_mul(mem::size_of::<$type>()) + .ok_or(self::Error::ParseFailed("Invalid length"))?; if byte_size > MAX_VEC_SIZE { return Err(self::Error::OversizedVectorAllocation { requested: byte_size, max: MAX_VEC_SIZE }) } @@ -712,7 +700,7 @@ impl Encodable for sync::Arc { // Tuples macro_rules! tuple_encode { - ($($x:ident),*) => ( + ($($x:ident),*) => { impl <$($x: Encodable),*> Encodable for ($($x),*) { #[inline] #[allow(non_snake_case)] @@ -734,7 +722,7 @@ macro_rules! 
tuple_encode { Ok(($({let $x = Decodable::consensus_decode(&mut d)?; $x }),*)) } } - ); + }; } tuple_encode!(T0, T1); @@ -1033,7 +1021,7 @@ mod tests { let witness = vec![vec![0u8; 3_999_999]; 2]; let ser = serialize(&witness); let mut reader = io::Cursor::new(ser); - let err = Vec::>::consensus_decode(&mut reader); + let err = Vec::>::consensus_decode(&mut reader); assert!(err.is_err()); } diff --git a/src/hash_types.rs b/src/hash_types.rs index 253d993b5..19a59c9a0 100644 --- a/src/hash_types.rs +++ b/src/hash_types.rs @@ -60,9 +60,11 @@ hash_newtype!(FilterHeader, sha256d::Hash, 32, doc="Filter header, as defined in impl_hashencode!(Txid); impl_hashencode!(Wtxid); -impl_hashencode!(SigHash); impl_hashencode!(BlockHash); +impl_hashencode!(SigHash); + impl_hashencode!(TxMerkleNode); impl_hashencode!(WitnessMerkleNode); + impl_hashencode!(FilterHash); impl_hashencode!(FilterHeader); diff --git a/src/internal_macros.rs b/src/internal_macros.rs index daf78ab3e..e1bfe7525 100644 --- a/src/internal_macros.rs +++ b/src/internal_macros.rs @@ -93,59 +93,17 @@ macro_rules! impl_array_newtype { } } - impl_index_newtype!($thing, $ty); - } -} - -/// Implements standard indexing methods for a given wrapper type -macro_rules! 
impl_index_newtype { - ($thing:ident, $ty:ty) => { - - impl ::core::ops::Index for $thing { - type Output = $ty; - - #[inline] - fn index(&self, index: usize) -> &$ty { - &self.0[index] - } - } - - impl ::core::ops::Index<::core::ops::Range> for $thing { - type Output = [$ty]; - - #[inline] - fn index(&self, index: ::core::ops::Range) -> &[$ty] { - &self.0[index] - } - } - - impl ::core::ops::Index<::core::ops::RangeTo> for $thing { - type Output = [$ty]; - - #[inline] - fn index(&self, index: ::core::ops::RangeTo) -> &[$ty] { - &self.0[index] - } - } - - impl ::core::ops::Index<::core::ops::RangeFrom> for $thing { - type Output = [$ty]; + impl $crate::core::ops::Index for $thing + where + [$ty]: $crate::core::ops::Index, + { + type Output = <[$ty] as $crate::core::ops::Index>::Output; #[inline] - fn index(&self, index: ::core::ops::RangeFrom) -> &[$ty] { + fn index(&self, index: I) -> &Self::Output { &self.0[index] } } - - impl ::core::ops::Index<::core::ops::RangeFull> for $thing { - type Output = [$ty]; - - #[inline] - fn index(&self, _: ::core::ops::RangeFull) -> &[$ty] { - &self.0[..] - } - } - } } @@ -202,7 +160,7 @@ macro_rules! serde_string_impl { self.visit_str(v) } - fn visit_string(self, v: String) -> Result + fn visit_string(self, v: $crate::prelude::String) -> Result where E: $crate::serde::de::Error, { @@ -264,7 +222,7 @@ macro_rules! serde_struct_human_string_impl { self.visit_str(v) } - fn visit_string(self, v: String) -> Result + fn visit_string(self, v: $crate::prelude::String) -> Result where E: $crate::serde::de::Error, { @@ -446,9 +404,10 @@ macro_rules! 
impl_bytes_newtype { impl $crate::hashes::hex::FromHex for $t { fn from_byte_iter(iter: I) -> Result - where I: ::core::iter::Iterator> + - ::core::iter::ExactSizeIterator + - ::core::iter::DoubleEndedIterator, + where + I: ::core::iter::Iterator> + + ::core::iter::ExactSizeIterator + + ::core::iter::DoubleEndedIterator, { if iter.len() == $len { let mut ret = [0; $len]; diff --git a/src/network/address.rs b/src/network/address.rs index 9cba27e5c..5260ce096 100644 --- a/src/network/address.rs +++ b/src/network/address.rs @@ -38,7 +38,7 @@ pub struct Address { pub port: u16 } -const ONION : [u16; 3] = [0xFD87, 0xD87E, 0xEB43]; +const ONION: [u16; 3] = [0xFD87, 0xD87E, 0xEB43]; impl Address { /// Create an address message for a socket @@ -58,10 +58,7 @@ impl Address { if addr[0..3] == ONION { return Err(io::Error::from(io::ErrorKind::AddrNotAvailable)); } - let ipv6 = Ipv6Addr::new( - addr[0],addr[1],addr[2],addr[3], - addr[4],addr[5],addr[6],addr[7] - ); + let ipv6 = Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7]); if let Some(ipv4) = ipv6.to_ipv4() { Ok(SocketAddr::V4(SocketAddrV4::new(ipv4, self.port))) } else { @@ -82,10 +79,7 @@ fn addr_to_be(addr: [u16; 8]) -> [u16; 8] { impl Encodable for Address { #[inline] - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let len = self.services.consensus_encode(&mut s)? + addr_to_be(self.address).consensus_encode(&mut s)? 
@@ -112,9 +106,9 @@ impl fmt::Debug for Address { let ipv6 = Ipv6Addr::from(self.address); match ipv6.to_ipv4() { - Some(addr) => write!(f, "Address {{services: {}, address: {}, port: {}}}", + Some(addr) => write!(f, "Address {{services: {}, address: {}, port: {}}}", self.services, addr, self.port), - None => write!(f, "Address {{services: {}, address: {}, port: {}}}", + None => write!(f, "Address {{services: {}, address: {}, port: {}}}", self.services, ipv6, self.port) } } @@ -149,12 +143,11 @@ pub enum AddrV2 { impl Encodable for AddrV2 { fn consensus_encode(&self, e: W) -> Result { fn encode_addr(mut e: W, network: u8, bytes: &[u8]) -> Result { - let len = - network.consensus_encode(&mut e)? + - VarInt(bytes.len() as u64).consensus_encode(&mut e)? + - bytes.len(); - e.emit_slice(bytes)?; - Ok(len) + let len = network.consensus_encode(&mut e)? + + VarInt(bytes.len() as u64).consensus_encode(&mut e)? + + bytes.len(); + e.emit_slice(bytes)?; + Ok(len) } Ok(match *self { AddrV2::Ipv4(ref addr) => encode_addr(e, 1, &addr.octets())?, @@ -182,7 +175,7 @@ impl Decodable for AddrV2 { } let addr: [u8; 4] = Decodable::consensus_decode(&mut d)?; AddrV2::Ipv4(Ipv4Addr::new(addr[0], addr[1], addr[2], addr[3])) - }, + }, 2 => { if len != 16 { return Err(encode::Error::ParseFailed("Invalid IPv6 address")); @@ -194,11 +187,8 @@ impl Decodable for AddrV2 { if addr[0..6] == [0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0xFFFF] { return Err(encode::Error::ParseFailed("IPV4 wrapped address sent with IPv6 network id")); } - AddrV2::Ipv6(Ipv6Addr::new( - addr[0],addr[1],addr[2],addr[3], - addr[4],addr[5],addr[6],addr[7] - )) - }, + AddrV2::Ipv6(Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7])) + }, 3 => { if len != 10 { return Err(encode::Error::ParseFailed("Invalid TorV2 address")); @@ -219,7 +209,7 @@ impl Decodable for AddrV2 { } let hash = Decodable::consensus_decode(&mut d)?; AddrV2::I2p(hash) - }, + }, 6 => { if len != 16 { return 
Err(encode::Error::ParseFailed("Invalid CJDNS address")); @@ -230,17 +220,14 @@ impl Decodable for AddrV2 { return Err(encode::Error::ParseFailed("Invalid CJDNS address")); } let addr = addr_to_be(addr); - AddrV2::Cjdns(Ipv6Addr::new( - addr[0],addr[1],addr[2],addr[3], - addr[4],addr[5],addr[6],addr[7] - )) + AddrV2::Cjdns(Ipv6Addr::new(addr[0], addr[1], addr[2], addr[3], addr[4], addr[5], addr[6], addr[7])) }, _ => { // len already checked above to be <= 512 let mut addr = vec![0u8; len as usize]; d.read_slice(&mut addr)?; AddrV2::Unknown(network_id, addr) - } + } }) } } @@ -282,12 +269,12 @@ impl Encodable for AddrV2Message { //TODO `len += io::Write::write(&mut e, &self.port.to_be_bytes())?;` when MSRV >= 1.32 len += self.port.swap_bytes().consensus_encode(e)?; Ok(len) - } + } } impl Decodable for AddrV2Message { fn consensus_decode(mut d: D) -> Result { - Ok(AddrV2Message{ + Ok(AddrV2Message { time: Decodable::consensus_decode(&mut d)?, services: ServiceFlags::from(VarInt::consensus_decode(&mut d)?.0), addr: Decodable::consensus_decode(&mut d)?, diff --git a/src/network/constants.rs b/src/network/constants.rs index ed4f67d80..149940f63 100644 --- a/src/network/constants.rs +++ b/src/network/constants.rs @@ -144,7 +144,7 @@ impl ServiceFlags { /// WITNESS indicates that a node can be asked for blocks and transactions including witness /// data. pub const WITNESS: ServiceFlags = ServiceFlags(1 << 3); - + /// COMPACT_FILTERS means the node will service basic block filter requests. /// See BIP157 and BIP158 for details on how this is implemented. 
pub const COMPACT_FILTERS: ServiceFlags = ServiceFlags(1 << 6); @@ -274,10 +274,7 @@ impl ops::BitXorAssign for ServiceFlags { impl Encodable for ServiceFlags { #[inline] - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { self.0.consensus_encode(&mut s) } } @@ -296,39 +293,16 @@ mod tests { #[test] fn serialize_test() { - assert_eq!( - serialize(&Network::Bitcoin.magic()), - &[0xf9, 0xbe, 0xb4, 0xd9] - ); - assert_eq!( - serialize(&Network::Testnet.magic()), - &[0x0b, 0x11, 0x09, 0x07] - ); - assert_eq!( - serialize(&Network::Signet.magic()), - &[0x0a, 0x03, 0xcf, 0x40] - ); - assert_eq!( - serialize(&Network::Regtest.magic()), - &[0xfa, 0xbf, 0xb5, 0xda] - ); - - assert_eq!( - deserialize(&[0xf9, 0xbe, 0xb4, 0xd9]).ok(), - Some(Network::Bitcoin.magic()) - ); - assert_eq!( - deserialize(&[0x0b, 0x11, 0x09, 0x07]).ok(), - Some(Network::Testnet.magic()) - ); - assert_eq!( - deserialize(&[0x0a, 0x03, 0xcf, 0x40]).ok(), - Some(Network::Signet.magic()) - ); - assert_eq!( - deserialize(&[0xfa, 0xbf, 0xb5, 0xda]).ok(), - Some(Network::Regtest.magic()) - ); + assert_eq!(serialize(&Network::Bitcoin.magic()), &[0xf9, 0xbe, 0xb4, 0xd9]); + assert_eq!(serialize(&Network::Testnet.magic()), &[0x0b, 0x11, 0x09, 0x07]); + assert_eq!(serialize(&Network::Signet.magic()), &[0x0a, 0x03, 0xcf, 0x40]); + assert_eq!(serialize(&Network::Regtest.magic()), &[0xfa, 0xbf, 0xb5, 0xda]); + + assert_eq!(deserialize(&[0xf9, 0xbe, 0xb4, 0xd9]).ok(), Some(Network::Bitcoin.magic())); + assert_eq!(deserialize(&[0x0b, 0x11, 0x09, 0x07]).ok(), Some(Network::Testnet.magic())); + assert_eq!(deserialize(&[0x0a, 0x03, 0xcf, 0x40]).ok(), Some(Network::Signet.magic())); + assert_eq!(deserialize(&[0xfa, 0xbf, 0xb5, 0xda]).ok(), Some(Network::Regtest.magic())); + } #[test] @@ -371,7 +345,7 @@ mod tests { flags2 ^= ServiceFlags::WITNESS; assert_eq!(flags2, ServiceFlags::GETUTXO); - + flags2 |= ServiceFlags::COMPACT_FILTERS; flags2 ^= 
ServiceFlags::GETUTXO; assert_eq!(flags2, ServiceFlags::COMPACT_FILTERS); @@ -385,4 +359,3 @@ mod tests { assert_eq!("ServiceFlags(WITNESS|COMPACT_FILTERS|0xb0)", flag.to_string()); } } - diff --git a/src/network/message.rs b/src/network/message.rs index 6a3212787..49aa7c01e 100644 --- a/src/network/message.rs +++ b/src/network/message.rs @@ -75,10 +75,7 @@ impl AsRef for CommandString { impl Encodable for CommandString { #[inline] - fn consensus_encode( - &self, - s: S, - ) -> Result { + fn consensus_encode(&self, s: S) -> Result { let mut rawbytes = [0u8; 12]; let strbytes = self.0.as_bytes(); debug_assert!(strbytes.len() <= 12); @@ -116,7 +113,7 @@ impl fmt::Display for CommandStringError { #[cfg_attr(docsrs, doc(cfg(feature = "std")))] #[cfg(feature = "std")] -impl ::std::error::Error for CommandStringError { } +impl ::std::error::Error for CommandStringError {} /// A Network message #[derive(Clone, Debug, PartialEq, Eq)] @@ -281,10 +278,7 @@ struct HeaderSerializationWrapper<'a>(&'a Vec); impl<'a> Encodable for HeaderSerializationWrapper<'a> { #[inline] - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let mut len = 0; len += VarInt(self.0.len() as u64).consensus_encode(&mut s)?; for header in self.0.iter() { @@ -296,10 +290,7 @@ impl<'a> Encodable for HeaderSerializationWrapper<'a> { } impl Encodable for RawNetworkMessage { - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let mut len = 0; len += self.magic.consensus_encode(&mut s)?; len += self.command().consensus_encode(&mut s)?; diff --git a/src/network/message_blockdata.rs b/src/network/message_blockdata.rs index a5d37423d..28913df9f 100644 --- a/src/network/message_blockdata.rs +++ b/src/network/message_blockdata.rs @@ -54,14 +54,10 @@ pub enum Inventory { impl Encodable for Inventory { #[inline] - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, 
mut s: S) -> Result { macro_rules! encode_inv { ($code:expr, $item:expr) => { - u32::consensus_encode(&$code, &mut s)? + - $item.consensus_encode(&mut s)? + u32::consensus_encode(&$code, &mut s)? + $item.consensus_encode(&mut s)? } } Ok(match *self { diff --git a/src/network/stream_reader.rs b/src/network/stream_reader.rs index 036fe2442..665e3268e 100644 --- a/src/network/stream_reader.rs +++ b/src/network/stream_reader.rs @@ -41,7 +41,7 @@ impl fmt::Debug for StreamReader { impl StreamReader { /// Constructs new stream reader for a given input stream `stream` - #[deprecated(since="0.28.0", note="wrap your stream into a buffered reader if necessary and use consensus_encode directly")] + #[deprecated(since = "0.28.0", note = "wrap your stream into a buffered reader if necessary and use consensus_encode directly")] pub fn new(stream: R, _buffer_size: Option) -> StreamReader { StreamReader { stream: BufReader::new(stream), @@ -49,7 +49,7 @@ impl StreamReader { } /// Reads stream and parses next message from its current input - #[deprecated(since="0.28.0", note="wrap your stream into a buffered reader if necessary and use consensus_encode directly")] + #[deprecated(since = "0.28.0", note = "wrap your stream into a buffered reader if necessary and use consensus_encode directly")] pub fn read_next(&mut self) -> Result { Decodable::consensus_decode(&mut self.stream) } @@ -222,7 +222,7 @@ mod test { let istream = TcpStream::connect(format!("127.0.0.1:{}", port)).unwrap(); let reader = BufReader::new(istream); - return (handle, reader) + (handle, reader) } #[test] diff --git a/src/policy/mod.rs b/src/policy.rs similarity index 100% rename from src/policy/mod.rs rename to src/policy.rs diff --git a/src/serde_utils.rs b/src/serde_utils.rs index 739f5eb2a..da4011e51 100644 --- a/src/serde_utils.rs +++ b/src/serde_utils.rs @@ -13,8 +13,8 @@ pub mod btreemap_byte_values { use hashes::hex::{FromHex, ToHex}; use serde; - pub fn serialize(v: &BTreeMap>, s: S) - -> Result where + 
pub fn serialize(v: &BTreeMap>, s: S) -> Result + where S: serde::Serializer, T: serde::Serialize + ::core::hash::Hash + Eq + Ord, { @@ -32,15 +32,16 @@ pub mod btreemap_byte_values { } } - pub fn deserialize<'de, D, T>(d: D) - -> Result>, D::Error> where + pub fn deserialize<'de, D, T>(d: D) -> Result>, D::Error> + where D: serde::Deserializer<'de>, T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord, { use ::core::marker::PhantomData; struct Visitor(PhantomData); - impl<'de, T> serde::de::Visitor<'de> for Visitor where + impl<'de, T> serde::de::Visitor<'de> for Visitor + where T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord, { type Value = BTreeMap>; @@ -79,8 +80,8 @@ pub mod btreemap_as_seq { use prelude::*; use serde; - pub fn serialize(v: &BTreeMap, s: S) - -> Result where + pub fn serialize(v: &BTreeMap, s: S) -> Result + where S: serde::Serializer, T: serde::Serialize + ::core::hash::Hash + Eq + Ord, U: serde::Serialize, @@ -99,8 +100,8 @@ pub mod btreemap_as_seq { } } - pub fn deserialize<'de, D, T, U>(d: D) - -> Result, D::Error> where + pub fn deserialize<'de, D, T, U>(d: D) -> Result, D::Error> + where D: serde::Deserializer<'de>, T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord, U: serde::Deserialize<'de>, @@ -108,7 +109,8 @@ pub mod btreemap_as_seq { use ::core::marker::PhantomData; struct Visitor(PhantomData<(T, U)>); - impl<'de, T, U> serde::de::Visitor<'de> for Visitor where + impl<'de, T, U> serde::de::Visitor<'de> for Visitor + where T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord, U: serde::Deserialize<'de>, { @@ -164,8 +166,8 @@ pub mod btreemap_as_seq_byte_values { &'a [u8], ); - pub fn serialize(v: &BTreeMap>, s: S) - -> Result where + pub fn serialize(v: &BTreeMap>, s: S) -> Result + where S: serde::Serializer, T: serde::Serialize + ::core::hash::Hash + Eq + Ord + 'static, { @@ -183,15 +185,16 @@ pub mod btreemap_as_seq_byte_values { } } - pub fn deserialize<'de, D, T>(d: D) - -> Result>, D::Error> where 
+ pub fn deserialize<'de, D, T>(d: D) -> Result>, D::Error> + where D: serde::Deserializer<'de>, T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord, { use ::core::marker::PhantomData; struct Visitor(PhantomData); - impl<'de, T> serde::de::Visitor<'de> for Visitor where + impl<'de, T> serde::de::Visitor<'de> for Visitor + where T: serde::Deserialize<'de> + ::core::hash::Hash + Eq + Ord, { type Value = BTreeMap>; @@ -228,7 +231,8 @@ pub mod hex_bytes { use serde; pub fn serialize(bytes: &T, s: S) -> Result - where T: serde::Serialize + AsRef<[u8]>, S: serde::Serializer + where + T: serde::Serialize + AsRef<[u8]>, S: serde::Serializer { // Don't do anything special when not human readable. if !s.is_human_readable() { @@ -239,7 +243,8 @@ pub mod hex_bytes { } pub fn deserialize<'de, D, B>(d: D) -> Result - where D: serde::Deserializer<'de>, B: serde::Deserialize<'de> + FromHex, + where + D: serde::Deserializer<'de>, B: serde::Deserialize<'de> + FromHex, { struct Visitor(::core::marker::PhantomData); @@ -251,7 +256,8 @@ pub mod hex_bytes { } fn visit_bytes(self, v: &[u8]) -> Result - where E: serde::de::Error, + where + E: serde::de::Error, { if let Ok(hex) = ::core::str::from_utf8(v) { FromHex::from_hex(hex).map_err(E::custom) @@ -261,7 +267,8 @@ pub mod hex_bytes { } fn visit_str(self, v: &str) -> Result - where E: serde::de::Error, + where + E: serde::de::Error, { FromHex::from_hex(v).map_err(E::custom) } diff --git a/src/test_macros.rs b/src/test_macros.rs index 244aa6c91..46adc579c 100644 --- a/src/test_macros.rs +++ b/src/test_macros.rs @@ -27,4 +27,3 @@ macro_rules! 
serde_round_trip ( assert_eq!($var, decoded); }) ); - diff --git a/src/util/address.rs b/src/util/address.rs index a1718332c..982ae118f 100644 --- a/src/util/address.rs +++ b/src/util/address.rs @@ -39,9 +39,9 @@ use core::num::ParseIntError; use core::str::FromStr; #[cfg(feature = "std")] use std::error; -use secp256k1::{Secp256k1, Verification}; +use secp256k1::{Secp256k1, Verification, XOnlyPublicKey}; use bech32; -use hashes::Hash; +use hashes::{sha256, Hash, HashEngine}; use hash_types::{PubkeyHash, ScriptHash}; use blockdata::{script, opcodes}; use blockdata::constants::{PUBKEY_ADDRESS_PREFIX_MAIN, SCRIPT_ADDRESS_PREFIX_MAIN, PUBKEY_ADDRESS_PREFIX_TEST, SCRIPT_ADDRESS_PREFIX_TEST, MAX_SCRIPT_ELEMENT_SIZE}; @@ -94,17 +94,10 @@ impl fmt::Display for Error { Error::InvalidWitnessVersion(v) => write!(f, "invalid witness script version: {}", v), Error::UnparsableWitnessVersion(_) => write!(f, "incorrect format of a witness version byte"), Error::MalformedWitnessVersion => f.write_str("bitcoin script opcode does not match any known witness version, the script is malformed"), - Error::InvalidWitnessProgramLength(l) => write!(f, - "the witness program must be between 2 and 40 bytes in length: length={}", l, - ), - Error::InvalidSegwitV0ProgramLength(l) => write!(f, - "a v0 witness program must be either of length 20 or 32 bytes: length={}", l, - ), - Error::UncompressedPubkey => write!(f, - "an uncompressed pubkey was used where it is not allowed", - ), - Error::ExcessiveScriptSize => write!(f, - "Script size exceed 520 bytes") + Error::InvalidWitnessProgramLength(l) => write!(f, "the witness program must be between 2 and 40 bytes in length: length={}", l), + Error::InvalidSegwitV0ProgramLength(l) => write!(f, "a v0 witness program must be either of length 20 or 32 bytes: length={}", l), + Error::UncompressedPubkey => write!(f, "an uncompressed pubkey was used where it is not allowed"), + Error::ExcessiveScriptSize => write!(f, "Script size exceed 520 bytes"), } } } 
@@ -395,14 +388,11 @@ impl Payload { /// Generates a script pubkey spending to this [Payload]. pub fn script_pubkey(&self) -> script::Script { match *self { - Payload::PubkeyHash(ref hash) => - script::Script::new_p2pkh(hash), - Payload::ScriptHash(ref hash) => - script::Script::new_p2sh(hash), - Payload::WitnessProgram { - version, - program: ref prog, - } => script::Script::new_witness_program(version, prog) + Payload::PubkeyHash(ref hash) => script::Script::new_p2pkh(hash), + Payload::ScriptHash(ref hash) => script::Script::new_p2sh(hash), + Payload::WitnessProgram { version, program: ref prog } => { + script::Script::new_witness_program(version, prog) + } } } @@ -622,10 +612,7 @@ impl Address { /// Creates a pay to taproot address from a pre-tweaked output key. /// /// This method is not recommended for use, [`Address::p2tr()`] should be used where possible. - pub fn p2tr_tweaked( - output_key: TweakedPublicKey, - network: Network - ) -> Address { + pub fn p2tr_tweaked(output_key: TweakedPublicKey, network: Network) -> Address { Address { network, payload: Payload::p2tr_tweaked(output_key), @@ -727,6 +714,37 @@ impl Address { (Network::Testnet, _) | (Network::Regtest, _) | (Network::Signet, _) => true } } + + /// Returns true if the given pubkey is directly related to the address payload. + /// + /// This is determined by directly comparing the address payload with either the + /// hash of the given public key or the segwit redeem hash generated from the + /// given key. For taproot addresses, the supplied key is assumed to be tweaked + pub fn is_related_to_pubkey(&self, pubkey: &PublicKey) -> bool { + let pubkey_hash = pubkey.pubkey_hash(); + let payload = self.payload_as_bytes(); + let xonly_pubkey = XOnlyPublicKey::from(pubkey.inner); + + (*pubkey_hash == *payload) || (xonly_pubkey.serialize() == *payload) || (*segwit_redeem_hash(&pubkey_hash) == *payload) + } + + /// Returns true if the supplied xonly public key can be used to derive the address. 
+ /// + /// This will only work for Taproot addresses. The Public Key is + /// assumed to have already been tweaked. + pub fn is_related_to_xonly_pubkey(&self, xonly_pubkey: &XOnlyPublicKey) -> bool { + let payload = self.payload_as_bytes(); + payload == xonly_pubkey.serialize() + } + + /// Return the address payload as a byte slice + fn payload_as_bytes(&self) -> &[u8] { + match &self.payload { + Payload::ScriptHash(hash) => hash, + Payload::PubkeyHash(hash) => hash, + Payload::WitnessProgram { program, .. } => program, + } + } } // Alternate formatting `{:#}` is used to return uppercase version of bech32 addresses which should @@ -870,6 +888,14 @@ impl fmt::Debug for Address { } } +/// Convert a byte array of a pubkey hash into a segwit redeem hash +fn segwit_redeem_hash(pubkey_hash: &[u8]) -> ::hashes::hash160::Hash { + let mut sha_engine = sha256::Hash::engine(); + sha_engine.input(&[0, 20]); + sha_engine.input(pubkey_hash); + ::hashes::hash160::Hash::from_engine(sha_engine) +} + #[cfg(test)] mod tests { use core::str::FromStr; @@ -1264,7 +1290,7 @@ mod tests { } #[test] - fn p2tr_from_untweaked(){ + fn p2tr_from_untweaked() { //Test case from BIP-086 let internal_key = XOnlyPublicKey::from_str("cc8a4bc64d897bddc5fbc2f670f7a8ba0b386779106cf1223c6fc5d7cd6fc115").unwrap(); let secp = Secp256k1::verification_only(); @@ -1273,4 +1299,95 @@ mod tests { assert_eq!(address.address_type(), Some(AddressType::P2tr)); roundtrips(&address); } + + #[test] + fn test_is_related_to_pubkey_p2wpkh() { + let address_string = "bc1qhvd6suvqzjcu9pxjhrwhtrlj85ny3n2mqql5w4"; + let address = Address::from_str(address_string).expect("address"); + + let pubkey_string = "0347ff3dacd07a1f43805ec6808e801505a6e18245178609972a68afbc2777ff2b"; + let pubkey = PublicKey::from_str(pubkey_string).expect("pubkey"); + + let result = address.is_related_to_pubkey(&pubkey); + assert!(result); + + let unused_pubkey = 
PublicKey::from_str("02ba604e6ad9d3864eda8dc41c62668514ef7d5417d3b6db46e45cc4533bff001c").expect("pubkey"); + assert!(!address.is_related_to_pubkey(&unused_pubkey)) + } + + #[test] + fn test_is_related_to_pubkey_p2shwpkh() { + let address_string = "3EZQk4F8GURH5sqVMLTFisD17yNeKa7Dfs"; + let address = Address::from_str(address_string).expect("address"); + + let pubkey_string = "0347ff3dacd07a1f43805ec6808e801505a6e18245178609972a68afbc2777ff2b"; + let pubkey = PublicKey::from_str(pubkey_string).expect("pubkey"); + + let result = address.is_related_to_pubkey(&pubkey); + assert!(result); + + let unused_pubkey = PublicKey::from_str("02ba604e6ad9d3864eda8dc41c62668514ef7d5417d3b6db46e45cc4533bff001c").expect("pubkey"); + assert!(!address.is_related_to_pubkey(&unused_pubkey)) + } + + #[test] + fn test_is_related_to_pubkey_p2pkh() { + let address_string = "1J4LVanjHMu3JkXbVrahNuQCTGCRRgfWWx"; + let address = Address::from_str(address_string).expect("address"); + + let pubkey_string = "0347ff3dacd07a1f43805ec6808e801505a6e18245178609972a68afbc2777ff2b"; + let pubkey = PublicKey::from_str(pubkey_string).expect("pubkey"); + + let result = address.is_related_to_pubkey(&pubkey); + assert!(result); + + let unused_pubkey = PublicKey::from_str("02ba604e6ad9d3864eda8dc41c62668514ef7d5417d3b6db46e45cc4533bff001c").expect("pubkey"); + assert!(!address.is_related_to_pubkey(&unused_pubkey)) + } + + #[test] + fn test_is_related_to_pubkey_p2pkh_uncompressed_key() { + let address_string = "msvS7KzhReCDpQEJaV2hmGNvuQqVUDuC6p"; + let address = Address::from_str(address_string).expect("address"); + + let pubkey_string = "04e96e22004e3db93530de27ccddfdf1463975d2138ac018fc3e7ba1a2e5e0aad8e424d0b55e2436eb1d0dcd5cb2b8bcc6d53412c22f358de57803a6a655fbbd04"; + let pubkey = PublicKey::from_str(pubkey_string).expect("pubkey"); + + let result = address.is_related_to_pubkey(&pubkey); + assert!(result); + + let unused_pubkey = 
PublicKey::from_str("02ba604e6ad9d3864eda8dc41c62668514ef7d5417d3b6db46e45cc4533bff001c").expect("pubkey"); + assert!(!address.is_related_to_pubkey(&unused_pubkey)) + } + + #[test] + fn test_is_related_to_pubkey_p2tr(){ + let pubkey_string = "0347ff3dacd07a1f43805ec6808e801505a6e18245178609972a68afbc2777ff2b"; + let pubkey = PublicKey::from_str(pubkey_string).expect("pubkey"); + let xonly_pubkey = XOnlyPublicKey::from(pubkey.inner); + let tweaked_pubkey = TweakedPublicKey::dangerous_assume_tweaked(xonly_pubkey); + let address = Address::p2tr_tweaked(tweaked_pubkey, Network::Bitcoin); + + assert_eq!(address, Address::from_str("bc1pgllnmtxs0g058qz7c6qgaqq4qknwrqj9z7rqn9e2dzhmcfmhlu4sfadf5e").expect("address")); + + let result = address.is_related_to_pubkey(&pubkey); + assert!(result); + + let unused_pubkey = PublicKey::from_str("02ba604e6ad9d3864eda8dc41c62668514ef7d5417d3b6db46e45cc4533bff001c").expect("pubkey"); + assert!(!address.is_related_to_pubkey(&unused_pubkey)); + } + + #[test] + fn test_is_related_to_xonly_pubkey(){ + let pubkey_string = "0347ff3dacd07a1f43805ec6808e801505a6e18245178609972a68afbc2777ff2b"; + let pubkey = PublicKey::from_str(pubkey_string).expect("pubkey"); + let xonly_pubkey = XOnlyPublicKey::from(pubkey.inner); + let tweaked_pubkey = TweakedPublicKey::dangerous_assume_tweaked(xonly_pubkey); + let address = Address::p2tr_tweaked(tweaked_pubkey, Network::Bitcoin); + + assert_eq!(address, Address::from_str("bc1pgllnmtxs0g058qz7c6qgaqq4qknwrqj9z7rqn9e2dzhmcfmhlu4sfadf5e").expect("address")); + + let result = address.is_related_to_xonly_pubkey(&xonly_pubkey); + assert!(result); + } } diff --git a/src/util/amount.rs b/src/util/amount.rs index 48958e0cc..c9cca6846 100644 --- a/src/util/amount.rs +++ b/src/util/amount.rs @@ -175,7 +175,7 @@ impl fmt::Display for ParseAmountError { ParseAmountError::PossiblyConfusingDenomination(ref d) => { let (letter, upper, lower) = match d.chars().next() { Some('M') => ('M', "Mega", "milli"), - Some('P') => 
('P',"Peta", "pico"), + Some('P') => ('P', "Peta", "pico"), // This panic could be avoided by adding enum ConfusingDenomination { Mega, Peta } but is it worth it? _ => panic!("invalid error information"), }; @@ -599,7 +599,7 @@ impl FromStr for Amount { } impl ::core::iter::Sum for Amount { - fn sum>(iter: I) -> Self { + fn sum>(iter: I) -> Self { let sats: u64 = iter.map(|amt| amt.0).sum(); Amount::from_sat(sats) } @@ -933,7 +933,7 @@ impl FromStr for SignedAmount { } impl ::core::iter::Sum for SignedAmount { - fn sum>(iter: I) -> Self { + fn sum>(iter: I) -> Self { let sats: i64 = iter.map(|amt| amt.0).sum(); SignedAmount::from_sat(sats) } @@ -946,7 +946,7 @@ pub trait CheckedSum: private::SumSeal { fn checked_sum(self) -> Option; } -impl CheckedSum for T where T: Iterator { +impl CheckedSum for T where T: Iterator { fn checked_sum(mut self) -> Option { let first = Some(self.next().unwrap_or_default()); @@ -957,14 +957,11 @@ impl CheckedSum for T where T: Iterator { } } -impl CheckedSum for T where T: Iterator { +impl CheckedSum for T where T: Iterator { fn checked_sum(mut self) -> Option { let first = Some(self.next().unwrap_or_default()); - self.fold( - first, - |acc, item| acc.and_then(|acc| acc.checked_add(item)) - ) + self.fold(first, |acc, item| acc.and_then(|acc| acc.checked_add(item))) } } @@ -974,8 +971,8 @@ mod private { /// Used to seal the `CheckedSum` trait pub trait SumSeal {} - impl SumSeal for T where T: Iterator {} - impl SumSeal for T where T: Iterator {} + impl SumSeal for T where T: Iterator {} + impl SumSeal for T where T: Iterator {} } #[cfg(feature = "serde")] @@ -1133,12 +1130,13 @@ pub mod serde { fn visit_none(self) -> Result where - E: de::Error { + E: de::Error, + { Ok(None) } fn visit_some(self, d: D) -> Result where - D: Deserializer<'de> + D: Deserializer<'de>, { Ok(Some(X::des_sat(d)?)) } @@ -1187,7 +1185,7 @@ pub mod serde { ) -> Result, D::Error> { struct VisitOptAmt(PhantomData); - impl<'de, X :SerdeAmountForOpt> 
de::Visitor<'de> for VisitOptAmt { + impl<'de, X: SerdeAmountForOpt> de::Visitor<'de> for VisitOptAmt { type Value = Option; fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { @@ -1196,7 +1194,8 @@ pub mod serde { fn visit_none(self) -> Result where - E: de::Error { + E: de::Error, + { Ok(None) } fn visit_some(self, d: D) -> Result @@ -1398,12 +1397,12 @@ mod tests { let sa = SignedAmount::from_sat; let ua = Amount::from_sat; - assert_eq!(Amount::max_value().to_signed(), Err(E::TooBig)); + assert_eq!(Amount::max_value().to_signed(), Err(E::TooBig)); assert_eq!(ua(i64::max_value() as u64).to_signed(), Ok(sa(i64::max_value()))); - assert_eq!(ua(0).to_signed(), Ok(sa(0))); + assert_eq!(ua(0).to_signed(), Ok(sa(0))); assert_eq!(ua(1).to_signed(), Ok( sa(1))); - assert_eq!(ua(1).to_signed(), Ok(sa(1))); - assert_eq!(ua(i64::max_value() as u64 + 1).to_signed(), Err(E::TooBig)); + assert_eq!(ua(1).to_signed(), Ok(sa(1))); + assert_eq!(ua(i64::max_value() as u64 + 1).to_signed(), Err(E::TooBig)); assert_eq!(sa(-1).to_unsigned(), Err(E::Negative)); assert_eq!(sa(i64::max_value()).to_unsigned(), Ok(ua(i64::max_value() as u64))); @@ -1532,10 +1531,7 @@ mod tests { samt: SignedAmount::from_sat(-123456789), }, &[ - serde_test::Token::Struct { - name: "T", - len: 2, - }, + serde_test::Token::Struct { name: "T", len: 2 }, serde_test::Token::Str("amt"), serde_test::Token::U64(123456789), serde_test::Token::Str("samt"), diff --git a/src/util/base58.rs b/src/util/base58.rs index af52481db..6bbdc9ade 100644 --- a/src/util/base58.rs +++ b/src/util/base58.rs @@ -178,7 +178,7 @@ pub fn from_check(data: &str) -> Result, Error> { fn format_iter(writer: &mut W, data: I) -> Result<(), fmt::Error> where - I: Iterator + Clone, + I: Iterator + Clone, W: fmt::Write { let mut ret = SmallVec::new(); @@ -219,7 +219,7 @@ where fn encode_iter(data: I) -> String where - I: Iterator + Clone, + I: Iterator + Clone, { let mut ret = String::new(); format_iter(&mut ret, 
data).expect("writing into string shouldn't fail"); @@ -293,10 +293,10 @@ mod tests { // Addresses let addr = Vec::from_hex("00f8917303bfa8ef24f292e8fa1419b20460ba064d").unwrap(); assert_eq!(&check_encode_slice(&addr[..]), "1PfJpZsjreyVrqeoAfabrRwwjQyoSQMmHH"); - } + } - #[test] - fn test_base58_decode() { + #[test] + fn test_base58_decode() { // Basics assert_eq!(from("1").ok(), Some(vec![0u8])); assert_eq!(from("2").ok(), Some(vec![1u8])); diff --git a/src/util/bip143.rs b/src/util/bip143.rs index b9024ff14..ec4c34578 100644 --- a/src/util/bip143.rs +++ b/src/util/bip143.rs @@ -33,7 +33,7 @@ use util::sighash; /// Parts of a sighash which are common across inputs or signatures, and which are /// sufficient (in conjunction with a private key) to sign the transaction #[derive(Clone, PartialEq, Eq, Debug)] -#[deprecated(since="0.24.0", note="please use [sighash::SigHashCache] instead")] +#[deprecated(since = "0.24.0", note = "please use [sighash::SigHashCache] instead")] pub struct SighashComponents { tx_version: i32, tx_locktime: u32, @@ -107,13 +107,13 @@ impl SighashComponents { } /// A replacement for SigHashComponents which supports all sighash modes -#[deprecated(since="0.27.0", note="please use [sighash::SigHashCache] instead")] -pub struct SigHashCache> { +#[deprecated(since = "0.28.0", note = "please use [sighash::SigHashCache] instead")] +pub struct SigHashCache> { cache: sighash::SigHashCache, } #[allow(deprecated)] -impl> SigHashCache { +impl> SigHashCache { /// Compute the sighash components from an unsigned transaction and auxiliary /// in a lazy manner when required. /// For the generated sighashes to be valid, no fields in the transaction may change except for @@ -155,7 +155,7 @@ impl> SigHashCache { } #[allow(deprecated)] -impl> SigHashCache { +impl> SigHashCache { /// When the SigHashCache is initialized with a mutable reference to a transaction instead of a /// regular reference, this method is available to allow modification to the witnesses. 
/// diff --git a/src/util/bip158.rs b/src/util/bip158.rs index a97cd9728..1dc67ca1c 100644 --- a/src/util/bip158.rs +++ b/src/util/bip158.rs @@ -29,21 +29,21 @@ //! fn get_script_for_coin(coin: &OutPoint) -> Result { //! // get utxo ... //! } -//! +//! //! // create a block filter for a block (server side) //! let filter = BlockFilter::new_script_filter(&block, get_script_for_coin)?; //! //! // or create a filter from known raw data //! let filter = BlockFilter::new(content); -//! +//! //! // read and evaluate a filter -//! +//! //! let query: Iterator = // .. some scripts you care about //! if filter.match_any(&block_hash, &mut query.map(|s| s.as_bytes())) { //! // get this block //! } //! ``` -//! +//! use prelude::*; @@ -519,7 +519,7 @@ mod test { use super::*; extern crate serde_json; - use self::serde_json::{Value}; + use self::serde_json::Value; use consensus::encode::deserialize; use std::collections::HashMap; @@ -576,7 +576,7 @@ mod test { } #[test] - fn test_filter () { + fn test_filter() { let mut patterns = HashSet::new(); patterns.insert(Vec::from_hex("000000").unwrap()); diff --git a/src/util/bip32.rs b/src/util/bip32.rs index 6008c3483..65a973f5f 100644 --- a/src/util/bip32.rs +++ b/src/util/bip32.rs @@ -21,6 +21,7 @@ use prelude::*; use io::Write; use core::{fmt, str::FromStr, default::Default}; +use core::ops::Index; #[cfg(feature = "std")] use std::error; #[cfg(feature = "serde")] use serde; @@ -238,9 +239,20 @@ pub trait IntoDerivationPath { /// A BIP-32 derivation path. 
#[derive(Clone, PartialEq, Eq, Ord, PartialOrd, Hash)] pub struct DerivationPath(Vec); -impl_index_newtype!(DerivationPath, ChildNumber); serde_string_impl!(DerivationPath, "a BIP-32 derivation path"); +impl Index for DerivationPath +where + Vec: Index, +{ + type Output = as Index>::Output; + + #[inline] + fn index(&self, index: I) -> &Self::Output { + &self.0[index] + } +} + impl Default for DerivationPath { fn default() -> DerivationPath { DerivationPath::master() @@ -284,7 +296,7 @@ impl<'a> From<&'a [ChildNumber]> for DerivationPath { } impl ::core::iter::FromIterator for DerivationPath { - fn from_iter(iter: T) -> Self where T: IntoIterator { + fn from_iter(iter: T) -> Self where T: IntoIterator { DerivationPath(Vec::from_iter(iter)) } } @@ -486,11 +498,11 @@ impl fmt::Display for Error { #[cfg_attr(docsrs, doc(cfg(feature = "std")))] impl error::Error for Error { fn cause(&self) -> Option<&dyn error::Error> { - if let Error::Secp256k1(ref e) = *self { - Some(e) - } else { - None - } + if let Error::Secp256k1(ref e) = *self { + Some(e) + } else { + None + } } } @@ -901,34 +913,26 @@ mod tests { assert_eq!(indexed.child(ChildNumber::from_hardened_idx(2).unwrap()), path); } - fn test_path(secp: &Secp256k1, - network: Network, - seed: &[u8], - path: DerivationPath, - expected_sk: &str, - expected_pk: &str) { - + fn test_path( + secp: &Secp256k1, + network: Network, + seed: &[u8], + path: DerivationPath, + expected_sk: &str, + expected_pk: &str) + { let mut sk = ExtendedPrivKey::new_master(network, seed).unwrap(); let mut pk = ExtendedPubKey::from_priv(secp, &sk); // Check derivation convenience method for ExtendedPrivKey - assert_eq!( - &sk.derive_priv(secp, &path).unwrap().to_string()[..], - expected_sk - ); + assert_eq!(&sk.derive_priv(secp, &path).unwrap().to_string()[..], expected_sk); // Check derivation convenience method for ExtendedPubKey, should error // appropriately if any ChildNumber is hardened if path.0.iter().any(|cnum| cnum.is_hardened()) { - 
assert_eq!( - pk.derive_pub(secp, &path), - Err(Error::CannotDeriveFromHardenedKey) - ); + assert_eq!(pk.derive_pub(secp, &path), Err(Error::CannotDeriveFromHardenedKey)); } else { - assert_eq!( - &pk.derive_pub(secp, &path).unwrap().to_string()[..], - expected_pk - ); + assert_eq!(&pk.derive_pub(secp, &path).unwrap().to_string()[..], expected_pk); } // Derive keys, checking hardened and non-hardened derivation one-by-one diff --git a/src/util/contracthash.rs b/src/util/contracthash.rs deleted file mode 100644 index 919254f0f..000000000 --- a/src/util/contracthash.rs +++ /dev/null @@ -1,399 +0,0 @@ -// Rust Bitcoin Library -// Written in 2015 by -// Andrew Poelstra -// -// To the extent possible under law, the author(s) have dedicated all -// copyright and related and neighboring rights to this software to -// the public domain worldwide. This software is distributed without -// any warranty. -// -// You should have received a copy of the CC0 Public Domain Dedication -// along with this software. -// If not, see . -// - -//! Pay-to-contract-hash support. -//! -//! See Appendix A of the Blockstream sidechains whitepaper at -//! for details of what this does. -//! -//! This module is deprecated. - -#![cfg_attr(not(test), deprecated)] - -use prelude::*; - -use core::fmt; -#[cfg(feature = "std")] use std::error; - -use secp256k1::{self, Secp256k1}; -use PrivateKey; -use PublicKey; -use hashes::{sha256, Hash, HashEngine, Hmac, HmacEngine}; -use blockdata::{opcodes, script}; - -use hash_types::ScriptHash; -use network::constants::Network; -use util::address; - -/// Encoding of "pubkey here" in script; from Bitcoin Core `src/script/script.h` -static PUBKEY: u8 = 0xFE; - -/// A contract-hash error -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] -pub enum Error { - /// Other secp256k1 related error - Secp(secp256k1::Error), - /// Script parsing error - Script(script::Error), - /// Encountered an uncompressed key in a script we were deserializing. 
The - /// reserialization will compress it which might be surprising so we call - /// this an error. - UncompressedKey, - /// Expected a public key when deserializing a script, but we got something else. - ExpectedKey, - /// Expected some sort of CHECKSIG operator when deserializing a script, but - /// we got something else. - ExpectedChecksig, - /// Did not have enough keys to instantiate a script template - TooFewKeys(usize), - /// Had too many keys; template does not match key list - TooManyKeys(usize) -} - -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Error::Secp(ref e) => fmt::Display::fmt(&e, f), - Error::Script(ref e) => fmt::Display::fmt(&e, f), - Error::UncompressedKey => f.write_str("encountered uncompressed secp public key"), - Error::ExpectedKey => f.write_str("expected key when deserializing script"), - Error::ExpectedChecksig => f.write_str("expected OP_*CHECKSIG* when deserializing script"), - Error::TooFewKeys(n) => write!(f, "got {} keys, which was not enough", n), - Error::TooManyKeys(n) => write!(f, "got {} keys, which was too many", n) - } - } -} - -#[cfg(feature = "std")] -#[cfg_attr(docsrs, doc(cfg(feature = "std")))] -impl ::std::error::Error for Error { - fn cause(&self) -> Option<&dyn error::Error> { - match *self { - Error::Secp(ref e) => Some(e), - Error::Script(ref e) => Some(e), - _ => None - } - } -} - -/// An element of a script template -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -enum TemplateElement { - Op(opcodes::All), - Key -} - -/// A script template -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct Template(Vec); - -impl Template { - /// Instantiate a template - pub fn to_script(&self, keys: &[PublicKey]) -> Result { - let mut key_index = 0; - let mut ret = script::Builder::new(); - for elem in &self.0 { - ret = match *elem { - TemplateElement::Op(opcode) => ret.push_opcode(opcode), - TemplateElement::Key => { - if key_index == keys.len() { - return 
Err(Error::TooFewKeys(key_index)); - } - key_index += 1; - ret.push_key(&keys[key_index - 1]) - } - } - } - if key_index == keys.len() { - Ok(ret.into_script()) - } else { - Err(Error::TooManyKeys(keys.len())) - } - } - - /// Returns the number of keys this template requires to instantiate - pub fn required_keys(&self) -> usize { - self.0.iter().filter(|e| **e == TemplateElement::Key).count() - } - - /// If the first push in the template is a number, return this number. For the - /// common case of standard multisig templates, such a number will exist and - /// will represent the number of signatures that are required for the script - /// to pass. - pub fn first_push_as_number(&self) -> Option { - if !self.0.is_empty() { - if let TemplateElement::Op(op) = self.0[0] { - if let opcodes::Class::PushNum(n) = op.classify(opcodes::ClassifyContext::Legacy) { - if n >= 0 { - return Some(n as usize); - } - } - } - } - None - } -} - -impl<'a> From<&'a [u8]> for Template { - fn from(slice: &'a [u8]) -> Template { - Template(slice.iter().map(|&byte| { - if byte == PUBKEY { - TemplateElement::Key - } else { - TemplateElement::Op(opcodes::All::from(byte)) - } - }).collect()) - } -} - -/// Tweak a single key using some arbitrary data -pub fn tweak_key(secp: &Secp256k1, mut key: PublicKey, contract: &[u8]) -> PublicKey { - let hmac_result = compute_tweak(&key, contract); - key.inner.add_exp_assign(secp, &hmac_result[..]).expect("HMAC cannot produce invalid tweak"); - key -} - -/// Tweak keys using some arbitrary data -pub fn tweak_keys(secp: &Secp256k1, keys: &[PublicKey], contract: &[u8]) -> Vec { - keys.iter().cloned().map(|key| tweak_key(secp, key, contract)).collect() -} - -/// Compute a tweak from some given data for the given public key -pub fn compute_tweak(pk: &PublicKey, contract: &[u8]) -> Hmac { - let mut hmac_engine: HmacEngine = if pk.compressed { - HmacEngine::new(&pk.inner.serialize()) - } else { - HmacEngine::new(&pk.inner.serialize_uncompressed()) - }; - 
hmac_engine.input(contract); - Hmac::from_engine(hmac_engine) -} - -/// Tweak a secret key using some arbitrary data (calls `compute_tweak` internally) -pub fn tweak_secret_key(secp: &Secp256k1, key: &PrivateKey, contract: &[u8]) -> Result { - // Compute public key - let pk = PublicKey::from_private_key(secp, key); - // Compute tweak - let hmac_sk = compute_tweak(&pk, contract); - // Execute the tweak - let mut key = *key; - key.inner.add_assign(&hmac_sk[..]).map_err(Error::Secp)?; - // Return - Ok(key) -} - -/// Takes a contract, template and key set and runs through all the steps -pub fn create_address(secp: &Secp256k1, - network: Network, - contract: &[u8], - keys: &[PublicKey], - template: &Template) - -> Result { - let keys = tweak_keys(secp, keys, contract); - let script = template.to_script(&keys)?; - Ok(address::Address { - network, - payload: address::Payload::ScriptHash( - ScriptHash::hash(&script[..]) - ) - }) -} - -/// Extract the keys and template from a completed script -pub fn untemplate(script: &script::Script) -> Result<(Template, Vec), Error> { - let mut ret = script::Builder::new(); - let mut retkeys = vec![]; - - #[derive(Copy, Clone, PartialEq, Eq)] - enum Mode { - SeekingKeys, - CopyingKeys, - SeekingCheckMulti - } - - let mut mode = Mode::SeekingKeys; - for instruction in script.instructions() { - if let Err(e) = instruction { - return Err(Error::Script(e)); - } - match instruction.unwrap() { - script::Instruction::PushBytes(data) => { - let n = data.len(); - ret = match PublicKey::from_slice(data) { - Ok(key) => { - if n == 65 { return Err(Error::UncompressedKey); } - if mode == Mode::SeekingCheckMulti { return Err(Error::ExpectedChecksig); } - retkeys.push(key); - mode = Mode::CopyingKeys; - ret.push_opcode(opcodes::All::from(PUBKEY)) - } - Err(_) => { - // Arbitrary pushes are only allowed before we've found any keys. - // Otherwise we have to wait for a N CHECKSIG pair. 
- match mode { - Mode::SeekingKeys => { ret.push_slice(data) } - Mode::CopyingKeys => { return Err(Error::ExpectedKey); }, - Mode::SeekingCheckMulti => { return Err(Error::ExpectedChecksig); } - } - } - } - } - script::Instruction::Op(op) => { - match op.classify(opcodes::ClassifyContext::Legacy) { - // CHECKSIG should only come after a list of keys - opcodes::Class::Ordinary(opcodes::Ordinary::OP_CHECKSIG) | - opcodes::Class::Ordinary(opcodes::Ordinary::OP_CHECKSIGVERIFY) => { - if mode == Mode::SeekingKeys { return Err(Error::ExpectedKey); } - mode = Mode::SeekingKeys; - } - // CHECKMULTISIG should only come after a number - opcodes::Class::Ordinary(opcodes::Ordinary::OP_CHECKMULTISIG) | - opcodes::Class::Ordinary(opcodes::Ordinary::OP_CHECKMULTISIGVERIFY) => { - if mode == Mode::SeekingKeys { return Err(Error::ExpectedKey); } - if mode == Mode::CopyingKeys { return Err(Error::ExpectedKey); } - mode = Mode::SeekingKeys; - } - // Numbers after keys mean we expect a CHECKMULTISIG. - opcodes::Class::PushNum(_) => { - if mode == Mode::SeekingCheckMulti { return Err(Error::ExpectedChecksig); } - if mode == Mode::CopyingKeys { mode = Mode::SeekingCheckMulti; } - } - // All other opcodes do nothing - _ => {} - } - ret = ret.push_opcode(op); - } - } - } - Ok((Template::from(&ret[..]), retkeys)) -} - -#[cfg(test)] -mod tests { - use secp256k1::Secp256k1; - use hashes::hex::FromHex; - use secp256k1::rand::thread_rng; - use core::str::FromStr; - - use blockdata::script::Script; - use network::constants::Network; - - use super::*; - use PublicKey; - - macro_rules! hex (($hex:expr) => (Vec::from_hex($hex).unwrap())); - macro_rules! hex_key (($hex:expr) => (PublicKey::from_slice(&hex!($hex)).unwrap())); - macro_rules! alpha_template(() => (Template::from(&hex!("55fefefefefefefe57AE")[..]))); - macro_rules! 
alpha_keys(() => ( - &[hex_key!("0269992fb441ae56968e5b77d46a3e53b69f136444ae65a94041fc937bdb28d933"), - hex_key!("021df31471281d4478df85bfce08a10aab82601dca949a79950f8ddf7002bd915a"), - hex_key!("02174c82021492c2c6dfcbfa4187d10d38bed06afb7fdcd72c880179fddd641ea1"), - hex_key!("033f96e43d72c33327b6a4631ccaa6ea07f0b106c88b9dc71c9000bb6044d5e88a"), - hex_key!("0313d8748790f2a86fb524579b46ce3c68fedd58d2a738716249a9f7d5458a15c2"), - hex_key!("030b632eeb079eb83648886122a04c7bf6d98ab5dfb94cf353ee3e9382a4c2fab0"), - hex_key!("02fb54a7fcaa73c307cfd70f3fa66a2e4247a71858ca731396343ad30c7c4009ce")] - )); - - #[test] - fn sanity() { - let secp = Secp256k1::new(); - let keys = alpha_keys!(); - // This is the first withdraw ever, in alpha a94f95cc47b444c10449c0eed51d895e4970560c4a1a9d15d46124858abc3afe - let contract = hex!("5032534894ffbf32c1f1c0d3089b27c98fd991d5d7329ebd7d711223e2cde5a9417a1fa3e852c576"); - - let addr = create_address(&secp, Network::Testnet, &contract, keys, &alpha_template!()).unwrap(); - assert_eq!(addr.to_string(), "2N3zXjbwdTcPsJiy8sUK9FhWJhqQCxA8Jjr".to_owned()); - } - - #[test] - fn script() { - let alpha_keys = alpha_keys!(); - let alpha_template = alpha_template!(); - - let alpha_redeem = Script::from(hex!("55210269992fb441ae56968e5b77d46a3e53b69f136444ae65a94041fc937bdb28d93321021df31471281d4478df85bfce08a10aab82601dca949a79950f8ddf7002bd915a2102174c82021492c2c6dfcbfa4187d10d38bed06afb7fdcd72c880179fddd641ea121033f96e43d72c33327b6a4631ccaa6ea07f0b106c88b9dc71c9000bb6044d5e88a210313d8748790f2a86fb524579b46ce3c68fedd58d2a738716249a9f7d5458a15c221030b632eeb079eb83648886122a04c7bf6d98ab5dfb94cf353ee3e9382a4c2fab02102fb54a7fcaa73c307cfd70f3fa66a2e4247a71858ca731396343ad30c7c4009ce57ae")); - let (template, keys) = untemplate(&alpha_redeem).unwrap(); - - assert_eq!(keys, alpha_keys); - assert_eq!(template, alpha_template); - } - - #[test] - fn tweak_secret() { - let secp = Secp256k1::new(); - let (sk1, pk1) = secp.generate_keypair(&mut thread_rng()); - let 
(sk2, pk2) = secp.generate_keypair(&mut thread_rng()); - let (sk3, pk3) = secp.generate_keypair(&mut thread_rng()); - - let sk1 = PrivateKey::new(sk1, Network::Bitcoin); - let sk2 = PrivateKey::new_uncompressed(sk2, Network::Bitcoin); - let sk3 = PrivateKey::new(sk3, Network::Bitcoin); - let pks = [ - PublicKey::new(pk1), - PublicKey::new_uncompressed(pk2), - PublicKey::new(pk3), - ]; - let contract = b"if bottle mt dont remembr drink wont pay"; - - // Directly compute tweaks on pubkeys - let tweaked_pks = tweak_keys(&secp, &pks, &contract[..]); - // Compute tweaks on secret keys - let tweaked_pk1 = PublicKey::from_private_key(&secp, &tweak_secret_key(&secp, &sk1, &contract[..]).unwrap()); - let tweaked_pk2 = PublicKey::from_private_key(&secp, &tweak_secret_key(&secp, &sk2, &contract[..]).unwrap()); - let tweaked_pk3 = PublicKey::from_private_key(&secp, &tweak_secret_key(&secp, &sk3, &contract[..]).unwrap()); - // Check equality - assert_eq!(tweaked_pks[0], tweaked_pk1); - assert_eq!(tweaked_pks[1], tweaked_pk2); - assert_eq!(tweaked_pks[2], tweaked_pk3); - } - - #[test] - fn tweak_fixed_vector() { - let secp = Secp256k1::new(); - - let pks = [ - PublicKey::from_str("02ba604e6ad9d3864eda8dc41c62668514ef7d5417d3b6db46e45cc4533bff001c").unwrap(), - PublicKey::from_str("0365c0755ea55ce85d8a1900c68a524dbfd1c0db45ac3b3840dbb10071fe55e7a8").unwrap(), - PublicKey::from_str("0202313ca315889b2e69c94cf86901119321c7288139ba53ac022b7af3dc250054").unwrap(), - ]; - let tweaked_pks = [ - PublicKey::from_str("03b3597221b5982a3f1a77aed50f0015d1b6edfc69023ef7f25cfac0e8af1b2041").unwrap(), - PublicKey::from_str("0296ece1fd954f7ae94f8d6bad19fd6d583f5b36335cf13135a3053a22f3c1fb05").unwrap(), - PublicKey::from_str("0230bb1ca5dbc7fcf49294c2c3e582e5582eabf7c87e885735dc774da45d610e51").unwrap(), - ]; - let contract = b"if bottle mt dont remembr drink wont pay"; - - // Directly compute tweaks on pubkeys - assert_eq!( - tweak_keys(&secp, &pks, &contract[..]), - tweaked_pks - ); - } - - 
#[test] - fn bad_key_number() { - let alpha_keys = alpha_keys!(); - let template_short = Template::from(&hex!("55fefefefefefe57AE")[..]); - let template_long = Template::from(&hex!("55fefefefefefefefe57AE")[..]); - let template = Template::from(&hex!("55fefefefefefefe57AE")[..]); - - assert_eq!(template_short.required_keys(), 6); - assert_eq!(template_long.required_keys(), 8); - assert_eq!(template.required_keys(), 7); - assert_eq!(template_short.to_script(alpha_keys), Err(Error::TooManyKeys(7))); - assert_eq!(template_long.to_script(alpha_keys), Err(Error::TooFewKeys(7))); - assert!(template.to_script(alpha_keys).is_ok()); - } -} - - diff --git a/src/util/ecdsa.rs b/src/util/ecdsa.rs index 5a04b1cc9..e4861769d 100644 --- a/src/util/ecdsa.rs +++ b/src/util/ecdsa.rs @@ -47,7 +47,7 @@ impl EcdsaSig { pub fn from_slice(sl: &[u8]) -> Result { let (hash_ty, sig) = sl.split_last() .ok_or(EcdsaSigError::EmptySignature)?; - let hash_ty = EcdsaSigHashType::from_u32_standard(*hash_ty as u32) + let hash_ty = EcdsaSigHashType::from_standard(*hash_ty as u32) .map_err(|_| EcdsaSigError::NonStandardSigHashType(*hash_ty as u32))?; let sig = secp256k1::ecdsa::Signature::from_der(sig) .map_err(EcdsaSigError::Secp256k1)?; @@ -80,7 +80,7 @@ impl FromStr for EcdsaSig { .ok_or(EcdsaSigError::EmptySignature)?; Ok(EcdsaSig { sig: secp256k1::ecdsa::Signature::from_der(signature)?, - hash_ty: EcdsaSigHashType::from_u32_standard(*sighash_byte as u32)? + hash_ty: EcdsaSigHashType::from_standard(*sighash_byte as u32)? }) } } diff --git a/src/util/hash.rs b/src/util/hash.rs index 268ac15ae..6a14d07b3 100644 --- a/src/util/hash.rs +++ b/src/util/hash.rs @@ -36,7 +36,8 @@ use consensus::encode::Encodable; /// - `Some(hash)` if `hashes` contains one element. A single hash is by definition the merkle root. /// - `Some(merkle_root)` if length of `hashes` is greater than one. 
pub fn bitcoin_merkle_root_inline(hashes: &mut [T]) -> Option - where T: Hash + Encodable, +where + T: Hash + Encodable, ::Engine: io::Write, { match hashes.len() { @@ -53,9 +54,10 @@ pub fn bitcoin_merkle_root_inline(hashes: &mut [T]) -> Option /// - `Some(hash)` if `hashes` contains one element. A single hash is by definition the merkle root. /// - `Some(merkle_root)` if length of `hashes` is greater than one. pub fn bitcoin_merkle_root(mut hashes: I) -> Option - where T: Hash + Encodable, - ::Engine: io::Write, - I: Iterator, +where + T: Hash + Encodable, + ::Engine: io::Write, + I: Iterator, { let first = hashes.next()?; let second = match hashes.next() { @@ -84,8 +86,9 @@ pub fn bitcoin_merkle_root(mut hashes: I) -> Option // `hashes` must contain at least one hash. fn merkle_root_r(hashes: &mut [T]) -> T - where T: Hash + Encodable, - ::Engine: io::Write, +where + T: Hash + Encodable, + ::Engine: io::Write, { if hashes.len() == 1 { return hashes[0] diff --git a/src/util/key.rs b/src/util/key.rs index 6fff8a957..fa92e460a 100644 --- a/src/util/key.rs +++ b/src/util/key.rs @@ -31,7 +31,6 @@ use hashes::{Hash, hash160, hex, hex::FromHex}; use hash_types::{PubkeyHash, WPubkeyHash}; use util::base58; - /// A key-related error. 
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] pub enum Error { @@ -45,7 +44,6 @@ pub enum Error { Hex(hex::Error) } - impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { @@ -158,14 +156,10 @@ impl PublicKey { let mut bytes = [0; 65]; reader.read_exact(&mut bytes[0..1])?; - let bytes = if bytes[0] < 4 { - &mut bytes[..33] - } else { - &mut bytes[..65] - }; + let bytes = if bytes[0] < 4 { &mut bytes[..33] } else { &mut bytes[..65] }; reader.read_exact(&mut bytes[1..])?; - Self::from_slice(bytes).map_err(|e|{ + Self::from_slice(bytes).map_err(|e| { // Need a static string for core2 #[cfg(feature = "std")] let reason = e; @@ -189,10 +183,12 @@ impl PublicKey { /// Deserialize a public key from a slice pub fn from_slice(data: &[u8]) -> Result { - let compressed: bool = match data.len() { + let compressed = match data.len() { 33 => true, 65 => false, - len => { return Err(base58::Error::InvalidLength(len).into()); }, + len => { + return Err(base58::Error::InvalidLength(len).into()); + }, }; if !compressed && data[0] != 0x04 { @@ -285,10 +281,7 @@ impl PrivateKey { /// Deserialize a private key from a slice pub fn from_slice(data: &[u8], network: Network) -> Result { - Ok(PrivateKey::new( - secp256k1::SecretKey::from_slice(data)?, - network, - )) + Ok(PrivateKey::new(secp256k1::SecretKey::from_slice(data)?, network)) } /// Format the private key to WIF format. 
@@ -323,13 +316,17 @@ impl PrivateKey { let compressed = match data.len() { 33 => false, 34 => true, - _ => { return Err(Error::Base58(base58::Error::InvalidLength(data.len()))); } + _ => { + return Err(Error::Base58(base58::Error::InvalidLength(data.len()))); + } }; let network = match data[0] { 128 => Network::Bitcoin, 239 => Network::Testnet, - x => { return Err(Error::Base58(base58::Error::InvalidAddressVersion(x))); } + x => { + return Err(Error::Base58(base58::Error::InvalidAddressVersion(x))); + } }; Ok(PrivateKey { diff --git a/src/util/merkleblock.rs b/src/util/merkleblock.rs index 7931862ce..cc70f5818 100644 --- a/src/util/merkleblock.rs +++ b/src/util/merkleblock.rs @@ -189,9 +189,7 @@ impl PartialMerkleTree { } // there can never be more hashes provided than one for every txid if self.hashes.len() as u32 > self.num_transactions { - return Err(BadFormat( - "Proof contains more hashes than transactions".to_owned(), - )); + return Err(BadFormat("Proof contains more hashes than transactions".to_owned())); }; // there must be at least one bit per node in the partial tree, and at least one node per hash if self.bits.len() < self.hashes.len() { @@ -246,13 +244,7 @@ impl PartialMerkleTree { } /// Recursive function that traverses tree nodes, storing the data as bits and hashes - fn traverse_and_build( - &mut self, - height: u32, - pos: u32, - txids: &[Txid], - matches: &[bool], - ) { + fn traverse_and_build(&mut self, height: u32, pos: u32, txids: &[Txid], matches: &[bool]) { // Determine whether this node is the parent of at least one matched txid let mut parent_of_match = false; let mut p = pos << height; @@ -348,10 +340,7 @@ impl PartialMerkleTree { } impl Encodable for PartialMerkleTree { - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let ret = self.num_transactions.consensus_encode(&mut s)? 
+ self.hashes.consensus_encode(&mut s)?; let mut bytes: Vec = vec![0; (self.bits.len() + 7) / 8]; @@ -432,7 +421,9 @@ impl MerkleBlock { /// assert_eq!(txid, matches[0]); /// ``` pub fn from_block_with_predicate(block: &Block, match_txids: F) -> Self - where F: Fn(&Txid) -> bool { + where + F: Fn(&Txid) -> bool + { let block_txids: Vec<_> = block.txdata.iter().map(Transaction::txid).collect(); Self::from_header_txids_with_predicate(&block.header, &block_txids, match_txids) } @@ -440,7 +431,7 @@ impl MerkleBlock { /// Create a MerkleBlock from a block, that contains proofs for specific txids. #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - #[deprecated(since="0.26.2", note="use from_block_with_predicate")] + #[deprecated(since = "0.26.2", note = "use from_block_with_predicate")] pub fn from_block(block: &Block, match_txids: &::std::collections::HashSet) -> Self { Self::from_block_with_predicate(block, |t| match_txids.contains(t)) } @@ -453,7 +444,10 @@ impl MerkleBlock { header: &BlockHeader, block_txids: &[Txid], match_txids: F, - ) -> Self where F: Fn(&Txid) -> bool { + ) -> Self + where + F: Fn(&Txid) -> bool + { let matches: Vec = block_txids .iter() .map(match_txids) @@ -469,7 +463,7 @@ impl MerkleBlock { /// Create a MerkleBlock from the block's header and txids, that should contain proofs for match_txids. #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - #[deprecated(since="0.26.2", note="use from_header_txids_with_predicate")] + #[deprecated(since = "0.26.2", note = "use from_header_txids_with_predicate")] pub fn from_header_txids( header: &BlockHeader, block_txids: &[Txid], @@ -497,10 +491,7 @@ impl MerkleBlock { } impl Encodable for MerkleBlock { - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let len = self.header.consensus_encode(&mut s)? 
+ self.txn.consensus_encode(s)?; Ok(len) diff --git a/src/util/misc.rs b/src/util/misc.rs index e7211f1f0..ad1215578 100644 --- a/src/util/misc.rs +++ b/src/util/misc.rs @@ -217,8 +217,12 @@ mod message_signing { /// instance of it, returning the number of instances removed. /// Loops through the vector opcode by opcode, skipping pushed data. pub fn script_find_and_remove(haystack: &mut Vec, needle: &[u8]) -> usize { - if needle.len() > haystack.len() { return 0; } - if needle.is_empty() { return 0; } + if needle.len() > haystack.len() { + return 0; + } + if needle.is_empty() { + return 0; + } let mut top = haystack.len() - needle.len(); let mut n_deleted = 0; @@ -233,7 +237,9 @@ pub fn script_find_and_remove(haystack: &mut Vec, needle: &[u8]) -> usize { // This is ugly but prevents infinite loop in case of overflow let overflow = top < needle.len(); top = top.wrapping_sub(needle.len()); - if overflow { break; } + if overflow { + break; + } } else { i += match opcodes::All::from((*haystack)[i]).classify(opcodes::ClassifyContext::Legacy) { opcodes::Class::PushBytes(n) => n as usize + 1, @@ -349,4 +355,28 @@ mod tests { Err(MessageSignatureError::UnsupportedAddressType(AddressType::P2sh)) ); } + + #[test] + #[cfg(all(feature = "secp-recovery", feature = "base64"))] + fn test_incorrect_message_signature() { + use secp256k1; + use util::key::PublicKey; + + let secp = secp256k1::Secp256k1::new(); + let message = "a different message from what was signed"; + let msg_hash = super::signed_msg_hash(&message); + + // Signature of msg = "rust-bitcoin MessageSignature test" + // Signed with pk "UuOGDsfLPr4HIMKQX0ipjJeRaj1geCq3yPUF2COP5ME=" + let signature_base64 = "IAM2qX24tYx/bdBTIgVLhD8QEAjrPlJpmjB4nZHdRYGIBa4DmVulAcwjPnWe6Q5iEwXH6F0pUCJP/ZeHPWS1h1o="; + let pubkey_base64 = "A1FTfMEntPpAty3qkEo0q2Dc1FEycI10a3jmwEFy+Qr6"; + let signature = super::MessageSignature::from_base64(signature_base64).expect("message signature"); + + let pubkey = PublicKey::from_slice( + 
&::base64::decode(&pubkey_base64).expect("base64 string") + ).expect("pubkey slice"); + + let p2pkh = ::Address::p2pkh(&pubkey, ::Network::Bitcoin); + assert_eq!(signature.is_signed_by_address(&secp, &p2pkh, msg_hash), Ok(false)); + } } diff --git a/src/util/mod.rs b/src/util/mod.rs index ba02fd706..bace2e45b 100644 --- a/src/util/mod.rs +++ b/src/util/mod.rs @@ -25,7 +25,6 @@ pub mod amount; pub mod base58; pub mod bip32; pub mod bip143; -pub mod contracthash; pub mod hash; pub mod merkleblock; pub mod misc; diff --git a/src/util/psbt/map/input.rs b/src/util/psbt/map/input.rs index 2afb931e2..dd660d67e 100644 --- a/src/util/psbt/map/input.rs +++ b/src/util/psbt/map/input.rs @@ -14,11 +14,13 @@ use prelude::*; use io; +use core::fmt; +use core::str::FromStr; use secp256k1; use blockdata::script::Script; use blockdata::witness::Witness; -use blockdata::transaction::{Transaction, TxOut, NonStandardSigHashType}; +use blockdata::transaction::{Transaction, TxOut, NonStandardSigHashType, SigHashTypeParseError}; use consensus::encode; use hashes::{self, hash160, ripemd160, sha256, sha256d}; use secp256k1::XOnlyPublicKey; @@ -67,11 +69,11 @@ const PSBT_IN_TAP_SCRIPT_SIG: u8 = 0x14; /// Type: Taproot Leaf Script PSBT_IN_TAP_LEAF_SCRIPT = 0x14 const PSBT_IN_TAP_LEAF_SCRIPT: u8 = 0x15; /// Type: Taproot Key BIP 32 Derivation Path PSBT_IN_TAP_BIP32_DERIVATION = 0x16 -const PSBT_IN_TAP_BIP32_DERIVATION : u8 = 0x16; +const PSBT_IN_TAP_BIP32_DERIVATION: u8 = 0x16; /// Type: Taproot Internal Key PSBT_IN_TAP_INTERNAL_KEY = 0x17 -const PSBT_IN_TAP_INTERNAL_KEY : u8 = 0x17; +const PSBT_IN_TAP_INTERNAL_KEY: u8 = 0x17; /// Type: Taproot Merkle Root PSBT_IN_TAP_MERKLE_ROOT = 0x18 -const PSBT_IN_TAP_MERKLE_ROOT : u8 = 0x18; +const PSBT_IN_TAP_MERKLE_ROOT: u8 = 0x18; /// Type: Proprietary Use Type PSBT_IN_PROPRIETARY = 0xFC const PSBT_IN_PROPRIETARY: u8 = 0xFC; @@ -133,9 +135,9 @@ pub struct Input { #[cfg_attr(feature = "serde", serde(with = "::serde_utils::btreemap_as_seq"))] pub 
tap_key_origins: BTreeMap, KeySource)>, /// Taproot Internal key. - pub tap_internal_key : Option, + pub tap_internal_key: Option, /// Taproot Merkle root. - pub tap_merkle_root : Option, + pub tap_merkle_root: Option, /// Proprietary key-value pairs for this input. #[cfg_attr(feature = "serde", serde(with = "::serde_utils::btreemap_as_seq_byte_values"))] pub proprietary: BTreeMap>, @@ -155,15 +157,49 @@ pub struct PsbtSigHashType { pub (in ::util::psbt) inner: u32, } +impl fmt::Display for PsbtSigHashType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.schnorr_hash_ty() { + Ok(SchnorrSigHashType::Reserved) | Err(_) => write!(f, "{:#x}", self.inner), + Ok(schnorr_hash_ty) => fmt::Display::fmt(&schnorr_hash_ty, f), + } + } +} + +impl FromStr for PsbtSigHashType { + type Err = SigHashTypeParseError; + + #[inline] + fn from_str(s: &str) -> Result { + // We accept strings of form: "SIGHASH_ALL" etc. + // + // NB: some of Schnorr sighash types are non-standard for pre-taproot + // inputs. We also do not support SIGHASH_RESERVED in verbatim form + // ("0xFF" string should be used instead). + match SchnorrSigHashType::from_str(s) { + Ok(SchnorrSigHashType::Reserved) => return Err(SigHashTypeParseError{ unrecognized: s.to_owned() }), + Ok(ty) => return Ok(ty.into()), + Err(_) => {} + } + + // We accept non-standard sighash values. + // TODO: Swap `trim_left_matches` for `trim_start_matches` once MSRV >= 1.30. 
+ if let Ok(inner) = u32::from_str_radix(s.trim_left_matches("0x"), 16) { + return Ok(PsbtSigHashType { inner }); + } + + Err(SigHashTypeParseError{ unrecognized: s.to_owned() }) + } +} impl From for PsbtSigHashType { fn from(ecdsa_hash_ty: EcdsaSigHashType) -> Self { - PsbtSigHashType {inner: ecdsa_hash_ty as u32} + PsbtSigHashType { inner: ecdsa_hash_ty as u32 } } } impl From for PsbtSigHashType { fn from(schnorr_hash_ty: SchnorrSigHashType) -> Self { - PsbtSigHashType {inner: schnorr_hash_ty as u32} + PsbtSigHashType { inner: schnorr_hash_ty as u32 } } } @@ -171,7 +207,7 @@ impl PsbtSigHashType { /// Returns the [`EcdsaSigHashType`] if the [`PsbtSigHashType`] can be /// converted to one. pub fn ecdsa_hash_ty(self) -> Result { - EcdsaSigHashType::from_u32_standard(self.inner) + EcdsaSigHashType::from_standard(self.inner) } /// Returns the [`SchnorrSigHashType`] if the [`PsbtSigHashType`] can be @@ -184,29 +220,42 @@ impl PsbtSigHashType { } } - /// Obtains the inner sighash byte from this [`PsbtSigHashType`]. - pub fn inner(self) -> u32 { + /// Creates a [`PsbtSigHashType`] from a raw `u32`. + /// + /// Allows construction of a non-standard or non-valid sighash flag + /// ([`EcdsaSigHashType`], [`SchnorrSigHashType`] respectively). + pub fn from_u32(n: u32) -> PsbtSigHashType { + PsbtSigHashType { inner: n } + } + + + /// Converts [`PsbtSigHashType`] to a raw `u32` sighash flag. + /// + /// No guarantees are made as to the standardness or validity of the returned value. + pub fn to_u32(self) -> u32 { self.inner } } impl Input { - /// Obtains the [`EcdsaSigHashType`] for this input if one is specified. - /// If no sighash type is specified, returns ['EcdsaSigHashType::All'] + /// Obtains the [`EcdsaSigHashType`] for this input if one is specified. If no sighash type is + /// specified, returns [`EcdsaSigHashType::All`]. 
/// - /// Errors: - /// If the sighash type is not a standard ecdsa sighash type + /// # Errors + /// + /// If the `sighash_type` field is set to a non-standard ECDSA sighash value. pub fn ecdsa_hash_ty(&self) -> Result { self.sighash_type .map(|sighash_type| sighash_type.ecdsa_hash_ty()) .unwrap_or(Ok(EcdsaSigHashType::All)) } - /// Obtains the [`SchnorrSigHashType`] for this input if one is specified. - /// If no sighash type is specified, returns ['SchnorrSigHashType::Default'] + /// Obtains the [`SchnorrSigHashType`] for this input if one is specified. If no sighash type is + /// specified, returns [`SchnorrSigHashType::Default`]. + /// + /// # Errors /// - /// Errors: - /// If the sighash type is an invalid schnorr sighash type + /// If the `sighash_type` field is set to an invalid Schnorr sighash value. pub fn schnorr_hash_ty(&self) -> Result { self.sighash_type .map(|sighash_type| sighash_type.schnorr_hash_ty()) @@ -287,7 +336,7 @@ impl Input { self.tap_script_sigs <= | } } - PSBT_IN_TAP_LEAF_SCRIPT=> { + PSBT_IN_TAP_LEAF_SCRIPT => { impl_psbt_insert_pair! 
{ self.tap_scripts <= |< raw_value: (Script, LeafVersion)> } @@ -487,3 +536,71 @@ where btree_map::Entry::Occupied(_) => Err(psbt::Error::DuplicateKey(raw_key).into()), } } + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn psbt_sighash_type_ecdsa() { + for ecdsa in &[ + EcdsaSigHashType::All, + EcdsaSigHashType::None, + EcdsaSigHashType::Single, + EcdsaSigHashType::AllPlusAnyoneCanPay, + EcdsaSigHashType::NonePlusAnyoneCanPay, + EcdsaSigHashType::SinglePlusAnyoneCanPay, + ] { + let sighash = PsbtSigHashType::from(*ecdsa); + let s = format!("{}", sighash); + let back = PsbtSigHashType::from_str(&s).unwrap(); + assert_eq!(back, sighash); + assert_eq!(back.ecdsa_hash_ty().unwrap(), *ecdsa); + } + } + + #[test] + fn psbt_sighash_type_schnorr() { + for schnorr in &[ + SchnorrSigHashType::Default, + SchnorrSigHashType::All, + SchnorrSigHashType::None, + SchnorrSigHashType::Single, + SchnorrSigHashType::AllPlusAnyoneCanPay, + SchnorrSigHashType::NonePlusAnyoneCanPay, + SchnorrSigHashType::SinglePlusAnyoneCanPay, + ] { + let sighash = PsbtSigHashType::from(*schnorr); + let s = format!("{}", sighash); + let back = PsbtSigHashType::from_str(&s).unwrap(); + assert_eq!(back, sighash); + assert_eq!(back.schnorr_hash_ty().unwrap(), *schnorr); + } + } + + #[test] + fn psbt_sighash_type_schnorr_notstd() { + for (schnorr, schnorr_str) in &[ + (SchnorrSigHashType::Reserved, "0xff"), + ] { + let sighash = PsbtSigHashType::from(*schnorr); + let s = format!("{}", sighash); + assert_eq!(&s, schnorr_str); + let back = PsbtSigHashType::from_str(&s).unwrap(); + assert_eq!(back, sighash); + assert_eq!(back.schnorr_hash_ty().unwrap(), *schnorr); + } + } + + #[test] + fn psbt_sighash_type_notstd() { + let nonstd = 0xdddddddd; + let sighash = PsbtSigHashType { inner: nonstd }; + let s = format!("{}", sighash); + let back = PsbtSigHashType::from_str(&s).unwrap(); + + assert_eq!(back, sighash); + assert_eq!(back.ecdsa_hash_ty(), Err(NonStandardSigHashType(nonstd))); + 
assert_eq!(back.schnorr_hash_ty(), Err(sighash::Error::InvalidSigHashType(nonstd))); + } +} diff --git a/src/util/psbt/map/mod.rs b/src/util/psbt/map/mod.rs index 44bffebad..d5b05d794 100644 --- a/src/util/psbt/map/mod.rs +++ b/src/util/psbt/map/mod.rs @@ -32,16 +32,10 @@ pub(super) trait Map { fn get_pairs(&self) -> Result, io::Error>; /// Encodes map data with bitcoin consensus encoding. - fn consensus_encode_map( - &self, - mut s: S, - ) -> Result { + fn consensus_encode_map(&self, mut s: S) -> Result { let mut len = 0; for pair in Map::get_pairs(self)? { - len += encode::Encodable::consensus_encode( - &pair, - &mut s, - )?; + len += encode::Encodable::consensus_encode(&pair, &mut s)?; } Ok(len + encode::Encodable::consensus_encode(&0x00_u8, s)?) diff --git a/src/util/psbt/map/output.rs b/src/util/psbt/map/output.rs index e03049c40..a77a5729e 100644 --- a/src/util/psbt/map/output.rs +++ b/src/util/psbt/map/output.rs @@ -13,6 +13,7 @@ // use prelude::*; +use core; use io; @@ -25,7 +26,7 @@ use util::psbt::map::Map; use util::psbt::raw; use util::psbt::Error; -use util::taproot::TapLeafHash; +use util::taproot::{LeafInfo, TapLeafHash}; use util::taproot::{NodeInfo, TaprootBuilder}; @@ -117,6 +118,45 @@ impl TapTree { pub fn into_inner(self) -> TaprootBuilder { self.0 } + + /// Returns iterator for a taproot script tree, operating in DFS order over leaf depth and + /// leaf script pairs. + pub fn iter(&self) -> TapTreeIter { + self.into_iter() + } +} + +/// Iterator for a taproot script tree, operating in DFS order over leaf depth and +/// leaf script pairs. 
+pub struct TapTreeIter<'tree> { + leaf_iter: core::slice::Iter<'tree, LeafInfo>, +} + +impl<'tree> Iterator for TapTreeIter<'tree> { + type Item = (u8, &'tree Script); + + fn next(&mut self) -> Option { + self.leaf_iter.next().map(|leaf_info| { + (leaf_info.merkle_branch.as_inner().len() as u8, &leaf_info.script) + }) + } +} + +impl<'tree> IntoIterator for &'tree TapTree { + type Item = (u8, &'tree Script); + type IntoIter = TapTreeIter<'tree>; + + fn into_iter(self) -> Self::IntoIter { + match (self.0.branch().len(), self.0.branch().last()) { + (1, Some(Some(root))) => { + TapTreeIter { + leaf_iter: root.leaves.iter() + } + } + // This should be unreachable as the TapTree is already finalized + _ => unreachable!("non-finalized tree builder inside TapTree"), + } + } } impl Output { diff --git a/src/util/psbt/mod.rs b/src/util/psbt/mod.rs index 5e427d96b..ef3acdc8d 100644 --- a/src/util/psbt/mod.rs +++ b/src/util/psbt/mod.rs @@ -214,7 +214,7 @@ mod display_from_str { #[cfg(feature = "std")] #[cfg_attr(docsrs, doc(cfg(feature = "std")))] - impl ::std::error::Error for PsbtParseError { } + impl ::std::error::Error for PsbtParseError {} #[cfg_attr(docsrs, doc(cfg(feature = "base64")))] impl Display for PartiallySignedTransaction { @@ -238,10 +238,7 @@ mod display_from_str { pub use self::display_from_str::PsbtParseError; impl Encodable for PartiallySignedTransaction { - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let mut len = 0; len += b"psbt".consensus_encode(&mut s)?; @@ -345,10 +342,7 @@ mod tests { inputs: vec![], outputs: vec![], }; - assert_eq!( - serialize_hex(&psbt), - "70736274ff01000a0200000000000000000000" - ); + assert_eq!(serialize_hex(&psbt), "70736274ff01000a0200000000000000000000"); } #[test] @@ -390,12 +384,8 @@ mod tests { hd_keypaths.insert(pk.public_key, (fprint, dpath.into())); let expected: Output = Output { - redeem_script: Some(hex_script!( 
"76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac" - )), - witness_script: Some(hex_script!( - "a9143545e6e33b832c47050f24d3eeb93c9c03948bc787" - )), + redeem_script: Some(hex_script!("76a914d0c59903c5bac2868760e90fd521a4665aa7652088ac")), + witness_script: Some(hex_script!("a9143545e6e33b832c47050f24d3eeb93c9c03948bc787")), bip32_derivation: hd_keypaths, ..Default::default() }; @@ -441,13 +431,8 @@ mod tests { version: 0, proprietary: Default::default(), unknown: Default::default(), - inputs: vec![ - Input::default(), - ], - outputs: vec![ - Output::default(), - Output::default() - ] + inputs: vec![Input::default()], + outputs: vec![Output::default(), Output::default()], }; let actual: PartiallySignedTransaction = deserialize(&serialize(&expected)).unwrap(); @@ -804,8 +789,7 @@ mod tests { let psbt_non_witness_utxo = (&psbt.inputs[0].non_witness_utxo).as_ref().unwrap(); assert_eq!(tx_input.previous_output.txid, psbt_non_witness_utxo.txid()); - assert!( - psbt_non_witness_utxo.output[tx_input.previous_output.vout as usize] + assert!(psbt_non_witness_utxo.output[tx_input.previous_output.vout as usize] .script_pubkey .is_p2pkh() ); @@ -871,9 +855,7 @@ mod tests { let tx = &psbt.unsigned_tx; assert_eq!( tx.txid(), - Txid::from_hex( - "75c5c9665a570569ad77dd1279e6fd4628a093c4dcbf8d41532614044c14c115" - ).unwrap() + Txid::from_hex("75c5c9665a570569ad77dd1279e6fd4628a093c4dcbf8d41532614044c14c115").unwrap(), ); let mut unknown: BTreeMap> = BTreeMap::new(); @@ -893,7 +875,6 @@ mod tests { use super::*; use super::serialize; - #[test] fn invalid_vectors() { let err = 
hex_psbt!("70736274ff010071020000000127744ababf3027fe0d6cf23a96eee2efb188ef52301954585883e69b6624b2420000000000ffffffff02787c01000000000016001483a7e34bd99ff03a4962ef8a1a101bb295461ece606b042a010000001600147ac369df1b20e033d6116623957b0ac49f3c52e8000000000001012b00f2052a010000002251205a2c2cf5b52cf31f83ad2e8da63ff03183ecd8f609c7510ae8a48e03910a075701172102fe349064c98d6e2a853fa3c9b12bd8b304a19c195c60efa7ee2393046d3fa232000000").unwrap_err(); @@ -971,7 +952,7 @@ mod tests { } #[test] - fn serialize_and_deserialize_preimage_psbt(){ + fn serialize_and_deserialize_preimage_psbt() { // create a sha preimage map let mut sha256_preimages = BTreeMap::new(); sha256_preimages.insert(sha256::Hash::hash(&[1u8, 2u8]), vec![1u8, 2u8]); @@ -1071,7 +1052,7 @@ mod tests { unserialized.inputs[0].hash160_preimages = hash160_preimages; unserialized.inputs[0].sha256_preimages = sha256_preimages; - let rtt : PartiallySignedTransaction = hex_psbt!(&serialize_hex(&unserialized)).unwrap(); + let rtt: PartiallySignedTransaction = hex_psbt!(&serialize_hex(&unserialized)).unwrap(); assert_eq!(rtt, unserialized); // Now add an ripemd160 with incorrect preimage @@ -1080,7 +1061,7 @@ mod tests { unserialized.inputs[0].ripemd160_preimages = ripemd160_preimages; // Now the roundtrip should fail as the preimage is incorrect. 
- let rtt : Result = hex_psbt!(&serialize_hex(&unserialized)); + let rtt: Result = hex_psbt!(&serialize_hex(&unserialized)); assert!(rtt.is_err()); } @@ -1093,7 +1074,7 @@ mod tests { key: b"test".to_vec(), }, b"test".to_vec()); assert!(!psbt.proprietary.is_empty()); - let rtt : PartiallySignedTransaction = hex_psbt!(&serialize_hex(&psbt)).unwrap(); + let rtt: PartiallySignedTransaction = hex_psbt!(&serialize_hex(&psbt)).unwrap(); assert!(!rtt.proprietary.is_empty()); } diff --git a/src/util/psbt/raw.rs b/src/util/psbt/raw.rs index bc11a9760..7e028570a 100644 --- a/src/util/psbt/raw.rs +++ b/src/util/psbt/raw.rs @@ -56,7 +56,7 @@ pub type ProprietaryType = u8; /// structure according to BIP 174. #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct ProprietaryKey where Subtype: Copy + From + Into { +pub struct ProprietaryKey where Subtype: Copy + From + Into { /// Proprietary type prefix used for grouping together keys under some /// application and avoid namespace collision #[cfg_attr(feature = "serde", serde(with = "::serde_utils::hex_bytes"))] @@ -99,18 +99,12 @@ impl Decodable for Key { key.push(Decodable::consensus_decode(&mut d)?); } - Ok(Key { - type_value, - key, - }) + Ok(Key { type_value, key }) } } impl Encodable for Key { - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let mut len = 0; len += VarInt((self.key.len() + 1) as u64).consensus_encode(&mut s)?; @@ -125,10 +119,7 @@ impl Encodable for Key { } impl Encodable for Pair { - fn consensus_encode( - &self, - mut s: S, - ) -> Result { + fn consensus_encode(&self, mut s: S) -> Result { let len = self.key.consensus_encode(&mut s)?; Ok(len + self.value.consensus_encode(s)?) 
} @@ -159,11 +150,7 @@ impl Decodable for ProprietaryKey where Subtype: Copy + From { impl Serialize for PsbtSigHashType { fn serialize(&self) -> Vec { - serialize(&self.inner()) + serialize(&self.to_u32()) } } @@ -367,3 +367,15 @@ impl Deserialize for TapTree { fn key_source_len(key_source: &KeySource) -> usize { 4 + 4 * (key_source.1).as_ref().len() } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn can_deserialize_non_standard_psbt_sig_hash_type() { + let non_standard_sighash = [222u8, 0u8, 0u8, 0u8]; // 32 byte value. + let sighash = PsbtSigHashType::deserialize(&non_standard_sighash); + assert!(sighash.is_ok()) + } +} diff --git a/src/util/schnorr.rs b/src/util/schnorr.rs index 3ecce4a2c..8542915f7 100644 --- a/src/util/schnorr.rs +++ b/src/util/schnorr.rs @@ -40,6 +40,8 @@ pub type UntweakedPublicKey = ::XOnlyPublicKey; /// Tweaked BIP-340 X-coord-only public key #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "serde", serde(transparent))] pub struct TweakedPublicKey(::XOnlyPublicKey); impl fmt::LowerHex for TweakedPublicKey { @@ -58,9 +60,9 @@ impl fmt::Display for TweakedPublicKey { pub type UntweakedKeyPair = ::KeyPair; /// Tweaked BIP-340 key pair -#[derive(Clone)] -#[cfg_attr(feature = "std", derive(Debug))] -// TODO: Add other derives once secp256k1 v0.21.3 released +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "serde", serde(transparent))] pub struct TweakedKeyPair(::KeyPair); /// A trait for tweaking BIP340 key types (x-only public keys and key pairs). @@ -174,7 +176,7 @@ impl TweakedPublicKey { /// the y-coordinate is represented by only a single bit, as x determines /// it up to one bit. 
#[inline] - pub fn serialize(&self) -> [u8; constants::SCHNORRSIG_PUBLIC_KEY_SIZE] { + pub fn serialize(&self) -> [u8; constants::SCHNORR_PUBLIC_KEY_SIZE] { self.0.serialize() } } diff --git a/src/util/sighash.rs b/src/util/sighash.rs index ad248fd7b..b4482c894 100644 --- a/src/util/sighash.rs +++ b/src/util/sighash.rs @@ -20,10 +20,12 @@ //! and legacy (before Bip143). //! -pub use blockdata::transaction::EcdsaSigHashType; +use prelude::*; + +pub use blockdata::transaction::{EcdsaSigHashType, SigHashTypeParseError}; use blockdata::witness::Witness; use consensus::{encode, Encodable}; -use core::fmt; +use core::{str, fmt}; use core::ops::{Deref, DerefMut}; use core::borrow::Borrow; use hashes::{sha256, sha256d, Hash}; @@ -36,7 +38,7 @@ use super::taproot::LeafVersion; /// Efficiently calculates signature hash message for legacy, segwit and taproot inputs. #[derive(Debug)] -pub struct SigHashCache> { +pub struct SigHashCache> { /// Access to transaction required for various introspection, moreover type /// `T: Deref` allows to accept borrow and mutable borrow, the /// latter in particular is necessary for [`SigHashCache::witness_mut`] @@ -105,7 +107,6 @@ pub struct ScriptPath<'s> { /// Hashtype of an input's signature, encoded in the last byte of the signature /// Fixed values so they can be casted as integer types for encoding #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] -#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum SchnorrSigHashType { /// 0x0: Used when not explicitly specified, defaulting to [`SchnorrSigHashType::All`] Default = 0x00, @@ -128,6 +129,41 @@ pub enum SchnorrSigHashType { /// Reserved for future use, `#[non_exhaustive]` is not available with current MSRV Reserved = 0xFF, } +serde_string_impl!(SchnorrSigHashType, "a SchnorrSigHashType data"); + +impl fmt::Display for SchnorrSigHashType { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let s = match self { + SchnorrSigHashType::Default 
=> "SIGHASH_DEFAULT", + SchnorrSigHashType::All => "SIGHASH_ALL", + SchnorrSigHashType::None => "SIGHASH_NONE", + SchnorrSigHashType::Single => "SIGHASH_SINGLE", + SchnorrSigHashType::AllPlusAnyoneCanPay => "SIGHASH_ALL|SIGHASH_ANYONECANPAY", + SchnorrSigHashType::NonePlusAnyoneCanPay => "SIGHASH_NONE|SIGHASH_ANYONECANPAY", + SchnorrSigHashType::SinglePlusAnyoneCanPay => "SIGHASH_SINGLE|SIGHASH_ANYONECANPAY", + SchnorrSigHashType::Reserved => "SIGHASH_RESERVED", + }; + f.write_str(s) + } +} + +impl str::FromStr for SchnorrSigHashType { + type Err = SigHashTypeParseError; + + fn from_str(s: &str) -> Result { + match s { + "SIGHASH_DEFAULT" => Ok(SchnorrSigHashType::Default), + "SIGHASH_ALL" => Ok(SchnorrSigHashType::All), + "SIGHASH_NONE" => Ok(SchnorrSigHashType::None), + "SIGHASH_SINGLE" => Ok(SchnorrSigHashType::Single), + "SIGHASH_ALL|SIGHASH_ANYONECANPAY" => Ok(SchnorrSigHashType::AllPlusAnyoneCanPay), + "SIGHASH_NONE|SIGHASH_ANYONECANPAY" => Ok(SchnorrSigHashType::NonePlusAnyoneCanPay), + "SIGHASH_SINGLE|SIGHASH_ANYONECANPAY" => Ok(SchnorrSigHashType::SinglePlusAnyoneCanPay), + "SIGHASH_RESERVED" => Ok(SchnorrSigHashType::Reserved), + _ => Err(SigHashTypeParseError{ unrecognized: s.to_owned() }), + } + } +} /// Possible errors in computing the signature message #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] @@ -290,12 +326,13 @@ impl SchnorrSigHashType { 0x81 => Ok(SchnorrSigHashType::AllPlusAnyoneCanPay), 0x82 => Ok(SchnorrSigHashType::NonePlusAnyoneCanPay), 0x83 => Ok(SchnorrSigHashType::SinglePlusAnyoneCanPay), + 0xFF => Ok(SchnorrSigHashType::Reserved), x => Err(Error::InvalidSigHashType(x as u32)), } } } -impl> SigHashCache { +impl> SigHashCache { /// Compute the sighash components from an unsigned transaction and auxiliary /// in a lazy manner when required. 
/// For the generated sighashes to be valid, no fields in the transaction may change except for @@ -564,7 +601,7 @@ impl> SigHashCache { } self.tx.lock_time.consensus_encode(&mut writer)?; - sighash_type.as_u32().consensus_encode(&mut writer)?; + sighash_type.to_u32().consensus_encode(&mut writer)?; Ok(()) } @@ -692,7 +729,7 @@ impl> SigHashCache { } } -impl> SigHashCache { +impl> SigHashCache { /// When the SigHashCache is initialized with a mutable reference to a transaction instead of a /// regular reference, this method is available to allow modification to the witnesses. /// @@ -1107,4 +1144,39 @@ mod tests { let json_str = include_str!("../../test_data/bip341_tests.json"); serde_json::from_str(json_str).expect("JSON was not well-formatted") } + + #[test] + fn sighashtype_fromstr_display() { + let sighashtypes = vec![ + ("SIGHASH_DEFAULT", SchnorrSigHashType::Default), + ("SIGHASH_ALL", SchnorrSigHashType::All), + ("SIGHASH_NONE", SchnorrSigHashType::None), + ("SIGHASH_SINGLE", SchnorrSigHashType::Single), + ("SIGHASH_ALL|SIGHASH_ANYONECANPAY", SchnorrSigHashType::AllPlusAnyoneCanPay), + ("SIGHASH_NONE|SIGHASH_ANYONECANPAY", SchnorrSigHashType::NonePlusAnyoneCanPay), + ("SIGHASH_SINGLE|SIGHASH_ANYONECANPAY", SchnorrSigHashType::SinglePlusAnyoneCanPay), + ("SIGHASH_RESERVED", SchnorrSigHashType::Reserved), + ]; + for (s, sht) in sighashtypes { + assert_eq!(sht.to_string(), s); + assert_eq!(SchnorrSigHashType::from_str(s).unwrap(), sht); + } + let sht_mistakes = vec![ + "SIGHASH_ALL | SIGHASH_ANYONECANPAY", + "SIGHASH_NONE |SIGHASH_ANYONECANPAY", + "SIGHASH_SINGLE| SIGHASH_ANYONECANPAY", + "SIGHASH_ALL SIGHASH_ANYONECANPAY", + "SIGHASH_NONE |", + "SIGHASH_SIGNLE", + "DEFAULT", + "ALL", + "sighash_none", + "Sighash_none", + "SigHash_None", + "SigHash_NONE", + ]; + for s in sht_mistakes { + assert_eq!(SchnorrSigHashType::from_str(s).unwrap_err().to_string(), format!("Unrecognized SIGHASH string '{}'", s)); + } + } } diff --git a/src/util/taproot.rs 
b/src/util/taproot.rs index f020b1175..69d381b70 100644 --- a/src/util/taproot.rs +++ b/src/util/taproot.rs @@ -53,7 +53,7 @@ const MIDSTATE_TAPTWEAK: [u8; 32] = [ ]; // d129a2f3701c655d6583b6c3b941972795f4e23294fd54f4a2ae8d8547ca590b -/// The SHA-256 midstate value for the TapSigHash hash. +/// The SHA-256 midstate value for the [`TapSighashHash`]. const MIDSTATE_TAPSIGHASH: [u8; 32] = [ 245, 4, 164, 37, 215, 248, 120, 59, 19, 99, 134, 138, 227, 229, 86, 88, 110, 238, 148, 93, 188, 120, 136, 221, 2, 166, 226, 195, 24, 115, 254, 159, @@ -99,8 +99,8 @@ sha256t_hash_newtype!(TapSighashHash, TapSighashTag, MIDSTATE_TAPSIGHASH, 64, ); impl TapTweakHash { - /// Create a new BIP341 [`TapTweakHash`] from key and tweak - /// Produces H_taptweak(P||R) where P is internal key and R is the merkle root + /// Creates a new BIP341 [`TapTweakHash`] from key and tweak. Produces `H_taptweak(P||R)` where + /// `P` is the internal key and `R` is the merkle root. pub fn from_key_and_tweak( internal_key: UntweakedPublicKey, merkle_root: Option, @@ -118,7 +118,7 @@ impl TapTweakHash { } impl TapLeafHash { - /// function to compute leaf hash from components + /// Computes the leaf hash from components. pub fn from_script(script: &Script, ver: LeafVersion) -> TapLeafHash { let mut eng = TapLeafHash::engine(); ver.to_consensus() @@ -131,24 +131,24 @@ impl TapLeafHash { } } -/// Maximum depth of a Taproot Tree Script spend path +/// Maximum depth of a taproot tree script spend path. // https://github.com/bitcoin/bitcoin/blob/e826b22da252e0599c61d21c98ff89f366b3120f/src/script/interpreter.h#L229 pub const TAPROOT_CONTROL_MAX_NODE_COUNT: usize = 128; -/// Size of a taproot control node +/// Size of a taproot control node. 
// https://github.com/bitcoin/bitcoin/blob/e826b22da252e0599c61d21c98ff89f366b3120f/src/script/interpreter.h#L228 pub const TAPROOT_CONTROL_NODE_SIZE: usize = 32; -/// Tapleaf mask for getting the leaf version from first byte of control block +/// Tapleaf mask for getting the leaf version from first byte of control block. // https://github.com/bitcoin/bitcoin/blob/e826b22da252e0599c61d21c98ff89f366b3120f/src/script/interpreter.h#L225 pub const TAPROOT_LEAF_MASK: u8 = 0xfe; -/// Tapscript leaf version +/// Tapscript leaf version. // https://github.com/bitcoin/bitcoin/blob/e826b22da252e0599c61d21c98ff89f366b3120f/src/script/interpreter.h#L226 pub const TAPROOT_LEAF_TAPSCRIPT: u8 = 0xc0; -/// Taproot annex prefix +/// Taproot annex prefix. pub const TAPROOT_ANNEX_PREFIX: u8 = 0x50; -/// Tapscript control base size +/// Tapscript control base size. // https://github.com/bitcoin/bitcoin/blob/e826b22da252e0599c61d21c98ff89f366b3120f/src/script/interpreter.h#L227 pub const TAPROOT_CONTROL_BASE_SIZE: usize = 33; -/// Tapscript control max size +/// Tapscript control max size. // https://github.com/bitcoin/bitcoin/blob/e826b22da252e0599c61d21c98ff89f366b3120f/src/script/interpreter.h#L230 pub const TAPROOT_CONTROL_MAX_SIZE: usize = TAPROOT_CONTROL_BASE_SIZE + TAPROOT_CONTROL_NODE_SIZE * TAPROOT_CONTROL_MAX_NODE_COUNT; @@ -156,105 +156,71 @@ pub const TAPROOT_CONTROL_MAX_SIZE: usize = // type alias for versioned tap script corresponding merkle proof type ScriptMerkleProofMap = BTreeMap<(Script, LeafVersion), BTreeSet>; -/// Data structure for representing Taproot spending information. -/// Taproot output corresponds to a combination of a -/// single public key condition (known the internal key), and zero or more -/// general conditions encoded in scripts organized in the form of a binary tree. +/// Represents taproot spending information. 
/// -/// Taproot can be spent be either: -/// - Spending using the key path i.e., with secret key corresponding to the output_key -/// - By satisfying any of the scripts in the script spent path. Each script can be satisfied by providing -/// a witness stack consisting of the script's inputs, plus the script itself and the control block. +/// Taproot output corresponds to a combination of a single public key condition (known as the +/// internal key), and zero or more general conditions encoded in scripts organized in the form of a +/// binary tree. /// -/// If one or more of the spending conditions consist of just a single key (after aggregation), -/// the most likely one should be made the internal key. -/// See [BIP341](https://github.com/bitcoin/bips/blob/master/bip-0341.mediawiki) for more details -/// on choosing internal keys for a taproot application +/// Taproot can be spent by either: +/// - Spending using the key path i.e., with secret key corresponding to the tweaked `output_key`. +/// - By satisfying any of the scripts in the script spend path. Each script can be satisfied by +/// providing a witness stack consisting of the script's inputs, plus the script itself and the +/// control block. /// -/// Note: This library currently does not support [annex](https://github.com/bitcoin/bips/blob/master/bip-0341.mediawiki#cite_note-5) +/// If one or more of the spending conditions consist of just a single key (after aggregation), the +/// most likely key should be made the internal key. +/// See [BIP341](https://github.com/bitcoin/bips/blob/master/bip-0341.mediawiki) for more details on +/// choosing internal keys for a taproot application. +/// +/// Note: This library currently does not support +/// [annex](https://github.com/bitcoin/bips/blob/master/bip-0341.mediawiki#cite_note-5). 
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct TaprootSpendInfo { /// The BIP341 internal key. internal_key: UntweakedPublicKey, - /// The Merkle root of the script tree (None if there are no scripts) + /// The merkle root of the script tree (None if there are no scripts). merkle_root: Option, - /// The sign final output pubkey as per BIP 341 + /// The sign final output pubkey as per BIP 341. output_key_parity: secp256k1::Parity, - /// The tweaked output key + /// The tweaked output key. output_key: TweakedPublicKey, - /// Map from (script, leaf_version) to (sets of) [`TaprootMerkleBranch`]. - /// More than one control block for a given script is only possible if it - /// appears in multiple branches of the tree. In all cases, keeping one should - /// be enough for spending funds, but we keep all of the paths so that - /// a full tree can be constructed again from spending data if required. + /// Map from (script, leaf_version) to (sets of) [`TaprootMerkleBranch`]. More than one control + /// block for a given script is only possible if it appears in multiple branches of the tree. In + /// all cases, keeping one should be enough for spending funds, but we keep all of the paths so + /// that a full tree can be constructed again from spending data if required. script_map: ScriptMerkleProofMap, } impl TaprootSpendInfo { - /// Create a new [`TaprootSpendInfo`] from a list of script(with default script version) and - /// weights of satisfaction for that script. The weights represent the probability of - /// each branch being taken. If probabilities/weights for each condition are known, - /// constructing the tree as a Huffman tree is the optimal way to minimize average - /// case satisfaction cost. This function takes input an iterator of tuple(u64, &Script) - /// where usize represents the satisfaction weights of the branch. 
- /// For example, [(3, S1), (2, S2), (5, S3)] would construct a TapTree that has optimal - /// satisfaction weight when probability for S1 is 30%, S2 is 20% and S3 is 50%. - /// - /// # Errors: - /// - /// - When the optimal huffman tree has a depth more than 128 - /// - If the provided list of script weights is empty + /// Creates a new [`TaprootSpendInfo`] from a list of scripts (with default script version) and + /// weights of satisfaction for that script. /// - /// # Edge Cases: - /// - If the script weight calculations overflow, a sub-optimal tree may be generated. This - /// should not happen unless you are dealing with billions of branches with weights close to - /// 2^32. + /// See [`TaprootBuilder::with_huffman_tree`] for more detailed documentation. pub fn with_huffman_tree( secp: &Secp256k1, internal_key: UntweakedPublicKey, script_weights: I, ) -> Result where - I: IntoIterator, + I: IntoIterator, C: secp256k1::Verification, { - let mut node_weights = BinaryHeap::<(Reverse, NodeInfo)>::new(); - for (p, leaf) in script_weights { - node_weights.push((Reverse(p as u64), NodeInfo::new_leaf_with_ver(leaf, LeafVersion::TapScript))); - } - if node_weights.is_empty() { - return Err(TaprootBuilderError::IncompleteTree); - } - while node_weights.len() > 1 { - // Combine the last two elements and insert a new node - let (p1, s1) = node_weights.pop().expect("len must be at least two"); - let (p2, s2) = node_weights.pop().expect("len must be at least two"); - // Insert the sum of first two in the tree as a new node - // N.B.: p1 + p2 can not practically saturate as you would need to have 2**32 max u32s - // from the input to overflow. However, saturating is a reasonable behavior here as - // huffman tree construction would treat all such elements as "very likely". 
- let p = Reverse(p1.0.saturating_add(p2.0)); - node_weights.push((p, NodeInfo::combine(s1, s2)?)); - } - // Every iteration of the loop reduces the node_weights.len() by exactly 1 - // Therefore, the loop will eventually terminate with exactly 1 element - debug_assert!(node_weights.len() == 1); - let node = node_weights.pop().expect("huffman tree algorithm is broken").1; - return Ok(Self::from_node_info(secp, internal_key, node)); + TaprootBuilder::with_huffman_tree(script_weights)?.finalize(secp, internal_key) } - /// Create a new key spend with internal key and proided merkle root. - /// Provide [`None`] for merkle_root if there is no script path. + /// Creates a new key spend with `internal_key` and `merkle_root`. Provide [`None`] for + /// `merkle_root` if there is no script path. /// /// *Note*: As per BIP341 /// - /// When the merkle root is [`None`], the output key commits to an unspendable - /// script path instead of having no script path. This is achieved by computing - /// the output key point as Q = P + int(hashTapTweak(bytes(P)))G. - /// See also [`TaprootSpendInfo::tap_tweak`]. - /// Refer to BIP 341 footnote (Why should the output key always have - /// a taproot commitment, even if there is no script path?) for more details + /// When the merkle root is [`None`], the output key commits to an unspendable script path + /// instead of having no script path. This is achieved by computing the output key point as + /// `Q = P + int(hashTapTweak(bytes(P)))G`. See also [`TaprootSpendInfo::tap_tweak`]. /// + /// Refer to BIP 341 footnote ('Why should the output key always have a taproot commitment, even + /// if there is no script path?') for more details. 
pub fn new_key_spend( secp: &Secp256k1, internal_key: UntweakedPublicKey, @@ -270,34 +236,37 @@ impl TaprootSpendInfo { } } - /// Obtain the tweak and parity used to compute the output_key + /// Returns the `TapTweakHash` for this [`TaprootSpendInfo`] i.e., the tweak using `internal_key` + /// and `merkle_root`. pub fn tap_tweak(&self) -> TapTweakHash { TapTweakHash::from_key_and_tweak(self.internal_key, self.merkle_root) } - /// Obtain the internal key + /// Returns the internal key for this [`TaprootSpendInfo`]. pub fn internal_key(&self) -> UntweakedPublicKey { self.internal_key } - /// Obtain the merkle root + /// Returns the merkle root for this [`TaprootSpendInfo`]. pub fn merkle_root(&self) -> Option { self.merkle_root } - /// Output key(the key used in script pubkey) from Spend data. See also - /// [`TaprootSpendInfo::output_key_parity`] + /// Returns the output key (the key used in script pubkey) for this [`TaprootSpendInfo`]. pub fn output_key(&self) -> TweakedPublicKey { self.output_key } - /// Parity of the output key. See also [`TaprootSpendInfo::output_key`] + /// Returns the parity of the output key. See also [`TaprootSpendInfo::output_key`]. pub fn output_key_parity(&self) -> secp256k1::Parity { self.output_key_parity } - // Internal function to compute [`TaprootSpendInfo`] from NodeInfo - fn from_node_info( + /// Computes the [`TaprootSpendInfo`] from `internal_key` and `node`. + /// + /// This is useful when you want to manually build a taproot tree without using + /// [`TaprootBuilder`]. + pub fn from_node_info( secp: &Secp256k1, internal_key: UntweakedPublicKey, node: NodeInfo, @@ -322,14 +291,17 @@ impl TaprootSpendInfo { info } - /// Access the internal script map + /// Returns the internal script map. pub fn as_script_map(&self) -> &ScriptMerkleProofMap { &self.script_map } - /// Obtain a [`ControlBlock`] for particular script with the given version. 
- /// Returns [`None`] if the script is not contained in the [`TaprootSpendInfo`] - /// If there are multiple ControlBlocks possible, this returns the shortest one. + /// Constructs a [`ControlBlock`] for particular script with the given version. + /// + /// # Returns + /// + /// - If there are multiple control blocks possible, returns the shortest one. + /// - If the script is not contained in the [`TaprootSpendInfo`], returns `None`. pub fn control_block(&self, script_ver: &(Script, LeafVersion)) -> Option { let merkle_branch_set = self.script_map.get(script_ver)?; // Choose the smallest one amongst the multiple script maps @@ -346,22 +318,23 @@ impl TaprootSpendInfo { } } -/// Builder for building taproot iteratively. Users can specify tap leaf or omitted/hidden -/// branches in a DFS(Depth first search) walk to construct this tree. -// Similar to Taproot Builder in bitcoin core +/// Builder for building taproot iteratively. Users can specify tap leaf or omitted/hidden branches +/// in a depth-first search (DFS) walk order to construct this tree. +/// +/// See Wikipedia for more details on [DFS](https://en.wikipedia.org/wiki/Depth-first_search). +// Similar to Taproot Builder in bitcoin core. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct TaprootBuilder { - // The following doc-comment is from bitcoin core, but modified for rust - // The comment below describes the current state of the builder for a given tree. + // The following doc-comment is from bitcoin core, but modified for Rust. It describes the + // current state of the builder for a given tree. // - // For each level in the tree, one NodeInfo object may be present. branch at index 0 - // is information about the root; further values are for deeper subtrees being - // explored. + // For each level in the tree, one NodeInfo object may be present. 
Branch at index 0 is + // information about the root; further values are for deeper subtrees being explored. // - // During the construction of Taptree, for every right branch taken to - // reach the position we're currently working in, there will be a (Some(_)) - // entry in branch corresponding to the left branch at that level. + // During the construction of Taptree, for every right branch taken to reach the position we're + // currently working on, there will be a `(Some(_))` entry in branch corresponding to the left + // branch at that level. // // For example, imagine this tree: - N0 - // / \ @@ -371,32 +344,83 @@ pub struct TaprootBuilder { // / \ // D E // - // Initially, branch is empty. After processing leaf A, it would become - // {None, None, A}. When processing leaf B, an entry at level 2 already - // exists, and it would thus be combined with it to produce a level 1 one, - // resulting in {None, N1}. Adding C and D takes us to {None, N1, C} - // and {None, N1, C, D} respectively. When E is processed, it is combined - // with D, and then C, and then N1, to produce the root, resulting in {N0}. - // - // This structure allows processing with just O(log n) overhead if the leaves - // are computed on the fly. + // Initially, branch is empty. After processing leaf A, it would become {None, None, A}. When + // processing leaf B, an entry at level 2 already exists, and it would thus be combined with it + // to produce a level 1 entry, resulting in {None, N1}. Adding C and D takes us to {None, N1, C} + // and {None, N1, C, D} respectively. When E is processed, it is combined with D, and then C, + // and then N1, to produce the root, resulting in {N0}. // - // As an invariant, there can never be None entries at the end. There can - // also not be more than 128 entries (as that would mean more than 128 levels - // in the tree). 
The depth of newly added entries will always be at least - // equal to the current size of branch (otherwise it does not correspond - // to a depth-first traversal of a tree). branch is only empty if no entries - // have ever be processed. branch having length 1 corresponds to being done. + // This structure allows processing with just O(log n) overhead if the leaves are computed on + // the fly. // + // As an invariant, there can never be None entries at the end. There can also not be more than + // 128 entries (as that would mean more than 128 levels in the tree). The depth of newly added + // entries will always be at least equal to the current size of branch (otherwise it does not + // correspond to a depth-first traversal of a tree). A branch is only empty if no entries have + // ever be processed. A branch having length 1 corresponds to being done. branch: Vec>, } impl TaprootBuilder { - /// Create a new instance of [`TaprootBuilder`] + /// Creates a new instance of [`TaprootBuilder`]. pub fn new() -> Self { TaprootBuilder { branch: vec![] } } - /// Just like [`TaprootBuilder::add_leaf`] but allows to specify script version + + /// Creates a new [`TaprootSpendInfo`] from a list of scripts (with default script version) and + /// weights of satisfaction for that script. + /// + /// The weights represent the probability of each branch being taken. If probabilities/weights + /// for each condition are known, constructing the tree as a Huffman Tree is the optimal way to + /// minimize average case satisfaction cost. This function takes as input an iterator of + /// `tuple(u32, &Script)` where `u32` represents the satisfaction weights of the branch. For + /// example, [(3, S1), (2, S2), (5, S3)] would construct a [`TapTree`] that has optimal + /// satisfaction weight when probability for S1 is 30%, S2 is 20% and S3 is 50%. + /// + /// # Errors: + /// + /// - When the optimal Huffman Tree has a depth more than 128. 
+ /// - If the provided list of script weights is empty. + /// + /// # Edge Cases: + /// + /// If the script weight calculations overflow, a sub-optimal tree may be generated. This should + /// not happen unless you are dealing with billions of branches with weights close to 2^32. + /// + /// [`TapTree`]: ::util::psbt::TapTree + pub fn with_huffman_tree( + script_weights: I, + ) -> Result + where + I: IntoIterator, + { + let mut node_weights = BinaryHeap::<(Reverse, NodeInfo)>::new(); + for (p, leaf) in script_weights { + node_weights.push((Reverse(p), NodeInfo::new_leaf_with_ver(leaf, LeafVersion::TapScript))); + } + if node_weights.is_empty() { + return Err(TaprootBuilderError::IncompleteTree); + } + while node_weights.len() > 1 { + // Combine the last two elements and insert a new node + let (p1, s1) = node_weights.pop().expect("len must be at least two"); + let (p2, s2) = node_weights.pop().expect("len must be at least two"); + // Insert the sum of first two in the tree as a new node + // N.B.: p1 + p2 can not practically saturate as you would need to have 2**32 max u32s + // from the input to overflow. However, saturating is a reasonable behavior here as + // huffman tree construction would treat all such elements as "very likely". + let p = Reverse(p1.0.saturating_add(p2.0)); + node_weights.push((p, NodeInfo::combine(s1, s2)?)); + } + // Every iteration of the loop reduces the node_weights.len() by exactly 1 + // Therefore, the loop will eventually terminate with exactly 1 element + debug_assert_eq!(node_weights.len(), 1); + let node = node_weights.pop().expect("huffman tree algorithm is broken").1; + Ok(TaprootBuilder{branch: vec![Some(node)]}) + } + + /// Adds a leaf script at `depth` to the builder with script version `ver`. Errors if the leaves + /// are not provided in DFS walk order. The depth of the root node is 0. 
pub fn add_leaf_with_ver( self, depth: usize, @@ -407,29 +431,27 @@ impl TaprootBuilder { self.insert(leaf, depth) } - /// Add a leaf script at a depth `depth` to the builder with default script version. - /// This will error if the leave are not provided in a DFS walk order. The depth of the - /// root node is 0 and it's immediate child would be at depth 1. - /// See [`TaprootBuilder::add_leaf_with_ver`] for adding a leaf with specific version - /// See [Wikipedia](https://en.wikipedia.org/wiki/Depth-first_search) for more details + /// Adds a leaf script at `depth` to the builder with default script version. Errors if the + /// leaves are not provided in DFS walk order. The depth of the root node is 0. + /// + /// See [`TaprootBuilder::add_leaf_with_ver`] for adding a leaf with specific version. pub fn add_leaf(self, depth: usize, script: Script) -> Result { self.add_leaf_with_ver(depth, script, LeafVersion::TapScript) } - /// Add a hidden/omitted node at a depth `depth` to the builder. - /// This will error if the node are not provided in a DFS walk order. The depth of the - /// root node is 0 and it's immediate child would be at depth 1. + /// Adds a hidden/omitted node at `depth` to the builder. Errors if the leaves are not provided + /// in DFS walk order. The depth of the root node is 0. pub fn add_hidden(self, depth: usize, hash: sha256::Hash) -> Result { let node = NodeInfo::new_hidden(hash); self.insert(node, depth) } - /// Check if the builder is a complete tree + /// Checks if the builder is a complete tree. pub fn is_complete(&self) -> bool { self.branch.len() == 1 && self.branch[0].is_some() } - /// Create [`TaprootSpendInfo`] with the given internal key + /// Creates a [`TaprootSpendInfo`] with the given internal key. 
pub fn finalize( mut self, secp: &Secp256k1, @@ -446,11 +468,11 @@ impl TaprootBuilder { Ok(TaprootSpendInfo::from_node_info(secp, internal_key, node)) } - pub(crate) fn branch(&self) -> &[Option]{ + pub(crate) fn branch(&self) -> &[Option] { &self.branch } - // Helper function to insert a leaf at a depth + /// Inserts a leaf at `depth`. fn insert(mut self, mut node: NodeInfo, mut depth: usize) -> Result { // early error on invalid depth. Though this will be checked later // while constructing TaprootMerkelBranch @@ -497,27 +519,30 @@ impl TaprootBuilder { } } -// Internally used structure to represent the node information in taproot tree +/// Represents the node information in taproot tree. +/// +/// You can use [`TaprootSpendInfo::from_node_info`] to a get [`TaprootSpendInfo`] +/// from the merkle root [`NodeInfo`]. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub(crate) struct NodeInfo { - /// Merkle Hash for this node +pub struct NodeInfo { + /// Merkle hash for this node. pub(crate) hash: sha256::Hash, - /// information about leaves inside this node + /// Information about leaves inside this node. pub(crate) leaves: Vec, } impl NodeInfo { - // Create a new NodeInfo with omitted/hidden info - fn new_hidden(hash: sha256::Hash) -> Self { + /// Creates a new [`NodeInfo`] with omitted/hidden info. + pub fn new_hidden(hash: sha256::Hash) -> Self { Self { hash: hash, leaves: vec![], } } - // Create a new leaf with NodeInfo - fn new_leaf_with_ver(script: Script, ver: LeafVersion) -> Self { + /// Creates a new leaf [`NodeInfo`] with given [`Script`] and [`LeafVersion`]. + pub fn new_leaf_with_ver(script: Script, ver: LeafVersion) -> Self { let leaf = LeafInfo::new(script, ver); Self { hash: leaf.hash(), @@ -525,8 +550,8 @@ impl NodeInfo { } } - // Combine two NodeInfo's to create a new parent - fn combine(a: Self, b: Self) -> Result { + /// Combines two [`NodeInfo`] to create a new parent. 
+ pub fn combine(a: Self, b: Self) -> Result { let mut all_leaves = Vec::with_capacity(a.leaves.len() + b.leaves.len()); for mut a_leaf in a.leaves { a_leaf.merkle_branch.push(b.hash)?; // add hashing partner @@ -551,20 +576,20 @@ impl NodeInfo { } } -// Internally used structure to store information about taproot leaf node +/// Store information about taproot leaf node. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub(crate) struct LeafInfo { - // The underlying script + /// The underlying script. pub(crate) script: Script, - // The leaf version + /// The leaf version. pub(crate) ver: LeafVersion, - // The merkle proof(hashing partners) to get this node + /// The merkle proof (hashing partners) to get this node. pub(crate) merkle_branch: TaprootMerkleBranch, } impl LeafInfo { - // Create an instance of Self from Script with default version and no merkle branch + /// Creates an new [`LeafInfo`] from `script` and `ver` and no merkle branch. fn new(script: Script, ver: LeafVersion) -> Self { Self { script: script, @@ -573,34 +598,32 @@ impl LeafInfo { } } - // Compute a leaf hash for the given leaf + /// Computes a leaf hash for this [`LeafInfo`]. fn hash(&self) -> sha256::Hash { let leaf_hash = TapLeafHash::from_script(&self.script, self.ver); sha256::Hash::from_inner(leaf_hash.into_inner()) } } -/// The Merkle proof for inclusion of a tree in a taptree hash -// The type of hash is sha256::Hash because the vector might contain -// both TapBranchHash and TapLeafHash +/// The merkle proof for inclusion of a tree in a taptree hash. +// The type of hash is `sha256::Hash` because the vector might contain both `TapBranchHash` and +// `TapLeafHash`. 
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct TaprootMerkleBranch(Vec); impl TaprootMerkleBranch { - /// Obtain a reference to inner + /// Returns a reference to the inner vector of hashes. pub fn as_inner(&self) -> &[sha256::Hash] { &self.0 } - /// Create a merkle proof from slice + /// Creates a merkle proof from raw data representing a list of hashes. pub fn from_slice(sl: &[u8]) -> Result { if sl.len() % TAPROOT_CONTROL_NODE_SIZE != 0 { Err(TaprootError::InvalidMerkleBranchSize(sl.len())) } else if sl.len() > TAPROOT_CONTROL_NODE_SIZE * TAPROOT_CONTROL_MAX_NODE_COUNT { - Err(TaprootError::InvalidMerkleTreeDepth( - sl.len() / TAPROOT_CONTROL_NODE_SIZE, - )) + Err(TaprootError::InvalidMerkleTreeDepth(sl.len() / TAPROOT_CONTROL_NODE_SIZE)) } else { let inner = sl // TODO: Use chunks_exact after MSRV changes to 1.31 @@ -614,7 +637,11 @@ impl TaprootMerkleBranch { } } - /// Serialize to a writer. Returns the number of bytes written + /// Serializes to a writer. + /// + /// # Returns + /// + /// The number of bytes written to the writer. pub fn encode(&self, mut writer: Write) -> io::Result { for hash in self.0.iter() { writer.write_all(hash)?; @@ -622,12 +649,12 @@ impl TaprootMerkleBranch { Ok(self.0.len() * sha256::Hash::LEN) } - /// Serialize self as bytes + /// Serializes `self` as bytes. pub fn serialize(&self) -> Vec { self.0.iter().map(|e| e.as_inner()).flatten().map(|x| *x).collect::>() } - // Internal function to append elements to proof + /// Appends elements to proof. fn push(&mut self, h: sha256::Hash) -> Result<(), TaprootBuilderError> { if self.0.len() >= TAPROOT_CONTROL_MAX_NODE_COUNT { Err(TaprootBuilderError::InvalidMerkleTreeDepth(self.0.len())) @@ -637,8 +664,11 @@ impl TaprootMerkleBranch { } } - /// Create a MerkleProof from Vec<[`sha256::Hash`]>. 
Returns an error when - /// inner proof len is more than TAPROOT_CONTROL_MAX_NODE_COUNT (128) + /// Creates a merkle proof from list of hashes. + /// + /// # Errors + /// + /// If inner proof length is more than [`TAPROOT_CONTROL_MAX_NODE_COUNT`] (128). pub fn from_inner(inner: Vec) -> Result { if inner.len() > TAPROOT_CONTROL_MAX_NODE_COUNT { Err(TaprootError::InvalidMerkleTreeDepth(inner.len())) @@ -647,42 +677,46 @@ impl TaprootMerkleBranch { } } - /// Consume Self to get Vec<[`sha256::Hash`]> + /// Returns the inner list of hashes. pub fn into_inner(self) -> Vec { self.0 } } -/// Control Block data structure used in Tapscript satisfaction +/// Control block data structure used in Tapscript satisfaction. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct ControlBlock { - /// The tapleaf version, + /// The tapleaf version. pub leaf_version: LeafVersion, - /// The parity of the output key (NOT THE INTERNAL KEY WHICH IS ALWAYS XONLY) + /// The parity of the output key (NOT THE INTERNAL KEY WHICH IS ALWAYS XONLY). pub output_key_parity: secp256k1::Parity, - /// The internal key + /// The internal key. pub internal_key: UntweakedPublicKey, - /// The merkle proof of a script associated with this leaf + /// The merkle proof of a script associated with this leaf. pub merkle_branch: TaprootMerkleBranch, } impl ControlBlock { - /// Obtain a ControlBlock from slice. This is an extra witness element - /// that provides the proof that taproot script pubkey is correctly computed - /// with some specified leaf hash. This is the last element in - /// taproot witness when spending a output via script path. + /// Constructs a `ControlBlock` from slice. This is an extra witness element that provides the + /// proof that taproot script pubkey is correctly computed with some specified leaf hash. This + /// is the last element in taproot witness when spending a output via script path. 
/// - /// # Errors: - /// - If the control block size is not of the form 33 + 32m where - /// 0 <= m <= 128, InvalidControlBlock is returned + /// # Errors + /// + /// - `TaprootError::InvalidControlBlockSize` if `sl` is not of size 1 + 32 + 32N for any N >= 0. + /// - `TaprootError::InvalidParity` if first byte of `sl` is not a valid output key parity. + /// - `TaprootError::InvalidTaprootLeafVersion` if first byte of `sl` is not a valid leaf version. + /// - `TaprootError::InvalidInternalKey` if internal key is invalid (first 32 bytes after the parity byte). + /// - `TaprootError::InvalidMerkleTreeDepth` if merkle tree is too deep (more than 128 levels). pub fn from_slice(sl: &[u8]) -> Result { if sl.len() < TAPROOT_CONTROL_BASE_SIZE || (sl.len() - TAPROOT_CONTROL_BASE_SIZE) % TAPROOT_CONTROL_NODE_SIZE != 0 { return Err(TaprootError::InvalidControlBlockSize(sl.len())); } - let output_key_parity = secp256k1::Parity::from((sl[0] & 1) as i32); + let output_key_parity = secp256k1::Parity::from_i32((sl[0] & 1) as i32) + .map_err(TaprootError::InvalidParity)?; let leaf_version = LeafVersion::from_consensus(sl[0] & TAPROOT_LEAF_MASK)?; let internal_key = UntweakedPublicKey::from_slice(&sl[1..TAPROOT_CONTROL_BASE_SIZE]) .map_err(TaprootError::InvalidInternalKey)?; @@ -695,13 +729,17 @@ impl ControlBlock { }) } - /// Obtain the size of control block. Faster and more efficient than calling - /// serialize() followed by len(). Can be handy for fee estimation + /// Returns the size of control block. Faster and more efficient than calling + /// `Self::serialize().len()`. Can be handy for fee estimation. pub fn size(&self) -> usize { TAPROOT_CONTROL_BASE_SIZE + TAPROOT_CONTROL_NODE_SIZE * self.merkle_branch.as_inner().len() } - /// Serialize to a writer. Returns the number of bytes written + /// Serializes to a writer. + /// + /// # Returns + /// + /// The number of bytes written to the writer. 
pub fn encode(&self, mut writer: Write) -> io::Result { let first_byte: u8 = i32::from(self.output_key_parity) as u8 | self.leaf_version.to_consensus(); writer.write_all(&[first_byte])?; @@ -710,20 +748,21 @@ impl ControlBlock { Ok(self.size()) } - /// Serialize the control block. This would be required when - /// using ControlBlock as a witness element while spending an output via - /// script path. This serialization does not include the VarInt prefix that would be - /// applied when encoding this element as a witness. + /// Serializes the control block. + /// + /// This would be required when using [`ControlBlock`] as a witness element while spending an + /// output via script path. This serialization does not include the [`::VarInt`] prefix that would + /// be applied when encoding this element as a witness. pub fn serialize(&self) -> Vec { let mut buf = Vec::with_capacity(self.size()); - self.encode(&mut buf) - .expect("writers don't error"); + self.encode(&mut buf).expect("writers don't error"); buf } - /// Verify that a control block is correct proof for a given output key and script - /// This only checks that script is contained inside the taptree described by - /// output key, full verification must also execute the script with witness data + /// Verifies that a control block is correct proof for a given output key and script. + /// + /// Only checks that script is contained inside the taptree described by output key. Full + /// verification must also execute the script with witness data. pub fn verify_taproot_commitment( &self, secp: &Secp256k1, @@ -775,7 +814,7 @@ impl FutureLeafVersion { } } - /// Get consensus representation of the future leaf version. + /// Returns the consensus representation of this [`FutureLeafVersion`]. #[inline] pub fn to_consensus(self) -> u8 { self.0 @@ -803,31 +842,23 @@ impl fmt::UpperHex for FutureLeafVersion { } } -/// The leaf version for tapleafs +/// The leaf version for tapleafs. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum LeafVersion { - /// BIP-342 tapscript + /// BIP-342 tapscript. TapScript, - /// Future leaf version + /// Future leaf version. Future(FutureLeafVersion) } impl LeafVersion { - /// Obtain LeafVersion from consensus byte representation. + /// Creates a [`LeafVersion`] from consensus byte representation. /// /// # Errors + /// /// - If the last bit of the `version` is odd. /// - If the `version` is 0x50 ([`TAPROOT_ANNEX_PREFIX`]). - // Text from BIP341: - // In order to support some forms of static analysis that rely on - // being able to identify script spends without access to the output being - // spent, it is recommended to avoid using any leaf versions that would conflict - // with a valid first byte of either a valid P2WPKH pubkey or a valid P2WSH script - // (that is, both v and v | 1 should be an undefined, invalid or disabled opcode - // or an opcode that is not valid as the first opcode). - // The values that comply to this rule are the 32 even values between - // 0xc0 and 0xfe and also 0x66, 0x7e, 0x80, 0x84, 0x96, 0x98, 0xba, 0xbc, 0xbe pub fn from_consensus(version: u8) -> Result { match version { TAPROOT_LEAF_TAPSCRIPT => Ok(LeafVersion::TapScript), @@ -836,7 +867,7 @@ impl LeafVersion { } } - /// Get consensus representation of the [`LeafVersion`]. + /// Returns the consensus representation of this [`LeafVersion`]. pub fn to_consensus(self) -> u8 { match self { LeafVersion::TapScript => TAPROOT_LEAF_TAPSCRIPT, @@ -868,23 +899,26 @@ impl fmt::UpperHex for LeafVersion { } } -/// Serializes LeafVersion as u8 using consensus encoding +/// Serializes [`LeafVersion`] as a `u8` using consensus encoding. 
#[cfg(feature = "serde")] #[cfg_attr(docsrs, doc(cfg(feature = "serde")))] impl ::serde::Serialize for LeafVersion { fn serialize(&self, serializer: S) -> Result - where - S: ::serde::Serializer, + where + S: ::serde::Serializer, { serializer.serialize_u8(self.to_consensus()) } } -/// Deserializes LeafVersion as u8 using consensus encoding +/// Deserializes [`LeafVersion`] as a `u8` using consensus encoding. #[cfg(feature = "serde")] #[cfg_attr(docsrs, doc(cfg(feature = "serde")))] impl<'de> ::serde::Deserialize<'de> for LeafVersion { - fn deserialize(deserializer: D) -> Result where D: ::serde::Deserializer<'de> { + fn deserialize(deserializer: D) -> Result + where + D: ::serde::Deserializer<'de> + { struct U8Visitor; impl<'de> ::serde::de::Visitor<'de> for U8Visitor { type Value = LeafVersion; @@ -907,20 +941,20 @@ impl<'de> ::serde::Deserialize<'de> for LeafVersion { } } -/// Detailed error type for taproot builder +/// Detailed error type for taproot builder. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum TaprootBuilderError { - /// Merkle Tree depth must not be more than 128 + /// Merkle tree depth must not be more than 128. InvalidMerkleTreeDepth(usize), - /// Nodes must be added specified in DFS order + /// Nodes must be added specified in DFS walk order. NodeNotInDfsOrder, - /// Two nodes at depth 0 are not allowed + /// Two nodes at depth 0 are not allowed. OverCompleteTree, - /// Invalid taproot internal key + /// Invalid taproot internal key. InvalidInternalKey(secp256k1::Error), - /// Called finalize on an incomplete tree + /// Called finalize on an incomplete tree. IncompleteTree, - /// Called finalize on a empty tree + /// Called finalize on a empty tree. EmptyTree, } @@ -935,11 +969,9 @@ impl fmt::Display for TaprootBuilderError { "Attempted to create a tree with two nodes at depth 0. 
There must\ only be a exactly one node at depth 0", ), - TaprootBuilderError::InvalidMerkleTreeDepth(d) => write!( - f, - "Merkle Tree depth({}) must be less than {}", - d, TAPROOT_CONTROL_MAX_NODE_COUNT - ), + TaprootBuilderError::InvalidMerkleTreeDepth(d) => { + write!(f, "Merkle Tree depth({}) must be less than {}", d, TAPROOT_CONTROL_MAX_NODE_COUNT) + } TaprootBuilderError::InvalidInternalKey(e) => { write!(f, "Invalid Internal XOnly key : {}", e) } @@ -957,20 +989,22 @@ impl fmt::Display for TaprootBuilderError { #[cfg_attr(docsrs, doc(cfg(feature = "std")))] impl error::Error for TaprootBuilderError {} -/// Detailed error type for taproot utilities +/// Detailed error type for taproot utilities. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum TaprootError { - /// Proof size must be a multiple of 32 + /// Proof size must be a multiple of 32. InvalidMerkleBranchSize(usize), - /// Merkle Tree depth must not be more than 128 + /// Merkle tree depth must not be more than 128. InvalidMerkleTreeDepth(usize), - /// The last bit of tapleaf version must be zero + /// The last bit of tapleaf version must be zero. InvalidTaprootLeafVersion(u8), - /// Invalid Control Block Size + /// Invalid control block size. InvalidControlBlockSize(usize), - /// Invalid taproot internal key + /// Invalid taproot internal key. InvalidInternalKey(secp256k1::Error), - /// Empty TapTree + /// Invalid parity for internal key. + InvalidParity(secp256k1::InvalidParityValue), + /// Empty tap tree. 
EmptyTree, } @@ -999,6 +1033,7 @@ impl fmt::Display for TaprootError { ), // TODO: add source when in MSRV TaprootError::InvalidInternalKey(e) => write!(f, "Invalid Internal XOnly key : {}", e), + TaprootError::InvalidParity(e) => write!(f, "Invalid parity value for internal key: {}", e), TaprootError::EmptyTree => write!(f, "Taproot Tree must contain at least one script"), } } diff --git a/src/util/uint.rs b/src/util/uint.rs index 1dbf20aee..b85ede063 100644 --- a/src/util/uint.rs +++ b/src/util/uint.rs @@ -19,7 +19,7 @@ //! macro_rules! construct_uint { - ($name:ident, $n_words:expr) => ( + ($name:ident, $n_words:expr) => { /// Little-endian large integer type #[derive(Copy, Clone, PartialEq, Eq, Hash, Default)] pub struct $name(pub [u64; $n_words]); @@ -169,7 +169,9 @@ macro_rules! construct_uint { let &mut $name(ref mut arr) = self; for i in 0..$n_words { arr[i] = arr[i].wrapping_add(1); - if arr[i] != 0 { break; } + if arr[i] != 0 { + break; + } } } } @@ -188,8 +190,12 @@ macro_rules! construct_uint { // and the auto derive is a lexicographic ordering(i.e. memcmp) // which with numbers is equivalent to big-endian for i in 0..$n_words { - if self[$n_words - 1 - i] < other[$n_words - 1 - i] { return ::core::cmp::Ordering::Less; } - if self[$n_words - 1 - i] > other[$n_words - 1 - i] { return ::core::cmp::Ordering::Greater; } + if self[$n_words - 1 - i] < other[$n_words - 1 - i] { + return ::core::cmp::Ordering::Less; + } + if self[$n_words - 1 - i] > other[$n_words - 1 - i] { + return ::core::cmp::Ordering::Greater; + } } ::core::cmp::Ordering::Equal } @@ -499,7 +505,7 @@ macro_rules! construct_uint { } } } - ); + }; } construct_uint!(Uint256, 4);