Merge branch 'master' into utxo-retaddr
coderofstuff authored Oct 15, 2024
2 parents 8e07ad7 + 0df2de5 commit 17274ff
Showing 33 changed files with 1,236 additions and 913 deletions.
1,181 changes: 484 additions & 697 deletions Cargo.lock

Large diffs are not rendered by default.

7 changes: 3 additions & 4 deletions Cargo.toml
@@ -224,6 +224,7 @@ regex = "1.10.2"
 ripemd = { version = "0.1.3", default-features = false }
 rlimit = "0.10.1"
 rocksdb = "0.22.0"
+rv = "0.16.4"
 secp256k1 = { version = "0.29.0", features = [
     "global-context",
     "rand-std",
@@ -243,8 +244,6 @@ sha3 = "0.10.8"
 slugify-rs = "0.0.3"
 smallvec = { version = "1.11.1", features = ["serde"] }
 sorted-insert = "0.2.3"
-statest = "0.2.2"
-statrs = "0.13.0" # TODO "0.16.0"
 subtle = { version = "2.5.0", default-features = false }
 sysinfo = "0.31.2"
 tempfile = "3.8.1"
@@ -253,8 +252,8 @@ thiserror = "1.0.50"
 tokio = { version = "1.33.0", features = ["sync", "rt-multi-thread"] }
 tokio-stream = "0.1.14"
 toml = "0.8.8"
-tonic = { version = "0.12.2", features = ["tls-webpki-roots", "gzip", "transport"] }
-tonic-build = { version = "0.12.2", features = ["prost"] }
+tonic = { version = "0.12.3", features = ["tls-webpki-roots", "gzip", "transport"] }
+tonic-build = { version = "0.12.3", features = ["prost"] }
 triggered = "0.1.2"
 uuid = { version = "1.5.0", features = ["v4", "fast-rng", "serde"] }
 wasm-bindgen = { version = "0.2.93", features = ["serde-serialize"] }
5 changes: 2 additions & 3 deletions README.md
@@ -184,7 +184,8 @@ To build WASM on MacOS you need to install `llvm` from homebrew (at the time of
 <summary>
 Kaspa CLI + Wallet
 </summary>
-`kaspa-cli` crate provides cli-driven RPC interface to the node and a
+
+`kaspa-cli` crate provides a cli-driven RPC interface to the node and a
 terminal interface to the Rusty Kaspa Wallet runtime. These wallets are
 compatible with WASM SDK Wallet API and Kaspa NG projects.
 
@@ -323,8 +324,6 @@ wRPC
 <summary>Simulation framework (Simpa)</summary>
-Logging in `kaspad` and `simpa` can be [filtered](https://docs.rs/env_logger/0.10.0/env_logger/#filtering-results) by either:
 The current codebase supports a full in-process network simulation, building an actual DAG over virtual time with virtual delay and benchmarking validation time (following the simulation generation).
 To see the available commands
3 changes: 1 addition & 2 deletions components/addressmanager/Cargo.toml
@@ -27,5 +27,4 @@ thiserror.workspace = true
 tokio.workspace = true
 
 [dev-dependencies]
-statrs.workspace = true
-statest.workspace = true
+rv.workspace = true
17 changes: 8 additions & 9 deletions components/addressmanager/src/lib.rs
@@ -520,8 +520,7 @@ mod address_store_with_cache {
 use kaspa_database::create_temp_db;
 use kaspa_database::prelude::ConnBuilder;
 use kaspa_utils::networking::IpAddress;
-use statest::ks::KSTest;
-use statrs::distribution::Uniform;
+use rv::{dist::Uniform, misc::ks_test as one_way_ks_test, traits::Cdf};
 use std::net::{IpAddr, Ipv6Addr};
 
 #[test]
@@ -591,10 +590,11 @@ mod address_store_with_cache {
 assert!(num_of_buckets >= 12);
 
 // Run multiple Kolmogorov–Smirnov tests to offset random noise of the random weighted iterator
-let num_of_trials = 512;
+let num_of_trials = 2048; // Number of trials to run the test, chosen to reduce random noise.
 let mut cul_p = 0.;
 // The target uniform distribution
-let target_u_dist = Uniform::new(0.0, (num_of_buckets) as f64).unwrap();
+let target_uniform_dist = Uniform::new(1.0, num_of_buckets as f64).unwrap();
+let uniform_cdf = |x: f64| target_uniform_dist.cdf(&x);
 for _ in 0..num_of_trials {
 // The weight sampled expected uniform distibution
 let prioritized_address_distribution = am
@@ -603,13 +603,12 @@
 .take(num_of_buckets)
 .map(|addr| addr.prefix_bucket().as_u64() as f64)
 .collect_vec();
-
-let ks_test = KSTest::new(prioritized_address_distribution.as_slice());
-cul_p += ks_test.ks1(&target_u_dist).0;
+cul_p += one_way_ks_test(prioritized_address_distribution.as_slice(), uniform_cdf).1;
 }
 
 // Normalize and adjust p to test for uniformity, over average of all trials.
-let adjusted_p = (0.5 - cul_p / num_of_trials as f64).abs();
+// we do this to reduce the effect of random noise failing this test.
+let adjusted_p = ((cul_p / num_of_trials as f64) - 0.5).abs();
 // Define the significance threshold.
 let significance = 0.10;
 
@@ -619,7 +618,7 @@
 adjusted_p,
 significance
 );
-assert!(adjusted_p <= significance)
+assert!(adjusted_p <= significance);
 }
 }
 }
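For reference, the statistical technique in this test can be sketched standalone: under the null hypothesis each one-way KS p-value is itself roughly Uniform(0, 1), so the mean p-value over many trials should land near 0.5, and the test rejects only when the mean strays past the significance threshold. Below is a hypothetical helper (not part of the codebase), assuming only the `rv` API shape visible in the diff above — `ks_test(sample, cdf)` returning a `(statistic, p_value)` tuple:

use rv::{dist::Uniform, misc::ks_test, traits::Cdf};

// Average one-way KS p-values over many trials; `draw_sample` produces one
// weighted sample per trial, tested against the CDF of `dist`.
fn mean_ks_p_value(trials: usize, draw_sample: impl Fn() -> Vec<f64>, dist: &Uniform) -> f64 {
    let cdf = |x: f64| dist.cdf(&x);
    let mut cum_p = 0.0;
    for _ in 0..trials {
        // `.1` is the p-value, as in the test above (`.0` is the KS statistic).
        cum_p += ks_test(draw_sample().as_slice(), cdf).1;
    }
    cum_p / trials as f64
}

// Usage mirroring the test: reject if the mean p-value strays from 0.5.
// let adjusted_p = (mean_ks_p_value(2048, sample_buckets, &dist) - 0.5).abs();
// assert!(adjusted_p <= 0.10);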
7 changes: 6 additions & 1 deletion consensus/Cargo.toml
@@ -55,9 +55,14 @@ serde_json.workspace = true
 flate2.workspace = true
 rand_distr.workspace = true
 kaspa-txscript-errors.workspace = true
+kaspa-addresses.workspace = true
 
 [[bench]]
+name = "parallel_muhash"
+harness = false
+
+[[bench]]
-name = "hash_benchmarks"
+name = "check_scripts"
 harness = false
 
 [features]
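Each `[[bench]]` target with `harness = false` opts out of the default libtest harness so that Criterion's `criterion_main!` can supply `main` instead; an individual bench can then be run with `cargo bench --bench parallel_muhash` or `cargo bench --bench check_scripts`.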
126 changes: 126 additions & 0 deletions consensus/benches/check_scripts.rs
@@ -0,0 +1,126 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion, SamplingMode};
use kaspa_addresses::{Address, Prefix, Version};
use kaspa_consensus::processes::transaction_validator::transaction_validator_populated::{
    check_scripts_par_iter, check_scripts_par_iter_pool, check_scripts_sequential,
};
use kaspa_consensus_core::hashing::sighash::{calc_schnorr_signature_hash, SigHashReusedValuesUnsync};
use kaspa_consensus_core::hashing::sighash_type::SIG_HASH_ALL;
use kaspa_consensus_core::subnets::SubnetworkId;
use kaspa_consensus_core::tx::{MutableTransaction, Transaction, TransactionInput, TransactionOutpoint, UtxoEntry};
use kaspa_txscript::caches::Cache;
use kaspa_txscript::pay_to_address_script;
use kaspa_utils::iter::parallelism_in_power_steps;
use rand::{thread_rng, Rng};
use secp256k1::Keypair;

// You may need to add more detailed mocks depending on your actual code.
fn mock_tx(inputs_count: usize, non_uniq_signatures: usize) -> (Transaction, Vec<UtxoEntry>) {
    let reused_values = SigHashReusedValuesUnsync::new();
    let dummy_prev_out = TransactionOutpoint::new(kaspa_hashes::Hash::from_u64_word(1), 1);
    let mut tx = Transaction::new(
        0,
        vec![],
        vec![],
        0,
        SubnetworkId::from_bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]),
        0,
        vec![],
    );
    let mut utxos = vec![];
    let mut kps = vec![];
    for _ in 0..inputs_count - non_uniq_signatures {
        let kp = Keypair::new(secp256k1::SECP256K1, &mut thread_rng());
        tx.inputs.push(TransactionInput { previous_outpoint: dummy_prev_out, signature_script: vec![], sequence: 0, sig_op_count: 1 });
        let address = Address::new(Prefix::Mainnet, Version::PubKey, &kp.x_only_public_key().0.serialize());
        utxos.push(UtxoEntry {
            amount: thread_rng().gen::<u32>() as u64,
            script_public_key: pay_to_address_script(&address),
            block_daa_score: 333,
            is_coinbase: false,
        });
        kps.push(kp);
    }
    for _ in 0..non_uniq_signatures {
        let kp = kps.last().unwrap();
        tx.inputs.push(TransactionInput { previous_outpoint: dummy_prev_out, signature_script: vec![], sequence: 0, sig_op_count: 1 });
        let address = Address::new(Prefix::Mainnet, Version::PubKey, &kp.x_only_public_key().0.serialize());
        utxos.push(UtxoEntry {
            amount: thread_rng().gen::<u32>() as u64,
            script_public_key: pay_to_address_script(&address),
            block_daa_score: 444,
            is_coinbase: false,
        });
    }
    for (i, kp) in kps.iter().enumerate().take(inputs_count - non_uniq_signatures) {
        let mut_tx = MutableTransaction::with_entries(&tx, utxos.clone());
        let sig_hash = calc_schnorr_signature_hash(&mut_tx.as_verifiable(), i, SIG_HASH_ALL, &reused_values);
        let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap();
        let sig: [u8; 64] = *kp.sign_schnorr(msg).as_ref();
        // This represents OP_DATA_65 <SIGNATURE+SIGHASH_TYPE> (since signature length is 64 bytes and SIGHASH_TYPE is one byte)
        tx.inputs[i].signature_script = std::iter::once(65u8).chain(sig).chain([SIG_HASH_ALL.to_u8()]).collect();
    }
    let length = tx.inputs.len();
    for i in (inputs_count - non_uniq_signatures)..length {
        let kp = kps.last().unwrap();
        let mut_tx = MutableTransaction::with_entries(&tx, utxos.clone());
        let sig_hash = calc_schnorr_signature_hash(&mut_tx.as_verifiable(), i, SIG_HASH_ALL, &reused_values);
        let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap();
        let sig: [u8; 64] = *kp.sign_schnorr(msg).as_ref();
        // This represents OP_DATA_65 <SIGNATURE+SIGHASH_TYPE> (since signature length is 64 bytes and SIGHASH_TYPE is one byte)
        tx.inputs[i].signature_script = std::iter::once(65u8).chain(sig).chain([SIG_HASH_ALL.to_u8()]).collect();
    }
    (tx, utxos)
}

fn benchmark_check_scripts(c: &mut Criterion) {
    for inputs_count in [100, 50, 25, 10, 5, 2] {
        for non_uniq_signatures in [0, inputs_count / 2] {
            let (tx, utxos) = mock_tx(inputs_count, non_uniq_signatures);
            let mut group = c.benchmark_group(format!("inputs: {inputs_count}, non uniq: {non_uniq_signatures}"));
            group.sampling_mode(SamplingMode::Flat);

            group.bench_function("single_thread", |b| {
                let tx = MutableTransaction::with_entries(&tx, utxos.clone());
                let cache = Cache::new(inputs_count as u64);
                b.iter(|| {
                    cache.clear();
                    check_scripts_sequential(black_box(&cache), black_box(&tx.as_verifiable())).unwrap();
                })
            });

            group.bench_function("rayon par iter", |b| {
                let tx = MutableTransaction::with_entries(tx.clone(), utxos.clone());
                let cache = Cache::new(inputs_count as u64);
                b.iter(|| {
                    cache.clear();
                    check_scripts_par_iter(black_box(&cache), black_box(&tx.as_verifiable())).unwrap();
                })
            });

            // Iterate powers of two up to available parallelism
            for i in parallelism_in_power_steps() {
                if inputs_count >= i {
                    group.bench_function(format!("rayon, custom thread pool, thread count {i}"), |b| {
                        let tx = MutableTransaction::with_entries(tx.clone(), utxos.clone());
                        // Create a custom thread pool with the specified number of threads
                        let pool = rayon::ThreadPoolBuilder::new().num_threads(i).build().unwrap();
                        let cache = Cache::new(inputs_count as u64);
                        b.iter(|| {
                            cache.clear();
                            check_scripts_par_iter_pool(black_box(&cache), black_box(&tx.as_verifiable()), black_box(&pool)).unwrap();
                        })
                    });
                }
            }
        }
    }
}

criterion_group! {
    name = benches;
    // This can be any expression that returns a `Criterion` object.
    config = Criterion::default().with_output_color(true).measurement_time(std::time::Duration::new(20, 0));
    targets = benchmark_check_scripts
}

criterion_main!(benches);
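Note that each `b.iter` closure starts with `cache.clear()`, so every measured iteration validates the scripts against a cold signature cache rather than short-circuiting on results cached by the previous iteration.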
15 changes: 0 additions & 15 deletions consensus/benches/hash_benchmarks.rs

This file was deleted.

66 changes: 66 additions & 0 deletions consensus/benches/parallel_muhash.rs
@@ -0,0 +1,66 @@
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use itertools::Itertools;
use kaspa_consensus_core::{
    muhash::MuHashExtensions,
    subnets::SUBNETWORK_ID_NATIVE,
    tx::{ScriptPublicKey, SignableTransaction, Transaction, TransactionInput, TransactionOutpoint, TransactionOutput, UtxoEntry},
};
use kaspa_hashes::TransactionID;
use kaspa_muhash::MuHash;
use kaspa_utils::iter::parallelism_in_power_steps;
use rayon::prelude::*;

fn generate_transaction(ins: usize, outs: usize, randomness: u64) -> SignableTransaction {
    let mut tx = Transaction::new(0, vec![], vec![], 0, SUBNETWORK_ID_NATIVE, 0, vec![]);
    let mut entries = vec![];
    for i in 0..ins {
        let mut hasher = TransactionID::new();
        hasher.write(i.to_le_bytes());
        hasher.write(randomness.to_le_bytes());
        let input = TransactionInput::new(TransactionOutpoint::new(hasher.finalize(), 0), vec![10; 66], 0, 1);
        let entry = UtxoEntry::new(22222222, ScriptPublicKey::from_vec(0, vec![99; 34]), 23456, false);
        tx.inputs.push(input);
        entries.push(entry);
    }
    for _ in 0..outs {
        let output = TransactionOutput::new(23456, ScriptPublicKey::from_vec(0, vec![101; 34]));
        tx.outputs.push(output);
    }
    tx.finalize();
    SignableTransaction::with_entries(tx, entries)
}

pub fn parallel_muhash_benchmark(c: &mut Criterion) {
    let mut group = c.benchmark_group("muhash txs");
    let txs = (0..256).map(|i| generate_transaction(2, 2, i)).collect_vec();
    group.bench_function("seq", |b| {
        b.iter(|| {
            let mut mh = MuHash::new();
            for tx in txs.iter() {
                mh.add_transaction(&tx.as_verifiable(), 222);
            }
            black_box(mh)
        })
    });

    for threads in parallelism_in_power_steps() {
        group.bench_function(format!("par {threads}"), |b| {
            let pool = rayon::ThreadPoolBuilder::new().num_threads(threads).build().unwrap();
            b.iter(|| {
                pool.install(|| {
                    let mh =
                        txs.par_iter().map(|tx| MuHash::from_transaction(&tx.as_verifiable(), 222)).reduce(MuHash::new, |mut a, b| {
                            a.combine(&b);
                            a
                        });
                    black_box(mh)
                })
            })
        });
    }

    group.finish();
}

criterion_group!(benches, parallel_muhash_benchmark);
criterion_main!(benches);
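The parallel variant leans on MuHash being a commutative, associative multiset hash: each transaction's contribution is computed independently via `from_transaction`, and the partial hashes are folded together with `combine`, so Rayon's `reduce` is free to combine them in any order and still produce the same digest as the sequential `add_transaction` loop.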
6 changes: 3 additions & 3 deletions consensus/client/src/sign.rs
@@ -7,7 +7,7 @@ use core::iter::once
 use itertools::Itertools;
 use kaspa_consensus_core::{
     hashing::{
-        sighash::{calc_schnorr_signature_hash, SigHashReusedValues},
+        sighash::{calc_schnorr_signature_hash, SigHashReusedValuesUnsync},
         sighash_type::SIG_HASH_ALL,
     },
     tx::PopulatedTransaction,
@@ -44,7 +44,7 @@ pub fn sign_with_multiple_v3<'a>(tx: &'a Transaction, privkeys: &[[u8; 32]]) ->
 map.insert(script_pub_key_script, schnorr_key);
 }
 
-let mut reused_values = SigHashReusedValues::new();
+let reused_values = SigHashReusedValuesUnsync::new();
 let mut additional_signatures_required = false;
 {
 let input_len = tx.inner().inputs.len();
@@ -59,7 +59,7 @@ pub fn sign_with_multiple_v3<'a>(tx: &'a Transaction, privkeys: &[[u8; 32]]) ->
 };
 let script = script_pub_key.script();
 if let Some(schnorr_key) = map.get(script) {
-let sig_hash = calc_schnorr_signature_hash(&populated_transaction, i, SIG_HASH_ALL, &mut reused_values);
+let sig_hash = calc_schnorr_signature_hash(&populated_transaction, i, SIG_HASH_ALL, &reused_values);
 let msg = secp256k1::Message::from_digest_slice(sig_hash.as_bytes().as_slice()).unwrap();
 let sig: [u8; 64] = *schnorr_key.sign_schnorr(msg).as_ref();
 // This represents OP_DATA_65 <SIGNATURE+SIGHASH_TYPE> (since signature length is 64 bytes and SIGHASH_TYPE is one byte)
10 changes: 5 additions & 5 deletions consensus/client/src/signing.rs
@@ -75,7 +75,7 @@ impl SigHashCache {
 }
 }
 
-pub fn sig_op_counts_hash(&mut self, tx: &Transaction, hash_type: SigHashType, reused_values: &mut SigHashReusedValues) -> Hash {
+pub fn sig_op_counts_hash(&mut self, tx: &Transaction, hash_type: SigHashType, reused_values: &SigHashReusedValues) -> Hash {
 if hash_type.is_sighash_anyone_can_pay() {
 return ZERO_HASH;
 }
@@ -185,16 +185,16 @@ pub fn calc_schnorr_signature_hash(
 let mut hasher = TransactionSigningHash::new();
 hasher
 .write_u16(tx.version)
-.update(previous_outputs_hash(&tx, hash_type, &mut reused_values))
-.update(sequences_hash(&tx, hash_type, &mut reused_values))
-.update(sig_op_counts_hash(&tx, hash_type, &mut reused_values));
+.update(previous_outputs_hash(&tx, hash_type, &reused_values))
+.update(sequences_hash(&tx, hash_type, &reused_values))
+.update(sig_op_counts_hash(&tx, hash_type, &reused_values));
 hash_outpoint(&mut hasher, input.previous_outpoint);
 hash_script_public_key(&mut hasher, &utxo.script_public_key);
 hasher
 .write_u64(utxo.amount)
 .write_u64(input.sequence)
 .write_u8(input.sig_op_count)
-.update(outputs_hash(&tx, hash_type, &mut reused_values, input_index))
+.update(outputs_hash(&tx, hash_type, &reused_values, input_index))
 .write_u64(tx.lock_time)
 .update(&tx.subnetwork_id)
 .write_u64(tx.gas)
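The thread running through sign.rs and signing.rs is the same: the reused sighash values are now taken by shared `&` reference rather than `&mut`, which implies the lazy caching has moved behind interior mutability. A minimal sketch of that idea with hypothetical types (not the actual kaspa implementation), assuming a single-threaded variant built on `std::cell::OnceCell`:

use std::cell::OnceCell;

// Hypothetical stand-in for a SigHashReusedValuesUnsync-style cache: each
// value is computed at most once, yet callers only ever need `&self`.
#[derive(Default)]
struct ReusedValuesUnsync {
    previous_outputs_hash: OnceCell<u64>, // a real impl would cache a Hash
}

impl ReusedValuesUnsync {
    // The first caller pays for `compute`; later callers reuse the result.
    fn previous_outputs_hash(&self, compute: impl FnOnce() -> u64) -> u64 {
        *self.previous_outputs_hash.get_or_init(compute)
    }
}

This is what lets call sites declare `let reused_values = SigHashReusedValuesUnsync::new();` without `mut` and pass `&reused_values` into every hash helper.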
1 change: 1 addition & 0 deletions consensus/core/Cargo.toml
@@ -15,6 +15,7 @@ wasm32-sdk = []
 default = []
 
 [dependencies]
+arc-swap.workspace = true
 async-trait.workspace = true
 borsh.workspace = true
 cfg-if.workspace = true