Merge pull request lightningdevkit#1518 from valentinewallace/2022-06-OMs-prefactor

Onion messages v1 pre-refactor
TheBlueMatt authored Jun 21, 2022
2 parents 10071b5 + ee805e9 commit 3676a05
Showing 5 changed files with 308 additions and 15 deletions.
18 changes: 3 additions & 15 deletions lightning/src/ln/channelmanager.rs
@@ -24,7 +24,7 @@ use bitcoin::blockdata::transaction::Transaction;
use bitcoin::blockdata::constants::genesis_block;
use bitcoin::network::constants::Network;

use bitcoin::hashes::{Hash, HashEngine};
use bitcoin::hashes::Hash;
use bitcoin::hashes::sha256::Hash as Sha256;
use bitcoin::hashes::sha256d::Hash as Sha256dHash;
use bitcoin::hash_types::{BlockHash, Txid};
@@ -2175,22 +2175,10 @@ impl<Signer: Sign, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelMana
}
},
onion_utils::Hop::Forward { next_hop_data, next_hop_hmac, new_packet_bytes } => {
let mut new_pubkey = msg.onion_routing_packet.public_key.unwrap();

let blinding_factor = {
let mut sha = Sha256::engine();
sha.input(&new_pubkey.serialize()[..]);
sha.input(&shared_secret);
Sha256::from_engine(sha).into_inner()
};

let public_key = if let Err(e) = new_pubkey.mul_assign(&self.secp_ctx, &blinding_factor[..]) {
Err(e)
} else { Ok(new_pubkey) };

let new_pubkey = msg.onion_routing_packet.public_key.unwrap();
let outgoing_packet = msgs::OnionPacket {
version: 0,
public_key,
public_key: onion_utils::next_hop_packet_pubkey(&self.secp_ctx, new_pubkey, &shared_secret),
hop_data: new_packet_bytes,
hmac: next_hop_hmac.clone(),
};
11 changes: 11 additions & 0 deletions lightning/src/ln/onion_utils.rs
@@ -74,6 +74,17 @@ pub(super) fn gen_ammag_from_shared_secret(shared_secret: &[u8]) -> [u8; 32] {
Hmac::from_engine(hmac).into_inner()
}

pub(super) fn next_hop_packet_pubkey<T: secp256k1::Signing + secp256k1::Verification>(secp_ctx: &Secp256k1<T>, mut packet_pubkey: PublicKey, packet_shared_secret: &[u8; 32]) -> Result<PublicKey, secp256k1::Error> {
let blinding_factor = {
let mut sha = Sha256::engine();
sha.input(&packet_pubkey.serialize()[..]);
sha.input(packet_shared_secret);
Sha256::from_engine(sha).into_inner()
};

packet_pubkey.mul_assign(secp_ctx, &blinding_factor[..]).map(|_| packet_pubkey)
}

// can only fail if an intermediary hop has an invalid public key or session_priv is invalid
#[inline]
pub(super) fn construct_onion_keys_callback<T: secp256k1::Signing, FType: FnMut(SharedSecret, [u8; 32], PublicKey, &RouteHop, usize)> (secp_ctx: &Secp256k1<T>, path: &Vec<RouteHop>, session_priv: &SecretKey, mut callback: FType) -> Result<(), secp256k1::Error> {
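For context, a rough sketch (not part of the diff) of the blinding step the new next_hop_packet_pubkey helper performs — the same computation the channelmanager.rs hunk above previously inlined; the free-standing function name below is illustrative only:

use bitcoin::hashes::{Hash, HashEngine};
use bitcoin::hashes::sha256::Hash as Sha256;
use bitcoin::secp256k1::{self, PublicKey, Secp256k1};

// blinding_factor = SHA256(current_packet_pubkey || shared_secret_with_this_hop)
// next_pubkey     = blinding_factor * current_packet_pubkey
fn sketch_next_hop_pubkey<T: secp256k1::Signing + secp256k1::Verification>(
	secp_ctx: &Secp256k1<T>, mut packet_pubkey: PublicKey, shared_secret: &[u8; 32],
) -> Result<PublicKey, secp256k1::Error> {
	let mut sha = Sha256::engine();
	sha.input(&packet_pubkey.serialize()[..]);
	sha.input(shared_secret);
	let blinding_factor = Sha256::from_engine(sha).into_inner();
	// Tweak-multiply so the next hop sees a fresh ephemeral key it cannot link to the previous one.
	packet_pubkey.mul_assign(secp_ctx, &blinding_factor[..])?;
	Ok(packet_pubkey)
}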
263 changes: 263 additions & 0 deletions lightning/src/util/chacha20poly1305rfc.rs
@@ -10,6 +10,10 @@
// This is a port of Andrew Moons poly1305-donna
// https://github.com/floodyberry/poly1305-donna

use ln::msgs::DecodeError;
use util::ser::{FixedLengthReader, LengthRead, LengthReadableArgs, Readable, Writeable, Writer};
use io::{self, Read, Write};

#[cfg(not(fuzzing))]
mod real_chachapoly {
use util::chacha20::ChaCha20;
@@ -70,6 +74,26 @@ mod real_chachapoly {
self.mac.raw_result(out_tag);
}

// Encrypt `input_output` in-place. To finish and calculate the tag, use `finish_and_get_tag`
// below.
pub(super) fn encrypt_in_place(&mut self, input_output: &mut [u8]) {
debug_assert!(self.finished == false);
self.cipher.process_in_place(input_output);
self.data_len += input_output.len();
self.mac.input(input_output);
}

// If we were previously encrypting with `encrypt_in_place`, this method can be used to finish
// encrypting and calculate the tag.
pub(super) fn finish_and_get_tag(&mut self, out_tag: &mut [u8]) {
debug_assert!(self.finished == false);
ChaCha20Poly1305RFC::pad_mac_16(&mut self.mac, self.data_len);
self.finished = true;
self.mac.input(&self.aad_len.to_le_bytes());
self.mac.input(&(self.data_len as u64).to_le_bytes());
self.mac.raw_result(out_tag);
}

pub fn decrypt(&mut self, input: &[u8], output: &mut [u8], tag: &[u8]) -> bool {
assert!(input.len() == output.len());
assert!(self.finished == false);
@@ -92,11 +116,141 @@
false
}
}

// Decrypt in place, without checking the tag. Use `finish_and_check_tag` to check it
// later when decryption finishes.
//
// Should never be `pub` because the public API should always enforce tag checking.
pub(super) fn decrypt_in_place(&mut self, input_output: &mut [u8]) {
debug_assert!(self.finished == false);
self.mac.input(input_output);
self.data_len += input_output.len();
self.cipher.process_in_place(input_output);
}

// If we were previously decrypting with `decrypt_in_place`, this method must be used to finish
// decrypting and check the tag. Returns whether or not the tag is valid.
pub(super) fn finish_and_check_tag(&mut self, tag: &[u8]) -> bool {
debug_assert!(self.finished == false);
self.finished = true;
ChaCha20Poly1305RFC::pad_mac_16(&mut self.mac, self.data_len);
self.mac.input(&self.aad_len.to_le_bytes());
self.mac.input(&(self.data_len as u64).to_le_bytes());

let mut calc_tag = [0u8; 16];
self.mac.raw_result(&mut calc_tag);
if fixed_time_eq(&calc_tag, tag) {
true
} else {
false
}
}
}
}
#[cfg(not(fuzzing))]
pub use self::real_chachapoly::ChaCha20Poly1305RFC;
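A minimal sketch of how the new in-place halves fit together; since encrypt_in_place, decrypt_in_place and the finish_* methods are pub(super), this is only callable from within lightning/src/util, and the key, nonce and payload below are made up:

let key = [0x42u8; 32];
let nonce = [0u8; 12];

let mut enc = ChaCha20Poly1305RFC::new(&key, &nonce, &[]);
let mut buf = *b"example payload to encrypt";
enc.encrypt_in_place(&mut buf);          // buf now holds the ciphertext
let mut tag = [0u8; 16];
enc.finish_and_get_tag(&mut tag);        // finalize the Poly1305 tag

let mut dec = ChaCha20Poly1305RFC::new(&key, &nonce, &[]);
dec.decrypt_in_place(&mut buf);          // buf decrypted back in place
assert!(dec.finish_and_check_tag(&tag)); // tag must verify or the data is rejected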

/// Enables simultaneously reading and decrypting a ChaCha20Poly1305RFC stream from a std::io::Read.
struct ChaChaPolyReader<'a, R: Read> {
pub chacha: &'a mut ChaCha20Poly1305RFC,
pub read: R,
}

impl<'a, R: Read> Read for ChaChaPolyReader<'a, R> {
// Decrypt bytes from Self::read into `dest`.
// `ChaCha20Poly1305RFC::finish_and_check_tag` must be called to check the tag after all reads
// complete.
fn read(&mut self, dest: &mut [u8]) -> Result<usize, io::Error> {
let res = self.read.read(dest)?;
if res > 0 {
self.chacha.decrypt_in_place(&mut dest[0..res]);
}
Ok(res)
}
}

/// Enables simultaneously writing and encrypting a byte stream into a Writer.
struct ChaChaPolyWriter<'a, W: Writer> {
pub chacha: &'a mut ChaCha20Poly1305RFC,
pub write: &'a mut W,
}

impl<'a, W: Writer> Writer for ChaChaPolyWriter<'a, W> {
// Encrypt then write bytes from `src` into Self::write.
// `ChaCha20Poly1305RFC::finish_and_get_tag` can be called to retrieve the tag after all writes
// complete.
fn write_all(&mut self, src: &[u8]) -> Result<(), io::Error> {
let mut src_idx = 0;
while src_idx < src.len() {
let mut write_buffer = [0; 8192];
let bytes_written = (&mut write_buffer[..]).write(&src[src_idx..]).expect("In-memory writes can't fail");
self.chacha.encrypt_in_place(&mut write_buffer[..bytes_written]);
self.write.write_all(&write_buffer[..bytes_written])?;
src_idx += bytes_written;
}
Ok(())
}
}

/// Enables the use of the serialization macros for objects that need to be simultaneously encrypted and
/// serialized. This allows us to avoid an intermediate Vec allocation.
pub(crate) struct ChaChaPolyWriteAdapter<'a, W: Writeable> {
pub rho: [u8; 32],
pub writeable: &'a W,
}

impl<'a, W: Writeable> ChaChaPolyWriteAdapter<'a, W> {
#[allow(unused)] // This will be used for onion messages soon
pub fn new(rho: [u8; 32], writeable: &'a W) -> ChaChaPolyWriteAdapter<'a, W> {
Self { rho, writeable }
}
}

impl<'a, T: Writeable> Writeable for ChaChaPolyWriteAdapter<'a, T> {
// Simultaneously write and encrypt Self::writeable.
fn write<W: Writer>(&self, w: &mut W) -> Result<(), io::Error> {
let mut chacha = ChaCha20Poly1305RFC::new(&self.rho, &[0; 12], &[]);
let mut chacha_stream = ChaChaPolyWriter { chacha: &mut chacha, write: w };
self.writeable.write(&mut chacha_stream)?;
let mut tag = [0 as u8; 16];
chacha.finish_and_get_tag(&mut tag);
tag.write(w)?;

Ok(())
}
}
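In other words, the adapter streams the ChaCha20 ciphertext of the object's serialization straight into the target writer and appends the 16-byte Poly1305 tag, with no intermediate Vec. A hedged usage sketch, where some_writeable stands in for any Writeable (the tests further down exercise the same round trip):

let rho = [42u8; 32];                       // encryption key, e.g. an onion rho key
let adapter = ChaChaPolyWriteAdapter::new(rho, &some_writeable);
let bytes = adapter.encode();               // ciphertext || 16-byte tag
assert_eq!(bytes.len(), some_writeable.serialized_length() + 16);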

/// Enables the use of the serialization macros for objects that need to be simultaneously decrypted and
/// deserialized. This allows us to avoid an intermediate Vec allocation.
pub(crate) struct ChaChaPolyReadAdapter<R: Readable> {
#[allow(unused)] // This will be used soon for onion messages
pub readable: R,
}

impl<T: Readable> LengthReadableArgs<[u8; 32]> for ChaChaPolyReadAdapter<T> {
// Simultaneously read and decrypt an object from a LengthRead, storing it in Self::readable.
// LengthRead must be used instead of std::io::Read because we need the total length to separate
// out the tag at the end.
fn read<R: LengthRead>(mut r: &mut R, secret: [u8; 32]) -> Result<Self, DecodeError> {
if r.total_bytes() < 16 { return Err(DecodeError::InvalidValue) }

let mut chacha = ChaCha20Poly1305RFC::new(&secret, &[0; 12], &[]);
let decrypted_len = r.total_bytes() - 16;
let s = FixedLengthReader::new(&mut r, decrypted_len);
let mut chacha_stream = ChaChaPolyReader { chacha: &mut chacha, read: s };
let readable: T = Readable::read(&mut chacha_stream)?;
chacha_stream.read.eat_remaining()?;

let mut tag = [0 as u8; 16];
r.read_exact(&mut tag)?;
if !chacha.finish_and_check_tag(&tag) {
return Err(DecodeError::InvalidValue)
}

Ok(Self { readable })
}
}

#[cfg(fuzzing)]
mod fuzzy_chachapoly {
#[derive(Clone, Copy)]
@@ -130,6 +284,16 @@ mod fuzzy_chachapoly {
self.finished = true;
}

pub(super) fn encrypt_in_place(&mut self, _input_output: &mut [u8]) {
assert!(self.finished == false);
self.finished = true;
}

pub(super) fn finish_and_get_tag(&mut self, out_tag: &mut [u8]) {
out_tag.copy_from_slice(&self.tag);
self.finished = true;
}

pub fn decrypt(&mut self, input: &[u8], output: &mut [u8], tag: &[u8]) -> bool {
assert!(input.len() == output.len());
assert!(self.finished == false);
@@ -139,7 +303,106 @@
self.finished = true;
true
}

pub(super) fn decrypt_in_place(&mut self, _input: &mut [u8]) {
assert!(self.finished == false);
}

pub(super) fn finish_and_check_tag(&mut self, tag: &[u8]) -> bool {
if tag[..] != self.tag[..] { return false; }
self.finished = true;
true
}
}
}
#[cfg(fuzzing)]
pub use self::fuzzy_chachapoly::ChaCha20Poly1305RFC;

#[cfg(test)]
mod tests {
use ln::msgs::DecodeError;
use super::{ChaChaPolyReadAdapter, ChaChaPolyWriteAdapter};
use util::ser::{self, FixedLengthReader, LengthReadableArgs, Writeable};

// Used for testing various lengths of serialization.
#[derive(Debug, PartialEq)]
struct TestWriteable {
field1: Vec<u8>,
field2: Vec<u8>,
field3: Vec<u8>,
}
impl_writeable_tlv_based!(TestWriteable, {
(1, field1, vec_type),
(2, field2, vec_type),
(3, field3, vec_type),
});

#[test]
fn test_chacha_stream_adapters() {
// Check that ChaChaPolyReadAdapter and ChaChaPolyWriteAdapter correctly encode and decode an
// encrypted object.
macro_rules! check_object_read_write {
($obj: expr) => {
// First, serialize the object, encrypted with ChaCha20Poly1305.
let rho = [42; 32];
let writeable_len = $obj.serialized_length() as u64 + 16;
let write_adapter = ChaChaPolyWriteAdapter::new(rho, &$obj);
let encrypted_writeable_bytes = write_adapter.encode();
let encrypted_writeable = &encrypted_writeable_bytes[..];

// Now deserialize the object back and make sure it matches the original.
let mut rd = FixedLengthReader::new(encrypted_writeable, writeable_len);
let read_adapter = <ChaChaPolyReadAdapter<TestWriteable>>::read(&mut rd, rho).unwrap();
assert_eq!($obj, read_adapter.readable);
};
}

// Try a big object that will require multiple write buffers.
let big_writeable = TestWriteable {
field1: vec![43],
field2: vec![44; 4192],
field3: vec![45; 4192 + 1],
};
check_object_read_write!(big_writeable);

// Try a small object that fits into one write buffer.
let small_writeable = TestWriteable {
field1: vec![43],
field2: vec![44],
field3: vec![45],
};
check_object_read_write!(small_writeable);
}

fn do_chacha_stream_adapters_ser_macros() -> Result<(), DecodeError> {
let writeable = TestWriteable {
field1: vec![43],
field2: vec![44; 4192],
field3: vec![45; 4192 + 1],
};

// First, serialize the object into a TLV stream, encrypted with ChaCha20Poly1305.
let rho = [42; 32];
let write_adapter = ChaChaPolyWriteAdapter::new(rho, &writeable);
let mut writer = ser::VecWriter(Vec::new());
encode_tlv_stream!(&mut writer, {
(1, write_adapter, required),
});

// Now deserialize the object back and make sure it matches the original.
let mut read_adapter: Option<ChaChaPolyReadAdapter<TestWriteable>> = None;
decode_tlv_stream!(&writer.0[..], {
(1, read_adapter, (option: LengthReadableArgs, rho)),
});
assert_eq!(writeable, read_adapter.unwrap().readable);

Ok(())
}

#[test]
fn chacha_stream_adapters_ser_macros() {
// Test that our stream adapters work as expected with the TLV macros.
// This also serves to test the `option: $trait` variant of the `decode_tlv` ser macro.
do_chacha_stream_adapters_ser_macros().unwrap()
}
}
22 changes: 22 additions & 0 deletions lightning/src/util/ser.rs
@@ -134,6 +134,13 @@ impl<R: Read> Read for FixedLengthReader<R> {
}
}

impl<R: Read> LengthRead for FixedLengthReader<R> {
#[inline]
fn total_bytes(&self) -> u64 {
self.total_bytes
}
}

/// A Read which tracks whether any bytes have been read at all. This allows us to distinguish
/// between "EOF reached before we started" and "EOF reached mid-read".
pub(crate) struct ReadTrackingReader<R: Read> {
@@ -220,6 +227,21 @@ pub trait ReadableArgs<P>
fn read<R: Read>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
}

/// A std::io::Read that also provides the total bytes available to read.
pub(crate) trait LengthRead: Read {
/// The total number of bytes available to read.
fn total_bytes(&self) -> u64;
}

/// A trait that various higher-level rust-lightning types implement allowing them to be read in
/// from a Read given some additional set of arguments which is required to deserialize, requiring
/// the implementer to provide the total length of the read.
pub(crate) trait LengthReadableArgs<P> where Self: Sized
{
/// Reads a Self in from the given LengthRead
fn read<R: LengthRead>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
}

/// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a Read
///
/// (C-not exported) as we only export serialization to/from byte arrays instead
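For illustration, any reader that knows its total length up front can implement the new trait just as FixedLengthReader does above; a hypothetical slice-backed reader (not part of this diff, std-style io assumed) might look like:

use std::io::{self, Read};

struct SliceReader<'a> { data: &'a [u8], pos: usize }

impl<'a> Read for SliceReader<'a> {
	fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
		let n = core::cmp::min(buf.len(), self.data.len() - self.pos);
		buf[..n].copy_from_slice(&self.data[self.pos..self.pos + n]);
		self.pos += n;
		Ok(n)
	}
}

impl<'a> LengthRead for SliceReader<'a> {
	// Total bytes in the stream, letting LengthReadableArgs::read split off the 16-byte tag.
	fn total_bytes(&self) -> u64 { self.data.len() as u64 }
}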