diff --git a/lightning/src/util/chacha20poly1305rfc.rs b/lightning/src/util/chacha20poly1305rfc.rs
index 0971d137a05..6986670ff2e 100644
--- a/lightning/src/util/chacha20poly1305rfc.rs
+++ b/lightning/src/util/chacha20poly1305rfc.rs
@@ -10,8 +10,9 @@
 // This is a port of Andrew Moons poly1305-donna
 // https://github.com/floodyberry/poly1305-donna
 
-use util::ser::{Writeable, Writer};
-use io::{self, Write};
+use ln::msgs::DecodeError;
+use util::ser::{FixedLengthReader, LengthRead, LengthReadableArgs, Readable, Writeable, Writer};
+use io::{self, Read, Write};
 
 #[cfg(not(fuzzing))]
 mod real_chachapoly {
@@ -115,11 +116,59 @@ mod real_chachapoly {
 				false
 			}
 		}
+
+		// Decrypt in place, without checking the tag. Use `finish_and_check_tag` to check it
+		// later when decryption finishes.
+		//
+		// Should never be `pub` because the public API should always enforce tag checking.
+		pub(super) fn decrypt_in_place(&mut self, input_output: &mut [u8]) {
+			debug_assert!(self.finished == false);
+			self.mac.input(input_output);
+			self.data_len += input_output.len();
+			self.cipher.process_in_place(input_output);
+		}
+
+		// If we were previously decrypting with `decrypt_in_place`, this method must be used to finish
+		// decrypting and check the tag. Returns whether or not the tag is valid.
+		pub(super) fn finish_and_check_tag(&mut self, tag: &[u8]) -> bool {
+			debug_assert!(self.finished == false);
+			self.finished = true;
+			ChaCha20Poly1305RFC::pad_mac_16(&mut self.mac, self.data_len);
+			self.mac.input(&self.aad_len.to_le_bytes());
+			self.mac.input(&(self.data_len as u64).to_le_bytes());
+
+			let mut calc_tag = [0u8; 16];
+			self.mac.raw_result(&mut calc_tag);
+			if fixed_time_eq(&calc_tag, tag) {
+				true
+			} else {
+				false
+			}
+		}
 	}
 }
 #[cfg(not(fuzzing))]
 pub use self::real_chachapoly::ChaCha20Poly1305RFC;
 
+/// Enables simultaneously reading and decrypting a ChaCha20Poly1305RFC stream from a std::io::Read.
+struct ChaChaPolyReader<'a, R: Read> {
+	pub chacha: &'a mut ChaCha20Poly1305RFC,
+	pub read: R,
+}
+
+impl<'a, R: Read> Read for ChaChaPolyReader<'a, R> {
+	// Decrypt bytes from Self::read into `dest`.
+	// `ChaCha20Poly1305RFC::finish_and_check_tag` must be called to check the tag after all reads
+	// complete.
+	fn read(&mut self, dest: &mut [u8]) -> Result<usize, io::Error> {
+		let res = self.read.read(dest)?;
+		if res > 0 {
+			self.chacha.decrypt_in_place(&mut dest[0..res]);
+		}
+		Ok(res)
+	}
+}
+
 /// Enables simultaneously writing and encrypting a byte stream into a Writer.
 struct ChaChaPolyWriter<'a, W: Writer> {
 	pub chacha: &'a mut ChaCha20Poly1305RFC,
@@ -171,6 +220,36 @@ impl<'a, T: Writeable> Writeable for ChaChaPolyWriteAdapter<'a, T> {
 	}
 }
 
+/// Enables the use of the serialization macros for objects that need to be simultaneously decrypted and
+/// deserialized. This allows us to avoid an intermediate Vec allocation.
+pub(crate) struct ChaChaPolyReadAdapter<R: Readable> {
+	#[allow(unused)] // This will be used soon for onion messages
+	pub readable: R,
+}
+
+impl<T: Readable> LengthReadableArgs<[u8; 32]> for ChaChaPolyReadAdapter<T> {
+	// Simultaneously read and decrypt an object from a LengthRead, storing it in Self::readable.
+	// LengthRead must be used instead of std::io::Read because we need the total length to separate
+	// out the tag at the end.
+	fn read<R: LengthRead>(mut r: &mut R, secret: [u8; 32]) -> Result<Self, DecodeError> {
+		if r.total_bytes() < 16 { return Err(DecodeError::InvalidValue) }
+
+		let mut chacha = ChaCha20Poly1305RFC::new(&secret, &[0; 12], &[]);
+		let decrypted_len = r.total_bytes() - 16;
+		let s = FixedLengthReader::new(&mut r, decrypted_len);
+		let mut chacha_stream = ChaChaPolyReader { chacha: &mut chacha, read: s };
+		let readable: T = Readable::read(&mut chacha_stream)?;
+
+		let mut tag = [0 as u8; 16];
+		r.read_exact(&mut tag)?;
+		if !chacha.finish_and_check_tag(&tag) {
+			return Err(DecodeError::InvalidValue)
+		}
+
+		Ok(Self { readable })
+	}
+}
+
 #[cfg(fuzzing)]
 mod fuzzy_chachapoly {
 	#[derive(Clone, Copy)]
@@ -223,7 +302,106 @@ mod fuzzy_chachapoly {
 			self.finished = true;
 			true
 		}
+
+		pub(super) fn decrypt_in_place(&mut self, _input: &mut [u8]) {
+			assert!(self.finished == false);
+		}
+
+		pub(super) fn finish_and_check_tag(&mut self, tag: &[u8]) -> bool {
+			if tag[..] != self.tag[..] { return false; }
+			self.finished = true;
+			true
+		}
 	}
 }
 #[cfg(fuzzing)]
 pub use self::fuzzy_chachapoly::ChaCha20Poly1305RFC;
+
+#[cfg(test)]
+mod tests {
+	use ln::msgs::DecodeError;
+	use super::{ChaChaPolyReadAdapter, ChaChaPolyWriteAdapter};
+	use util::ser::{self, FixedLengthReader, LengthReadableArgs, Writeable};
+
+	// Used for testing various lengths of serialization.
+	#[derive(Debug, PartialEq)]
+	struct TestWriteable {
+		field1: Vec<u8>,
+		field2: Vec<u8>,
+		field3: Vec<u8>,
+	}
+	impl_writeable_tlv_based!(TestWriteable, {
+		(1, field1, vec_type),
+		(2, field2, vec_type),
+		(3, field3, vec_type),
+	});
+
+	#[test]
+	fn test_chacha_stream_adapters() {
+		// Check that ChaChaPolyReadAdapter and ChaChaPolyWriteAdapter correctly encode and decode an
+		// encrypted object.
+		macro_rules! check_object_read_write {
+			($obj: expr) => {
+				// First, serialize the object, encrypted with ChaCha20Poly1305.
+				let rho = [42; 32];
+				let writeable_len = $obj.serialized_length() as u64 + 16;
+				let write_adapter = ChaChaPolyWriteAdapter::new(rho, &$obj);
+				let encrypted_writeable_bytes = write_adapter.encode();
+				let encrypted_writeable = &encrypted_writeable_bytes[..];
+
+				// Now deserialize the object back and make sure it matches the original.
+				let mut rd = FixedLengthReader::new(encrypted_writeable, writeable_len);
+				let read_adapter = <ChaChaPolyReadAdapter<TestWriteable>>::read(&mut rd, rho).unwrap();
+				assert_eq!($obj, read_adapter.readable);
+			};
+		}
+
+		// Try a big object that will require multiple write buffers.
+		let big_writeable = TestWriteable {
+			field1: vec![43],
+			field2: vec![44; 4192],
+			field3: vec![45; 4192 + 1],
+		};
+		check_object_read_write!(big_writeable);
+
+		// Try a small object that fits into one write buffer.
+		let small_writeable = TestWriteable {
+			field1: vec![43],
+			field2: vec![44],
+			field3: vec![45],
+		};
+		check_object_read_write!(small_writeable);
+	}
+
+	fn do_chacha_stream_adapters_ser_macros() -> Result<(), DecodeError> {
+		let writeable = TestWriteable {
+			field1: vec![43],
+			field2: vec![44; 4192],
+			field3: vec![45; 4192 + 1],
+		};
+
+		// First, serialize the object into a TLV stream, encrypted with ChaCha20Poly1305.
+		let rho = [42; 32];
+		let write_adapter = ChaChaPolyWriteAdapter::new(rho, &writeable);
+		let mut writer = ser::VecWriter(Vec::new());
+		encode_tlv_stream!(&mut writer, {
+			(1, write_adapter, required),
+		});
+
+		// Now deserialize the object back and make sure it matches the original.
+		let mut read_adapter: Option<ChaChaPolyReadAdapter<TestWriteable>> = None;
+		decode_tlv_stream!(&writer.0[..], {
+			(1, read_adapter, (option: LengthReadableArgs, rho)),
+		});
+		assert_eq!(writeable, read_adapter.unwrap().readable);
+
+		Ok(())
+	}
+
+	#[test]
+	fn chacha_stream_adapters_ser_macros() {
+		// Test that our stream adapters work as expected with the TLV macros.
+		// This also serves to test the `option: $trait` variant of the `decode_tlv` ser macro.
+		do_chacha_stream_adapters_ser_macros().unwrap()
+	}
+}
diff --git a/lightning/src/util/ser.rs b/lightning/src/util/ser.rs
index 428adbc5e66..a45806eef54 100644
--- a/lightning/src/util/ser.rs
+++ b/lightning/src/util/ser.rs
@@ -134,6 +134,13 @@ impl<R: Read> Read for FixedLengthReader<R> {
 	}
 }
 
+impl<R: Read> LengthRead for FixedLengthReader<R> {
+	#[inline]
+	fn total_bytes(&self) -> u64 {
+		self.total_bytes
+	}
+}
+
 /// A Read which tracks whether any bytes have been read at all. This allows us to distinguish
 /// between "EOF reached before we started" and "EOF reached mid-read".
 pub(crate) struct ReadTrackingReader<R: Read> {
@@ -220,6 +227,21 @@ pub trait ReadableArgs<P>
 	fn read<R: Read>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
 }
 
+/// A std::io::Read that also provides the total bytes available to read.
+pub(crate) trait LengthRead: Read {
+	/// The total number of bytes available to read.
+	fn total_bytes(&self) -> u64;
+}
+
+/// A trait that various higher-level rust-lightning types implement allowing them to be read in
+/// from a Read given some additional set of arguments which is required to deserialize, requiring
+/// the implementer to provide the total length of the read.
+pub(crate) trait LengthReadableArgs<P> where Self: Sized
+{
+	/// Reads a Self in from the given LengthRead
+	fn read<R: LengthRead>(reader: &mut R, params: P) -> Result<Self, DecodeError>;
+}
+
 /// A trait that various rust-lightning types implement allowing them to (maybe) be read in from a Read
 /// (C-not exported) as we only export serialization to/from byte arrays instead
diff --git a/lightning/src/util/ser_macros.rs b/lightning/src/util/ser_macros.rs
index 165d1f1edba..816426b1133 100644
--- a/lightning/src/util/ser_macros.rs
+++ b/lightning/src/util/ser_macros.rs
@@ -118,6 +118,9 @@ macro_rules! check_tlv_order {
 	($last_seen_type: expr, $typ: expr, $type: expr, $field: ident, ignorable) => {{
 		// no-op
 	}};
+	($last_seen_type: expr, $typ: expr, $type: expr, $field: ident, (option: $trait: ident $(, $read_arg: expr)?)) => {{
+		// no-op
+	}};
 }
 
 macro_rules! check_missing_tlv {
@@ -144,6 +147,9 @@ macro_rules! check_missing_tlv {
 	($last_seen_type: expr, $type: expr, $field: ident, ignorable) => {{
 		// no-op
 	}};
+	($last_seen_type: expr, $type: expr, $field: ident, (option: $trait: ident $(, $read_arg: expr)?)) => {{
+		// no-op
+	}};
 }
 
 macro_rules! decode_tlv {
@@ -163,6 +169,9 @@ macro_rules! decode_tlv {
 	($reader: expr, $field: ident, ignorable) => {{
 		$field = ser::MaybeReadable::read(&mut $reader)?;
 	}};
+	($reader: expr, $field: ident, (option: $trait: ident $(, $read_arg: expr)?)) => {{
+		$field = Some($trait::read(&mut $reader $(, $read_arg)*)?);
+	}};
 }
 
 macro_rules! decode_tlv_stream {
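
For reviewers, a minimal usage sketch (not part of the patch) of how the new pieces fit together: ChaChaPolyWriteAdapter encrypts while serializing and appends the 16-byte Poly1305 tag, and ChaChaPolyReadAdapter decrypts while deserializing, relying on the new LengthRead impl for FixedLengthReader to know where the ciphertext ends and the tag begins. The same call path is driven through the TLV macros by the new `(option: $trait, $read_arg)` variant, as exercised in do_chacha_stream_adapters_ser_macros. `MyPayload`, the free function, and the exact module paths are illustrative assumptions; both adapters are pub(crate), so code like this only compiles from inside the lightning crate.

use ln::msgs::DecodeError;
use util::chacha20poly1305rfc::{ChaChaPolyReadAdapter, ChaChaPolyWriteAdapter};
use util::ser::{FixedLengthReader, LengthReadableArgs, Writeable};

// Hypothetical payload type; anything usable with the TLV serialization macros will do.
struct MyPayload {
	data: Vec<u8>,
}
impl_writeable_tlv_based!(MyPayload, {
	(0, data, vec_type),
});

fn round_trip(rho: [u8; 32], payload: &MyPayload) -> Result<MyPayload, DecodeError> {
	// Serialize and encrypt in one pass; the ciphertext is followed by the 16-byte tag.
	let encrypted = ChaChaPolyWriteAdapter::new(rho, payload).encode();

	// Deserialize and decrypt in one pass. FixedLengthReader supplies total_bytes()
	// (the new LengthRead trait), which the read adapter uses to split the tag off the end.
	let mut reader = FixedLengthReader::new(&encrypted[..], encrypted.len() as u64);
	let decoded = <ChaChaPolyReadAdapter<MyPayload>>::read(&mut reader, rho)?;
	Ok(decoded.readable)
}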