Merge pull request #9 from dfns-labs/serde-issue
Address (de)serialization issue
survived authored Sep 26, 2023
2 parents bb2e710 + 4a177ec commit 8ada318
Showing 5 changed files with 272 additions and 26 deletions.
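Before the per-file diffs, here is a minimal sketch (not part of the commit) of the round trip these changes are meant to support for the identity point, using the same `generic-ec` APIs the updated tests exercise; the curve choice is arbitrary:

```rust
// Illustration only: byte-encode and decode the identity (zero) point,
// the case this PR addresses.
use generic_ec::{curves::Secp256k1, Point};

fn main() {
    let zero = Point::<Secp256k1>::zero();

    let compressed = zero.to_bytes(true);
    let uncompressed = zero.to_bytes(false);

    assert_eq!(zero, Point::<Secp256k1>::from_bytes(&compressed).unwrap());
    assert_eq!(zero, Point::<Secp256k1>::from_bytes(&uncompressed).unwrap());
}
```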
16 changes: 13 additions & 3 deletions generic-ec-curves/src/rust_crypto/point.rs
@@ -109,7 +109,9 @@ where
let point_encoded = E::AffinePoint::from(self.0).to_encoded_point(true);

let mut bytes = Self::Bytes::default();
bytes.copy_from_slice(point_encoded.as_bytes());
if !bool::from(Self::is_zero(self)) {
bytes.copy_from_slice(point_encoded.as_bytes());
}

bytes
}
@@ -126,7 +128,9 @@ where
let point_encoded = E::AffinePoint::from(self.0).to_encoded_point(false);

let mut bytes = Self::Bytes::default();
bytes.copy_from_slice(point_encoded.as_bytes());
if !bool::from(Self::is_zero(self)) {
bytes.copy_from_slice(point_encoded.as_bytes());
}

bytes
}
@@ -138,7 +142,13 @@ where
E::AffinePoint: FromEncodedPoint<E> + Into<E::ProjectivePoint>,
FieldBytesSize<E>: ModulusSize,
{
fn decode(bytes: &[u8]) -> Option<Self> {
fn decode(mut bytes: &[u8]) -> Option<Self> {
let all_zero = bytes.iter().all(|b| *b == 0);
if all_zero {
// This is the only representation of identity point recognized
// by `elliptic-curve` library
bytes = &[0]
}
let encoded_point = EncodedPoint::<E>::from_bytes(bytes).ok()?;
Option::from(E::AffinePoint::from_encoded_point(&encoded_point))
.map(|point: E::AffinePoint| Self(point.into()))
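The `decode` change above maps an all-zero buffer (the fixed-width encoding now produced for the identity point) to the single-byte SEC1 identity encoding before handing it to `elliptic-curve`. A standalone sketch of that convention, with an illustrative helper name that is not library API:

```rust
// Sketch only: mirrors the normalization added to `decode` above.
fn normalize_identity_encoding(bytes: &[u8]) -> &[u8] {
    if bytes.iter().all(|b| *b == 0) {
        // SEC1 encodes the point at infinity as a single 0x00 octet, and that
        // is the only identity representation the `elliptic-curve` crate accepts.
        &[0]
    } else {
        bytes
    }
}

#[test]
fn identity_encoding_is_normalized() {
    assert_eq!(normalize_identity_encoding(&[0u8; 33]), &[0u8]);
    assert_eq!(normalize_identity_encoding(&[2u8, 1, 2]), &[2u8, 1, 2]);
}
```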
43 changes: 38 additions & 5 deletions generic-ec/src/serde.rs
@@ -399,13 +399,29 @@ mod utils {
where
S: serde::Serializer,
{
#[cfg(feature = "alloc")]
if serializer.is_human_readable() {
let bytes_hex = hex::encode(source);
return serializer.serialize_str(&bytes_hex);
// We only support serialization of byte arrays up to 128 bytes. It can be generalized when
// Rust has better support of const generics
let mut buf = [0u8; 256];

if source.as_ref().len() * 2 > buf.len() {
return Err(<S::Error as serde::ser::Error>::custom(
super::error_msg::ByteArrayTooLarge {
len: source.as_ref().len(),
supported_len: buf.len() / 2,
},
));
}
let buf = &mut buf[..2 * source.as_ref().len()];
hex::encode_to_slice(source, buf)
.map_err(<S::Error as serde::ser::Error>::custom)?;
let buf_str = core::str::from_utf8(buf).map_err(|e| {
<S::Error as serde::ser::Error>::custom(super::error_msg::MalformedHex(e))
})?;
serializer.serialize_str(buf_str)
} else {
serializer.serialize_bytes(source.as_ref())
}

serializer.serialize_bytes(source.as_ref())
}
}

@@ -528,4 +544,21 @@ mod error_msg {
write!(f, "invalid scalar")
}
}

pub struct MalformedHex(pub core::str::Utf8Error);
impl fmt::Display for MalformedHex {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "malformed hex: {}", self.0)
}
}

pub struct ByteArrayTooLarge {
pub len: usize,
pub supported_len: usize,
}
impl fmt::Display for ByteArrayTooLarge {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "byte array is too large: its length is {} bytes, but only up to {} bytes can be serialized", self.len, self.supported_len)
}
}
}
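The human-readable branch above avoids heap allocation by hex-encoding into a fixed 256-byte stack buffer, which is where the 128-byte input limit comes from. A self-contained sketch of that technique using `hex::encode_to_slice`; the function name is illustrative:

```rust
// Sketch only: hex-encode into a caller-provided buffer and view it as &str,
// the value that ends up in `serializer.serialize_str` above.
fn hex_into<'a>(input: &[u8], buf: &'a mut [u8]) -> Option<&'a str> {
    let out = buf.get_mut(..2 * input.len())?; // None if the buffer is too small
    hex::encode_to_slice(input, out).ok()?;
    core::str::from_utf8(out).ok()
}

fn main() {
    // A 256-byte buffer caps inputs at 128 bytes, matching the limit above.
    let mut buf = [0u8; 256];
    assert_eq!(hex_into(&[0xde, 0xad, 0xbe, 0xef], &mut buf), Some("deadbeef"));
    assert_eq!(hex_into(&[0u8; 129], &mut buf), None);
}
```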
13 changes: 11 additions & 2 deletions tests/Cargo.toml
@@ -6,9 +6,18 @@ publish = false

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dev-dependencies]
generic-ec = { path = "../generic-ec", features = ["all-curves"] }
[dependencies]
generic-ec = { path = "../generic-ec", default-features = false, features = ["all-curves", "serde"] }

[dev-dependencies]
generic-tests = "0.1"
rand_dev = "0.1"
rand = "0.8"

serde = "1"
serde_with = "2"
serde_test = "1"
hex = "0.4"

[features]
default = ["generic-ec/std"]
35 changes: 19 additions & 16 deletions tests/tests/curves.rs
@@ -104,18 +104,19 @@
fn point_bytes<E: Curve>() {
let mut rng = DevRng::new();

let s = Scalar::<E>::random(&mut rng);
let p = Point::generator() * s;
let random_point = Point::generator() * Scalar::<E>::random(&mut rng);

let bytes_compressed = p.to_bytes(true);
let bytes_uncompressed = p.to_bytes(false);
assert!(bytes_compressed.len() <= bytes_uncompressed.len());
for point in [Point::zero(), Point::generator().into(), random_point] {
let bytes_compressed = point.to_bytes(true);
let bytes_uncompressed = point.to_bytes(false);
assert!(bytes_compressed.len() <= bytes_uncompressed.len());

let p1 = Point::<E>::from_bytes(&bytes_compressed).unwrap();
let p2 = Point::<E>::from_bytes(&bytes_uncompressed).unwrap();
let p1 = Point::<E>::from_bytes(&bytes_compressed).unwrap();
let p2 = Point::<E>::from_bytes(&bytes_uncompressed).unwrap();

assert_eq!(p, p1);
assert_eq!(p, p2);
assert_eq!(point, p1);
assert_eq!(point, p2);
}
}

#[test]
@@ -161,15 +162,17 @@ mod tests {
fn scalar_from_bytes_mod_order<E: Curve>() {
let mut rng = DevRng::new();

let s = Scalar::<E>::random(&mut rng);
let s_be = s.to_be_bytes();
let s_le = s.to_le_bytes();
let random_scalar = Scalar::<E>::random(&mut rng);
for s in [Scalar::zero(), random_scalar] {
let s_be = s.to_be_bytes();
let s_le = s.to_le_bytes();

let s1 = Scalar::<E>::from_be_bytes_mod_order(&s_be);
let s2 = Scalar::<E>::from_le_bytes_mod_order(&s_le);
let s1 = Scalar::<E>::from_be_bytes_mod_order(&s_be);
let s2 = Scalar::<E>::from_le_bytes_mod_order(&s_le);

assert_eq!(s, s1);
assert_eq!(s, s2);
assert_eq!(s, s1);
assert_eq!(s, s2);
}
}

fn _is_copy<T: Copy>() {}
191 changes: 191 additions & 0 deletions tests/tests/serde.rs
@@ -0,0 +1,191 @@
#[generic_tests::define]
mod tests {
use generic_ec::{Curve, Point, Scalar};
use serde_test::{Configure, Token};

#[test]
fn serialize_point<E: Curve>() {
let mut rng = rand_dev::DevRng::new();

let random_point = Point::<E>::generator() * Scalar::random(&mut rng);
for point in [Point::zero(), Point::generator().into(), random_point] {
let point_uncompressed = point.to_bytes(false).to_vec().leak();
let point_uncompressed_hex = hex::encode(&point_uncompressed).leak();
let point_compressed = point.to_bytes(true).to_vec().leak();
let point_compressed_hex = hex::encode(&point_compressed).leak();

// Human-readable, uncompressed
serde_test::assert_ser_tokens(
&point.readable(),
&[
Token::Struct {
name: "PointUncompressed",
len: 2,
},
Token::Str("curve"),
Token::Str(E::CURVE_NAME),
Token::Str("point"),
Token::Str(point_uncompressed_hex),
Token::StructEnd,
],
);

// Human-readable, compressed
serde_test::assert_ser_tokens(
&Compressed(point).readable(),
&[
Token::NewtypeStruct {
name: "PointCompact",
},
Token::Str(point_compressed_hex),
],
);

// Binary, uncompressed
serde_test::assert_ser_tokens(
&point.compact(),
&[
Token::Struct {
name: "PointUncompressed",
len: 2,
},
Token::Str("curve"),
Token::Str(E::CURVE_NAME),
Token::Str("point"),
Token::Bytes(point_uncompressed),
Token::StructEnd,
],
);

// Binary, compressed
serde_test::assert_ser_tokens(
&Compressed(point).compact(),
&[
Token::NewtypeStruct {
name: "PointCompact",
},
Token::Bytes(point_compressed),
],
);
}
}

#[test]
fn deserialize_point<E: Curve>() {
let mut rng = rand_dev::DevRng::new();

let random_point = Point::<E>::generator() * Scalar::random(&mut rng);
for point in [Point::zero(), Point::generator().into(), random_point] {
let point_uncompressed = point.to_bytes(false).to_vec().leak();
let point_uncompressed_hex = hex::encode(&point_uncompressed).leak();
let point_compressed = point.to_bytes(true).to_vec().leak();
let point_compressed_hex = hex::encode(&point_compressed).leak();

// Uncompressed, hex-encoding
serde_test::assert_de_tokens(
&point.readable(),
&[
Token::Struct {
name: "PointUncompressed",
len: 2,
},
Token::Str("curve"),
Token::Str(E::CURVE_NAME),
Token::Str("point"),
Token::Str(point_uncompressed_hex),
Token::StructEnd,
],
);
// Uncompressed, seq-encoded
{
let mut tokens = vec![
Token::Struct {
name: "PointUncompressed",
len: 2,
},
Token::Str("curve"),
Token::Str(E::CURVE_NAME),
Token::Str("point"),
Token::Seq { len: None },
];
tokens.extend(point_uncompressed.iter().copied().map(Token::U8));
tokens.extend([Token::SeqEnd, Token::StructEnd]);
serde_test::assert_de_tokens(&point.readable(), &tokens);
}
// Uncompressed, bytes-encoded
serde_test::assert_de_tokens(
&point.readable(),
&[
Token::Struct {
name: "PointUncompressed",
len: 2,
},
Token::Str("curve"),
Token::Str(E::CURVE_NAME),
Token::Str("point"),
Token::Bytes(point_uncompressed),
Token::StructEnd,
],
);

// Compressed, hex-encoding
serde_test::assert_de_tokens(
&Compressed(point).readable(),
&[
Token::NewtypeStruct {
name: "PointCompact",
},
Token::Str(point_compressed_hex),
],
);
// Compressed, seq-encoded
{
let mut tokens = vec![
Token::NewtypeStruct {
name: "PointCompact",
},
Token::Seq { len: None },
];
tokens.extend(point_compressed.iter().copied().map(Token::U8));
tokens.push(Token::SeqEnd);
serde_test::assert_de_tokens(&Compressed(point).readable(), &tokens);
}
}
}

#[derive(PartialEq, Eq, Debug)]
struct Compressed<T>(T);
impl<T> serde::Serialize for Compressed<T>
where
generic_ec::serde::Compact: serde_with::SerializeAs<T>,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
use serde_with::SerializeAs;
generic_ec::serde::Compact::serialize_as(&self.0, serializer)
}
}
impl<'de, T> serde::Deserialize<'de> for Compressed<T>
where
generic_ec::serde::Compact: serde_with::DeserializeAs<'de, T>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
use serde_with::DeserializeAs;
generic_ec::serde::Compact::deserialize_as(deserializer).map(Self)
}
}

#[instantiate_tests(<generic_ec::curves::Secp256k1>)]
mod secp256k1 {}

#[instantiate_tests(<generic_ec::curves::Secp256r1>)]
mod secp256r1 {}

#[instantiate_tests(<generic_ec::curves::Stark>)]
mod stark {}
}
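For reference, the human-readable tokens checked above correspond to JSON of the form `{"curve":"<E::CURVE_NAME>","point":"<hex>"}`. A hedged sketch of that round trip; it assumes `serde_json` is added as an extra dev-dependency (it is not listed in the Cargo.toml changes above):

```rust
// Illustration only: JSON round trip matching the token streams above.
use generic_ec::{curves::Secp256k1, Point, Scalar};

fn main() {
    let mut rng = rand_dev::DevRng::new();
    let point = Point::<Secp256k1>::generator() * Scalar::random(&mut rng);

    // Expected shape: {"curve":"<E::CURVE_NAME>","point":"<uncompressed point as hex>"}
    let json = serde_json::to_string(&point).expect("serialize");
    let back: Point<Secp256k1> = serde_json::from_str(&json).expect("deserialize");
    assert_eq!(point, back);
}
```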
