This repository has been archived by the owner on Dec 18, 2023. It is now read-only.

Add test and fix for non-canonical point at infinity #157

Closed
13 changes: 13 additions & 0 deletions bls12_381/src/curves/g1.rs
@@ -180,6 +180,7 @@ mod test {

use super::*;
use crate::g1;
use ark_serialize::CanonicalDeserialize;
use ark_std::{rand::Rng, UniformRand};

fn sample_unchecked() -> Affine<g1::Config> {
@@ -204,4 +205,16 @@
assert!(p.is_in_correct_subgroup_assuming_on_curve());
}
}

#[test]
fn non_canonical_identity_point() {
let non_canonical_hex = "c01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
assert_eq!(non_canonical_bytes.len(), 48);

let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);

assert!(maybe_affine_point.is_err())
}
}
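
For contrast with the test above, the canonical compressed encoding of the identity sets only the compression and infinity flags (first byte 0xc0) and leaves every remaining bit zero. A minimal companion sketch, not part of this PR, assuming the same test-module imports plus `ark_ec::AffineRepr` for `is_zero`:

    #[test]
    fn canonical_identity_point() {
        use ark_ec::AffineRepr;

        // Flags 0b1100_0000 (compressed + infinity); all other bits zero.
        let canonical_hex = "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
        let canonical_bytes = hex::decode(canonical_hex).unwrap();
        assert_eq!(canonical_bytes.len(), 48);

        let p: G1Affine =
            CanonicalDeserialize::deserialize_compressed(&canonical_bytes[..]).unwrap();
        assert!(p.is_zero());
    }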
27 changes: 21 additions & 6 deletions bls12_381/src/curves/util.rs
@@ -1,6 +1,7 @@
use ark_ec::{short_weierstrass::Affine, AffineRepr};
use ark_ff::{BigInteger384, PrimeField};
use ark_serialize::SerializationError;
use ark_std::Zero;

use crate::{g1::Config as G1Config, g2::Config as G2Config, Fq, Fq2, G1Affine, G2Affine};

@@ -14,6 +15,7 @@ pub struct EncodingFlags {
}

impl EncodingFlags {
/// Fetches the flags from the byte-string
pub fn get_flags(bytes: &[u8]) -> Self {
Comment on lines +18 to 19

I think some extra validation here is needed to catch another, more subtle class of non-canonical infinity point encodings: some flag combinations are nonsensical. Here's a failing test to show you what I mean:

#[test]
fn bad_flag_combination() {
    // See https://github.com/zkcrypto/pairing/tree/fa8103764a07bd273927447d434de18aace252d3/src/bls12_381#serialization
    // - Bit 1 is compressed/uncompressed
    // - Bit 2 is infinity
    // - Bit 3 is lexicographical order for compressed point deserialization
    // Hence `0b1110` ("e" in hex) or `0b0110` ("6" in hex) are both nonsensical.
    let non_canonical_hex = "e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
    let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
    assert_eq!(non_canonical_bytes.len(), 48);

    let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
        CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);

    assert!(maybe_affine_point.is_err())
}
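
One way to catch these is to validate the three flag bits jointly before acting on them: the sort flag only carries meaning for a compressed, finite point, so any encoding that sets it alongside the infinity flag, or without the compression flag, can be rejected outright. A rough sketch of that idea (a hypothetical helper, not the validation that was merged):

    // Hypothetical helper: returns false for flag combinations that the
    // zkcrypto serialization format never produces.
    fn flags_are_sensible(first_byte: u8) -> bool {
        let compressed = (first_byte >> 7) & 1 == 1;
        let infinity = (first_byte >> 6) & 1 == 1;
        let sort = (first_byte >> 5) & 1 == 1;
        // The sort flag is only meaningful for compressed, finite points.
        !(sort && (!compressed || infinity))
    }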

let compression_flag_set = (bytes[0] >> 7) & 1;
let infinity_flag_set = (bytes[0] >> 6) & 1;
@@ -25,6 +27,8 @@ impl EncodingFlags {
is_lexographically_largest: sort_flag_set == 1,
}
}

/// Encodes the flags into the byte-string
pub fn encode_flags(&self, bytes: &mut [u8]) {
if self.is_compressed {
bytes[0] |= 1 << 7;
@@ -38,6 +42,13 @@
bytes[0] |= 1 << 5;
}
}

/// Removes the flags from the byte-string.
///
/// This reverses the effects of `encode_flags`.
pub fn remove_flags(bytes: &mut [u8]) {
bytes[0] &= 0b0001_1111;
}
}
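
// Hedged sketch, not part of this diff: how the three helpers compose.
// `encode_flags` sets the top three bits, `get_flags` reads them back, and
// `remove_flags` clears them so the field bytes can be parsed. Assumes the
// struct's fields are visible at the call site (as in this crate) and that
// the test lives in a `#[cfg(test)]` module.
#[test]
fn flag_helpers_round_trip() {
    let mut bytes = [0u8; 48];
    let flags = EncodingFlags {
        is_compressed: true,
        is_infinity: false,
        is_lexographically_largest: true,
    };
    flags.encode_flags(&mut bytes);
    assert_eq!(bytes[0], 0b1010_0000);

    let decoded = EncodingFlags::get_flags(&bytes);
    assert!(decoded.is_compressed);
    assert!(!decoded.is_infinity);
    assert!(decoded.is_lexographically_largest);

    EncodingFlags::remove_flags(&mut bytes);
    assert_eq!(bytes[0], 0);
}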

pub(crate) fn deserialize_fq(bytes: [u8; 48]) -> Option<Fq> {
@@ -81,8 +92,7 @@ pub(crate) fn read_fq_with_offset(
tmp.copy_from_slice(&bytes[offset * G1_SERIALIZED_SIZE..G1_SERIALIZED_SIZE * (offset + 1)]);

if mask {
// Mask away the flag bits
tmp[0] &= 0b0001_1111;
EncodingFlags::remove_flags(&mut tmp);
}
deserialize_fq(tmp).ok_or(SerializationError::InvalidData)
}
@@ -99,18 +109,23 @@ pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
// Obtain the three flags from the start of the byte sequence
let flags = EncodingFlags::get_flags(&bytes[..]);

// We expect to be deserializing a compressed point
if !flags.is_compressed {
return Err(SerializationError::UnexpectedFlags);
}

// Attempt to obtain the x-coordinate
let x = read_fq_with_offset(&bytes, 0, true)?;
Member

Should we make this a byte-based check? i.e. directly on the bytes that we've read?

Member

Good catch, I've PRed to @kevaundray's fork, if not merged there soon I'll open a PR directly here.
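
For illustration, a byte-based version of the check might look like the sketch below (hypothetical; it reuses `G1_SERIALIZED_SIZE` and the new `remove_flags` helper from this file, and is not the code that was merged):

    if flags.is_infinity {
        // Every non-flag bit of a canonical infinity encoding must be zero.
        let mut tmp = [0u8; G1_SERIALIZED_SIZE];
        tmp.copy_from_slice(&bytes[..G1_SERIALIZED_SIZE]);
        EncodingFlags::remove_flags(&mut tmp);
        if tmp.iter().any(|&b| b != 0) {
            return Err(SerializationError::InvalidData);
        }
        return Ok(G1Affine::zero());
    }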


if flags.is_infinity {
// Check that the `x` co-ordinate was `0`
if !x.is_zero() {
return Err(SerializationError::InvalidData);
}

return Ok(G1Affine::zero());
}

let p = G1Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
.ok_or(SerializationError::InvalidData)?;

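
Beyond the identity edge cases, a serialize/deserialize round-trip on an ordinary point exercises the same paths from the other direction. A minimal sketch, assuming the crate's `CanonicalSerialize` implementation and the test-module imports used above:

    #[test]
    fn generator_compressed_round_trip() {
        use ark_ec::AffineRepr;
        use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};

        let g = G1Affine::generator();
        let mut buf = Vec::new();
        g.serialize_compressed(&mut buf).unwrap();
        assert_eq!(buf.len(), 48);

        let decoded: G1Affine =
            CanonicalDeserialize::deserialize_compressed(&buf[..]).unwrap();
        assert_eq!(g, decoded);
    }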