From 484a23c45a24663dcd0aedafc1ad0f8736bfa43f Mon Sep 17 00:00:00 2001
From: Kevaundray Wedderburn
Date: Sat, 15 Apr 2023 22:34:59 +0100
Subject: [PATCH 01/11] add test for non-canonical identity

---
 bls12_381/src/curves/g1.rs | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/bls12_381/src/curves/g1.rs b/bls12_381/src/curves/g1.rs
index 520348c0..61fd7ddc 100644
--- a/bls12_381/src/curves/g1.rs
+++ b/bls12_381/src/curves/g1.rs
@@ -180,6 +180,7 @@ mod test {
 
     use super::*;
     use crate::g1;
+    use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
     use ark_std::{rand::Rng, UniformRand};
 
     fn sample_unchecked() -> Affine<g1::Config> {
@@ -204,4 +205,24 @@ mod test {
             assert!(p.is_in_correct_subgroup_assuming_on_curve());
         }
     }
+
+    #[test]
+    fn non_canonical_identity_point() {
+        let non_canonical_hex = "c01000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
+        let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
+        assert_eq!(non_canonical_bytes.len(), 48);
+
+        let affine_point: G1Affine =
+            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]).unwrap();
+
+        let mut canonical_bytes = ark_std::vec![0u8; 48];
+        affine_point
+            .serialize_compressed(&mut canonical_bytes[..])
+            .unwrap();
+
+        assert_eq!(
+            non_canonical_bytes, canonical_bytes,
+            "if a point has been successfully deserialized, then it should equal the serialized point. ie serialization is canonical."
+        )
+    }
 }

From e919a764eab0ccec8a33dd90808ee1fe1281898a Mon Sep 17 00:00:00 2001
From: Kevaundray Wedderburn
Date: Sat, 15 Apr 2023 23:00:17 +0100
Subject: [PATCH 02/11] add shorter test case

---
 bls12_381/src/curves/g1.rs | 16 ++++------------
 1 file changed, 4 insertions(+), 12 deletions(-)

diff --git a/bls12_381/src/curves/g1.rs b/bls12_381/src/curves/g1.rs
index 61fd7ddc..035c7816 100644
--- a/bls12_381/src/curves/g1.rs
+++ b/bls12_381/src/curves/g1.rs
@@ -180,7 +180,7 @@ mod test {
 
     use super::*;
     use crate::g1;
-    use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
+    use ark_serialize::CanonicalDeserialize;
     use ark_std::{rand::Rng, UniformRand};
 
     fn sample_unchecked() -> Affine<g1::Config> {
@@ -212,17 +212,9 @@ mod test {
         let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
         assert_eq!(non_canonical_bytes.len(), 48);
 
-        let affine_point: G1Affine =
-            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]).unwrap();
+        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
+            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
 
-        let mut canonical_bytes = ark_std::vec![0u8; 48];
-        affine_point
-            .serialize_compressed(&mut canonical_bytes[..])
-            .unwrap();
-
-        assert_eq!(
-            non_canonical_bytes, canonical_bytes,
-            "if a point has been successfully deserialized, then it should equal the serialized point. ie serialization is canonical."
-        )
+        assert!(maybe_affine_point.is_err())
     }
 }
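Why this input must be rejected: the first byte of the test vector is 0xc0, which sets both the compression flag (bit 7) and the infinity flag (bit 6), but the second byte is 0x10, so the payload under the infinity flag is not zero. The canonical compressed identity is 0xc0 followed by 47 zero bytes. A minimal standalone sketch of that rule (the helper name is illustrative, not part of the crate's API):

    /// Sketch only: true iff `bytes` is the canonical compressed encoding of
    /// the G1 identity, i.e. 0xc0 followed by 47 zero bytes.
    fn is_canonical_compressed_identity(bytes: &[u8; 48]) -> bool {
        let compressed = (bytes[0] & 0b1000_0000) != 0;
        let infinity = (bytes[0] & 0b0100_0000) != 0;
        // Everything outside the two set flag bits must be zero, including the
        // "lexicographically largest" bit and all remaining payload bytes.
        let rest_zero = (bytes[0] & 0b0011_1111) == 0 && bytes[1..].iter().all(|b| *b == 0);
        compressed && infinity && rest_zero
    }

For the vector above, byte 1 is 0x10, so this check fails; the patches that follow make the deserializer enforce the same rule.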

From 6bacd2d14bd9fb6141142608578556cf6d77cc55 Mon Sep 17 00:00:00 2001
From: Kevaundray Wedderburn
Date: Sat, 15 Apr 2023 23:02:01 +0100
Subject: [PATCH 03/11] add fix

---
 bls12_381/src/curves/util.rs | 27 +++++++++++++++++++++------
 1 file changed, 21 insertions(+), 6 deletions(-)

diff --git a/bls12_381/src/curves/util.rs b/bls12_381/src/curves/util.rs
index 7d73ed8c..25a0212d 100644
--- a/bls12_381/src/curves/util.rs
+++ b/bls12_381/src/curves/util.rs
@@ -1,6 +1,7 @@
 use ark_ec::{short_weierstrass::Affine, AffineRepr};
 use ark_ff::{BigInteger384, PrimeField};
 use ark_serialize::SerializationError;
+use ark_std::Zero;
 
 use crate::{g1::Config as G1Config, g2::Config as G2Config, Fq, Fq2, G1Affine, G2Affine};
 
@@ -14,6 +15,7 @@ pub struct EncodingFlags {
 }
 
 impl EncodingFlags {
+    /// Fetches the flags from the byte-string
     pub fn get_flags(bytes: &[u8]) -> Self {
         let compression_flag_set = (bytes[0] >> 7) & 1;
         let infinity_flag_set = (bytes[0] >> 6) & 1;
@@ -25,6 +27,8 @@ impl EncodingFlags {
             is_lexographically_largest: sort_flag_set == 1,
         }
     }
+
+    /// Encodes the flags into the byte-string
     pub fn encode_flags(&self, bytes: &mut [u8]) {
         if self.is_compressed {
             bytes[0] |= 1 << 7;
@@ -38,6 +42,13 @@ impl EncodingFlags {
             bytes[0] |= 1 << 5;
         }
     }
+
+    /// Removes the flags from the byte-string.
+    ///
+    /// This reverses the effects of `encode_flags`.
+    pub fn remove_flags(bytes: &mut [u8]) {
+        bytes[0] &= 0b0001_1111;
+    }
 }
 
 pub(crate) fn deserialize_fq(bytes: [u8; 48]) -> Option<Fq> {
@@ -81,8 +92,7 @@ pub(crate) fn read_fq_with_offset(
     tmp.copy_from_slice(&bytes[offset * G1_SERIALIZED_SIZE..G1_SERIALIZED_SIZE * (offset + 1)]);
 
     if mask {
-        // Mask away the flag bits
-        tmp[0] &= 0b0001_1111;
+        EncodingFlags::remove_flags(&mut tmp);
     }
     deserialize_fq(tmp).ok_or(SerializationError::InvalidData)
 }
@@ -99,18 +109,23 @@ pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
     // Obtain the three flags from the start of the byte sequence
     let flags = EncodingFlags::get_flags(&bytes[..]);
 
-    // we expect to be deserializing a compressed point
+    // We expect to be deserializing a compressed point
     if !flags.is_compressed {
         return Err(SerializationError::UnexpectedFlags);
     }
 
+    // Attempt to obtain the x-coordinate
+    let x = read_fq_with_offset(&bytes, 0, true)?;
+
     if flags.is_infinity {
+        // Check that the `x` co-ordinate was `0`
+        if !x.is_zero() {
+            return Err(SerializationError::InvalidData);
+        }
+
        return Ok(G1Affine::zero());
     }
 
-    // Attempt to obtain the x-coordinate
-    let x = read_fq_with_offset(&bytes, 0, true)?;
-
     let p = G1Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
         .ok_or(SerializationError::InvalidData)?;
 
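For reference, the three header flags handled by `EncodingFlags` live in the top three bits of the first serialized byte, and `remove_flags` simply clears them before the remaining bytes are interpreted as a field element. A small self-contained sketch of the bit arithmetic (constant names are illustrative only):

    fn main() {
        const COMPRESSED: u8 = 1 << 7; // compression flag, bit 7
        const INFINITY: u8 = 1 << 6; // infinity flag, bit 6
        const LARGEST: u8 = 1 << 5; // "lexicographically largest" flag, bit 5

        // 0xc0 is the first byte of the compressed identity: compression and
        // infinity are set, the sort bit is clear.
        let mut first = 0xc0u8;
        assert_eq!(first & COMPRESSED, COMPRESSED);
        assert_eq!(first & INFINITY, INFINITY);
        assert_eq!(first & LARGEST, 0);

        // The masking that `remove_flags` performs: keep only the low five bits.
        first &= 0b0001_1111;
        assert_eq!(first, 0);
    }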

From a30a2eb2fe7c9d404f45ed6013ac96a14e870eea Mon Sep 17 00:00:00 2001
From: mmagician
Date: Sun, 3 Sep 2023 12:40:52 +0200
Subject: [PATCH 04/11] Perform the zero check directly on bytes

---
 bls12_381/src/curves/util.rs | 21 +++++++++++++--------
 1 file changed, 13 insertions(+), 8 deletions(-)

diff --git a/bls12_381/src/curves/util.rs b/bls12_381/src/curves/util.rs
index 25a0212d..7cd546af 100644
--- a/bls12_381/src/curves/util.rs
+++ b/bls12_381/src/curves/util.rs
@@ -1,7 +1,6 @@
 use ark_ec::{short_weierstrass::Affine, AffineRepr};
 use ark_ff::{BigInteger384, PrimeField};
 use ark_serialize::SerializationError;
-use ark_std::Zero;
 
 use crate::{g1::Config as G1Config, g2::Config as G2Config, Fq, Fq2, G1Affine, G2Affine};
 
@@ -82,11 +81,7 @@ pub(crate) fn serialize_fq(field: Fq) -> [u8; 48] {
     result
 }
 
-pub(crate) fn read_fq_with_offset(
-    bytes: &[u8],
-    offset: usize,
-    mask: bool,
-) -> Result<Fq, ark_serialize::SerializationError> {
+fn read_bytes_with_offset(bytes: &[u8], offset: usize, mask: bool) -> [u8; G1_SERIALIZED_SIZE] {
     let mut tmp = [0; G1_SERIALIZED_SIZE];
     // read `G1_SERIALIZED_SIZE` bytes
     tmp.copy_from_slice(&bytes[offset * G1_SERIALIZED_SIZE..G1_SERIALIZED_SIZE * (offset + 1)]);
@@ -94,6 +89,15 @@ pub(crate) fn read_fq_with_offset(
     if mask {
         EncodingFlags::remove_flags(&mut tmp);
     }
+    tmp
+}
+
+pub(crate) fn read_fq_with_offset(
+    bytes: &[u8],
+    offset: usize,
+    mask: bool,
+) -> Result<Fq, ark_serialize::SerializationError> {
+    let tmp = read_bytes_with_offset(bytes, offset, mask);
     deserialize_fq(tmp).ok_or(SerializationError::InvalidData)
 }
 
@@ -115,17 +119,18 @@ pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
     }
 
     // Attempt to obtain the x-coordinate
-    let x = read_fq_with_offset(&bytes, 0, true)?;
+    let x_bytes = read_bytes_with_offset(&bytes, 0, true);
 
     if flags.is_infinity {
         // Check that the `x` co-ordinate was `0`
-        if !x.is_zero() {
+        if x_bytes != [0u8; 48] {
             return Err(SerializationError::InvalidData);
         }
 
         return Ok(G1Affine::zero());
     }
 
+    let x = deserialize_fq(x_bytes).ok_or(SerializationError::InvalidData)?;
     let p = G1Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
         .ok_or(SerializationError::InvalidData)?;
 

From 994c68167fe0f1c846b86d3e1496c74b3bd50227 Mon Sep 17 00:00:00 2001
From: mmagician
Date: Sun, 3 Sep 2023 12:43:36 +0200
Subject: [PATCH 05/11] `read_fq_with_offset` need not be public

---
 bls12_381/src/curves/util.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bls12_381/src/curves/util.rs b/bls12_381/src/curves/util.rs
index 7cd546af..e8fab00c 100644
--- a/bls12_381/src/curves/util.rs
+++ b/bls12_381/src/curves/util.rs
@@ -92,7 +92,7 @@ fn read_bytes_with_offset(bytes: &[u8], offset: usize, mask: bool) -> [u8; G1_SE
     tmp
 }
 
-pub(crate) fn read_fq_with_offset(
+fn read_fq_with_offset(
     bytes: &[u8],
     offset: usize,
     mask: bool,
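Doing the infinity check on the raw masked bytes, rather than on a deserialized `Fq`, keeps the rule purely syntactic: any non-zero payload under an infinity flag is rejected, whether or not those bytes would even parse as a valid field element, and no field deserialization is needed just to compare against zero. A free-standing sketch of the check (not the crate's own helper):

    /// Sketch: with the infinity flag set, the masked coordinate bytes must all be zero.
    fn infinity_payload_is_zero(masked: &[u8; 48]) -> bool {
        masked.iter().all(|b| *b == 0)
    }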

From 39be9ddb958de9f4220b17f70e2a43d8ced6563b Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 7 Sep 2023 21:49:22 +0200
Subject: [PATCH 06/11] if infinity flag is set, all bits should be zero for un- & compressed

---
 bls12_381/src/curves/util.rs | 53 +++++++++++++++++++++++-------------
 1 file changed, 34 insertions(+), 19 deletions(-)

diff --git a/bls12_381/src/curves/util.rs b/bls12_381/src/curves/util.rs
index e8fab00c..f893f2b4 100644
--- a/bls12_381/src/curves/util.rs
+++ b/bls12_381/src/curves/util.rs
@@ -92,15 +92,6 @@ fn read_bytes_with_offset(bytes: &[u8], offset: usize, mask: bool) -> [u8; G1_SE
     tmp
 }
 
-fn read_fq_with_offset(
-    bytes: &[u8],
-    offset: usize,
-    mask: bool,
-) -> Result<Fq, ark_serialize::SerializationError> {
-    let tmp = read_bytes_with_offset(bytes, offset, mask);
-    deserialize_fq(tmp).ok_or(SerializationError::InvalidData)
-}
-
 pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
     mut reader: R,
 ) -> Result<Affine<G1Config>, ark_serialize::SerializationError> {
@@ -153,15 +144,20 @@ pub(crate) fn read_g1_uncompressed<R: ark_serialize::Read>(
         return Err(SerializationError::UnexpectedFlags);
     }
 
+    let x_bytes = read_bytes_with_offset(&bytes, 0, true);
+    let y_bytes = read_bytes_with_offset(&bytes, 1, false);
+
     if flags.is_infinity {
+        if x_bytes != [0u8; 48] || y_bytes != [0u8; 48] {
+            return Err(SerializationError::InvalidData);
+        }
         return Ok(G1Affine::zero());
     }
 
     // Attempt to obtain the x-coordinate
-    let x = read_fq_with_offset(&bytes, 0, true)?;
+    let x = deserialize_fq(x_bytes).ok_or(SerializationError::InvalidData)?;
     // Attempt to obtain the y-coordinate
-    let y = read_fq_with_offset(&bytes, 1, false)?;
-
+    let y = deserialize_fq(y_bytes).ok_or(SerializationError::InvalidData)?;
     let p = G1Affine::new_unchecked(x, y);
 
     Ok(p)
@@ -183,14 +179,19 @@ pub(crate) fn read_g2_compressed<R: ark_serialize::Read>(
         return Err(SerializationError::UnexpectedFlags);
     }
 
+    let xc1_bytes = read_bytes_with_offset(&bytes, 0, true);
+    let xc0_bytes = read_bytes_with_offset(&bytes, 1, false);
+
     if flags.is_infinity {
+        if xc1_bytes != [0u8; 48] || xc0_bytes != [0u8; 48] {
+            return Err(SerializationError::InvalidData);
+        }
         return Ok(G2Affine::zero());
     }
 
     // Attempt to obtain the x-coordinate
-    let xc1 = read_fq_with_offset(&bytes, 0, true)?;
-    let xc0 = read_fq_with_offset(&bytes, 1, false)?;
-
+    let xc1 = deserialize_fq(xc1_bytes).ok_or(SerializationError::InvalidData)?;
+    let xc0 = deserialize_fq(xc0_bytes).ok_or(SerializationError::InvalidData)?;
     let x = Fq2::new(xc0, xc1);
 
     let p = G2Affine::get_point_from_x_unchecked(x, flags.is_lexographically_largest)
@@ -215,18 +216,32 @@ pub(crate) fn read_g2_uncompressed<R: ark_serialize::Read>(
         return Err(SerializationError::UnexpectedFlags);
     }
 
+    let xc1_bytes = read_bytes_with_offset(&bytes, 0, true);
+    let xc0_bytes = read_bytes_with_offset(&bytes, 1, false);
+
+    let yc1_bytes = read_bytes_with_offset(&bytes, 2, false);
+    let yc0_bytes = read_bytes_with_offset(&bytes, 3, false);
+
     if flags.is_infinity {
+        if xc1_bytes != [0u8; 48]
+            || xc0_bytes != [0u8; 48]
+            || yc1_bytes != [0u8; 48]
+            || yc0_bytes != [0u8; 48]
+        {
+            return Err(SerializationError::InvalidData);
+        }
         return Ok(G2Affine::zero());
     }
 
+    let xc1 = deserialize_fq(xc1_bytes).ok_or(SerializationError::InvalidData)?;
+    let xc0 = deserialize_fq(xc0_bytes).ok_or(SerializationError::InvalidData)?;
+    let yc1 = deserialize_fq(yc1_bytes).ok_or(SerializationError::InvalidData)?;
+    let yc0 = deserialize_fq(yc0_bytes).ok_or(SerializationError::InvalidData)?;
+
     // Attempt to obtain the x-coordinate
-    let xc1 = read_fq_with_offset(&bytes, 0, true)?;
-    let xc0 = read_fq_with_offset(&bytes, 1, false)?;
     let x = Fq2::new(xc0, xc1);
 
     // Attempt to obtain the y-coordinate
-    let yc1 = read_fq_with_offset(&bytes, 2, false)?;
-    let yc0 = read_fq_with_offset(&bytes, 3, false)?;
     let y = Fq2::new(yc0, yc1);
 
     let p = G2Affine::new_unchecked(x, y);
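With the uncompressed G1 path and both G2 paths now covered, the only accepted identity encodings are the flag byte (0xc0 compressed, 0x40 uncompressed) followed exclusively by zero bytes. A hedged usage sketch of the intended behaviour, assuming the crate is consumed as `ark_bls12_381` and using the same `CanonicalDeserialize` calls as the tests in this series:

    use ark_bls12_381::G1Affine;
    use ark_serialize::CanonicalDeserialize;

    fn main() {
        // Canonical uncompressed identity: infinity flag set, everything else zero.
        let mut canonical = [0u8; 96];
        canonical[0] = 0x40;
        assert!(G1Affine::deserialize_uncompressed(&canonical[..]).is_ok());

        // Same header with a stray low bit in the y-coordinate bytes: after this
        // patch it is rejected instead of silently decoding to the identity.
        let mut non_canonical = canonical;
        non_canonical[95] = 0x01;
        assert!(G1Affine::deserialize_uncompressed(&non_canonical[..]).is_err());
    }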

From 09996ac313f39e41c7db8c50a8717074fee5bece Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 7 Sep 2023 21:53:40 +0200
Subject: [PATCH 07/11] `get_flags` returns a Result now - catches bad flag combinations

---
 bls12_381/src/curves/util.rs | 26 +++++++++++++++++---------
 1 file changed, 17 insertions(+), 9 deletions(-)

diff --git a/bls12_381/src/curves/util.rs b/bls12_381/src/curves/util.rs
index f893f2b4..5e35e5d2 100644
--- a/bls12_381/src/curves/util.rs
+++ b/bls12_381/src/curves/util.rs
@@ -15,16 +15,24 @@ pub struct EncodingFlags {
 
 impl EncodingFlags {
     /// Fetches the flags from the byte-string
-    pub fn get_flags(bytes: &[u8]) -> Self {
+    pub fn get_flags(bytes: &[u8]) -> Result<Self, SerializationError> {
         let compression_flag_set = (bytes[0] >> 7) & 1;
         let infinity_flag_set = (bytes[0] >> 6) & 1;
         let sort_flag_set = (bytes[0] >> 5) & 1;
 
-        Self {
-            is_compressed: compression_flag_set == 1,
-            is_infinity: infinity_flag_set == 1,
-            is_lexographically_largest: sort_flag_set == 1,
+        let is_compressed = compression_flag_set == 1;
+        let is_infinity = infinity_flag_set == 1;
+        let is_lexographically_largest = sort_flag_set == 1;
+
+        if is_lexographically_largest && (!is_compressed || is_infinity) {
+            return Err(SerializationError::InvalidData);
         }
+
+        Ok(Self {
+            is_compressed,
+            is_infinity,
+            is_lexographically_largest,
+        })
     }
 
     /// Encodes the flags into the byte-string
@@ -102,7 +110,7 @@ pub(crate) fn read_g1_compressed<R: ark_serialize::Read>(
         .ok_or(SerializationError::InvalidData)?;
 
     // Obtain the three flags from the start of the byte sequence
-    let flags = EncodingFlags::get_flags(&bytes[..]);
+    let flags = EncodingFlags::get_flags(&bytes[..])?;
 
     // We expect to be deserializing a compressed point
     if !flags.is_compressed {
@@ -145,7 +153,7 @@ pub(crate) fn read_g1_uncompressed<R: ark_serialize::Read>(
         .map_err(|_| SerializationError::InvalidData)?;
 
     // Obtain the three flags from the start of the byte sequence
-    let flags = EncodingFlags::get_flags(&bytes[..]);
+    let flags = EncodingFlags::get_flags(&bytes[..])?;
 
     // we expect to be deserializing an uncompressed point
     if flags.is_compressed {
@@ -180,7 +188,7 @@ pub(crate) fn read_g2_compressed<R: ark_serialize::Read>(
         .map_err(|_| SerializationError::InvalidData)?;
 
     // Obtain the three flags from the start of the byte sequence
-    let flags = EncodingFlags::get_flags(&bytes);
+    let flags = EncodingFlags::get_flags(&bytes)?;
 
     // we expect to be deserializing a compressed point
     if !flags.is_compressed {
@@ -217,7 +225,7 @@ pub(crate) fn read_g2_uncompressed<R: ark_serialize::Read>(
         .map_err(|_| SerializationError::InvalidData)?;
 
     // Obtain the three flags from the start of the byte sequence
-    let flags = EncodingFlags::get_flags(&bytes);
+    let flags = EncodingFlags::get_flags(&bytes)?;
 
     // we expect to be deserializing an uncompressed point
     if flags.is_compressed {

From ad7f3bd2758e77caee1dd24468ffdb41ca5391de Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 7 Sep 2023 22:03:12 +0200
Subject: [PATCH 08/11] Add test for non-canonical infinity uncompressed

---
 bls12_381/src/curves/g1.rs | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/bls12_381/src/curves/g1.rs b/bls12_381/src/curves/g1.rs
index 9e99ccec..a2a70f36 100644
--- a/bls12_381/src/curves/g1.rs
+++ b/bls12_381/src/curves/g1.rs
@@ -215,6 +215,15 @@ mod test {
         let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
             CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
 
+        assert!(maybe_affine_point.is_err());
+
+        let non_canonical_hex_uncompressed = "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001";
+        let non_canonical_bytes = hex::decode(non_canonical_hex_uncompressed).unwrap();
+        assert_eq!(non_canonical_bytes.len(), 96);
+
+        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
+            CanonicalDeserialize::deserialize_uncompressed(&non_canonical_bytes[..]);
+
         assert!(maybe_affine_point.is_err())
     }
 }

From 0a05859c7309672939da3ba6e10483cfa17dd2cb Mon Sep 17 00:00:00 2001
From: mmagician
Date: Thu, 7 Sep 2023 22:03:39 +0200
Subject: [PATCH 09/11] add test for bad flag combination

Thanks @ArnaudBrousseau for the test
---
 bls12_381/src/curves/g1.rs | 30 ++++++++++++++++++++++++++++++
 1 file changed, 30 insertions(+)

diff --git a/bls12_381/src/curves/g1.rs b/bls12_381/src/curves/g1.rs
index a2a70f36..23e88adb 100644
--- a/bls12_381/src/curves/g1.rs
+++ b/bls12_381/src/curves/g1.rs
@@ -226,4 +226,34 @@ mod test {
 
         assert!(maybe_affine_point.is_err())
     }
+
+    #[test]
+    fn bad_flag_combination() {
+        // See https://github.com/zkcrypto/pairing/tree/fa8103764a07bd273927447d434de18aace252d3/src/bls12_381#serialization
+        // - Bit 1 is compressed/uncompressed
+        // - Bit 2 is infinity
+        // - Bit 3 is lexicographical order for compressed point deserialization
+        // Hence `0b1110` ("e" in hex) or `0b0110` ("6" in hex) are both nonsensical.
+
+        // uncompressed, but lexicographically largest flag is set
+        let non_canonical_hex = "600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
+        let non_canonical_bytes = hex::decode(non_canonical_hex).unwrap();
+        assert_eq!(non_canonical_bytes.len(), 48);
+
+        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
+            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
+
+        assert!(maybe_affine_point.is_err());
+
+        // compressed, but infinity flag is set and lexicographically largest flag is
+        // set
+        let non_canonical_hex_2 = "e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000";
+
+        let non_canonical_bytes = hex::decode(non_canonical_hex_2).unwrap();
+        assert_eq!(non_canonical_bytes.len(), 48);
+
+        let maybe_affine_point: Result<G1Affine, ark_serialize::SerializationError> =
+            CanonicalDeserialize::deserialize_compressed(&non_canonical_bytes[..]);
+        assert!(maybe_affine_point.is_err());
+    }
 }

From d10b042b57defe80139725489f89c2e3324f74d4 Mon Sep 17 00:00:00 2001
From: mmagician
Date: Sat, 9 Sep 2023 11:23:13 +0200
Subject: [PATCH 10/11] Add changelog entry to "Breaking changes" section

---
 CHANGELOG.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1c39e70c..902b9738 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,8 @@
 
 ### Breaking changes
 
+- [\#176](https://github.com/arkworks-rs/curves/pull/176) Non-canonical infinity point and bad flags in BLS12-381 serialization should fail.
+
 ### Features
 
 - [\#156](https://github.com/arkworks-rs/curves/pull/156) Add the bw6-767 curve.

From 8511349ed92befe783860754a1ee0aebc1047821 Mon Sep 17 00:00:00 2001
From: mmagician
Date: Mon, 11 Sep 2023 09:03:00 +0200
Subject: [PATCH 11/11] moved change to bugfixes section

---
 CHANGELOG.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 902b9738..06d9adfa 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,8 +4,6 @@
 
 ### Breaking changes
 
-- [\#176](https://github.com/arkworks-rs/curves/pull/176) Non-canonical infinity point and bad flags in BLS12-381 serialization should fail.
-
 ### Features
 
 - [\#156](https://github.com/arkworks-rs/curves/pull/156) Add the bw6-767 curve.
@@ -16,6 +14,8 @@
 ### Bugfixes
 
+- [\#176](https://github.com/arkworks-rs/curves/pull/176) Non-canonical infinity point and bad flags in BLS12-381 serialization should fail.
+
 ## v0.4.0
 
 - [\#76](https://github.com/arkworks-rs/curves/pull/76) twisted Edwards parameters for bls12-377
 - Fixed curve benches
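Taken together, the series enforces the header rules from the zkcrypto serialization notes referenced in the tests: bit 7 marks a compressed encoding, bit 6 marks the point at infinity, bit 5 marks the lexicographically larger y, and the last bit is only meaningful for a compressed, non-infinity point. A compact sketch mirroring the sanity rule that the reworked `get_flags` applies (function name is illustrative):

    /// Sketch of the flag sanity rule after this series.
    fn flags_are_sane(first_byte: u8) -> bool {
        let compressed = (first_byte & 0x80) != 0;
        let infinity = (first_byte & 0x40) != 0;
        let largest = (first_byte & 0x20) != 0;
        // "largest" only makes sense for a compressed, non-infinity point.
        !(largest && (!compressed || infinity))
    }

Under this rule 0x80, 0xa0, 0xc0 and 0x40 are plausible leading bytes (the two infinity cases additionally require an all-zero payload), while 0x60 and 0xe0 from `bad_flag_combination` are rejected outright.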