Skip to content

Commit

Permalink
Add extension fields (#15)
Browse files Browse the repository at this point in the history
Co-authored-by: Remco Bloemen <remco@wicked.ventures>
Co-authored-by: Michele Orrù <m@orru.net>
  • Loading branch information
3 people authored Sep 3, 2024
1 parent 9518e27 commit 9b3793f
Show file tree
Hide file tree
Showing 6 changed files with 301 additions and 88 deletions.
48 changes: 25 additions & 23 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,37 +8,42 @@ license = "MIT/Apache-2.0"
resolver = "2"

[patch.crates-io]
ark-std = {git = "https://github.com/arkworks-rs/utils"}
ark-ec = {git = "https://github.com/arkworks-rs/algebra"}
ark-ff = {git = "https://github.com/arkworks-rs/algebra"}
ark-serialize = {git = "https://github.com/arkworks-rs/algebra"}
ark-bls12-381 = {git = "https://github.com/arkworks-rs/algebra"}
ark-curve25519 = {git = "https://github.com/arkworks-rs/algebra"}
ark-pallas = {git = "https://github.com/arkworks-rs/algebra"}
ark-vesta = {git = "https://github.com/arkworks-rs/algebra"}
ark-std = { git = "https://github.com/arkworks-rs/utils" }
ark-ec = { git = "https://github.com/arkworks-rs/algebra" }
ark-ff = { git = "https://github.com/arkworks-rs/algebra" }
ark-serialize = { git = "https://github.com/arkworks-rs/algebra" }
ark-bls12-381 = { git = "https://github.com/arkworks-rs/algebra" }
ark-curve25519 = { git = "https://github.com/arkworks-rs/algebra" }
ark-pallas = { git = "https://github.com/arkworks-rs/algebra" }
ark-vesta = { git = "https://github.com/arkworks-rs/algebra" }

[dependencies]
zeroize = {version="1.6.0", features=["zeroize_derive"]}
rand = {version="0.8.5", features=["getrandom"]}
zeroize = { version = "1.6.0", features = ["zeroize_derive"] }
rand = { version = "0.8.5", features = ["getrandom"] }
digest = "0.10.7"
generic-array = "0.14.7"
# used as default hasher for the prover
keccak = "0.1.4"
keccak = "0.1.4"
log = "0.4.20"
# optional dependencies
ark-ff = {version="0.4.0", optional=true}
ark-ec = {version="0.4.0", optional=true}
ark-serialize = {version="0.4.2", optional=true, features=["std"]}
ark-ff = { version = "0.4.0", optional = true }
ark-ec = { version = "0.4.0", optional = true }
ark-serialize = { version = "0.4.2", optional = true, features = ["std"] }
# anemoi = {git = "https://github.com/anemoi-hash/anemoi-rust", optional=true}
group = {version="0.13.0", optional=true}
ark-bls12-381 = {version="0.4.0", optional=true}
group = { version = "0.13.0", optional = true }
ark-bls12-381 = { version = "0.4.0", optional = true }
rayon = { version = "1.10.0", optional = true }
bytemuck = "1.17.1"
blake3 = "1.5.4"


[features]
default = []
default = []
parallel = ["dep:rayon"]
ark = ["dep:ark-ff", "dep:ark-ec", "dep:ark-serialize"]
group = ["dep:group"]
ark-bls12-381 = ["ark", "dep:ark-bls12-381"]
# rayon = ["dep:rayon"]  # redundant: `dep:rayon` is already exposed via the `parallel` feature above
# anemoi = ["dep:anemoi"]

[dev-dependencies]
Expand All @@ -47,7 +52,7 @@ sha2 = "0.10.7"
blake2 = "0.10.6"
hex = "0.4.3"
# test curve25519 compatibility
curve25519-dalek = {version="4.0.0", features=["group"]}
curve25519-dalek = { version = "4.0.0", features = ["group"] }
ark-curve25519 = "0.4.0"
# test algebraic hashers
bls12_381 = "0.8.0"
Expand All @@ -58,10 +63,7 @@ pasta_curves = "0.5.1"
ark-vesta = { version = "0.4.0", features = ["std"] }

[package.metadata.docs.rs]
rustdoc-args = [
"--html-in-header", "doc/katex-header.html",
"--cfg", "docsrs",
]
rustdoc-args = ["--html-in-header", "doc/katex-header.html", "--cfg", "docsrs"]
features = ["ark", "group"]

[[example]]
Expand All @@ -70,7 +72,7 @@ required-features = ["ark"]

[[example]]
name = "schnorr_algebraic_hash"
required-features = ["ark", "ark-bls12-381"]
required-features = ["ark", "ark-bls12-381"]

[[example]]
name = "bulletproof"
Expand Down
35 changes: 26 additions & 9 deletions src/plugins/ark/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -77,32 +77,43 @@ where

impl<F, T> FieldChallenges<F> for T
where
F: PrimeField,
F: Field,
T: ByteChallenges,
{
fn fill_challenge_scalars(&mut self, output: &mut [F]) -> ProofResult<()> {
let mut buf = vec![0u8; bytes_uniform_modp(F::MODULUS_BIT_SIZE)];
let base_field_size = bytes_uniform_modp(F::BasePrimeField::MODULUS_BIT_SIZE);
let mut buf = vec![0u8; F::extension_degree() as usize * base_field_size];

for o in output.iter_mut() {
self.fill_challenge_bytes(&mut buf)?;
*o = F::from_be_bytes_mod_order(&buf).into();
*o = F::from_base_prime_field_elems(
buf.chunks(base_field_size)
.map(F::BasePrimeField::from_be_bytes_mod_order),
)
.expect("Could not convert");
}
Ok(())
}
}

// Field <-> Field interactions:

impl<H, R, C, const N: usize> FieldPublic<Fp<C, N>> for Merlin<H, Fp<C, N>, R>
impl<F, H, R, C, const N: usize> FieldPublic<F> for Merlin<H, Fp<C, N>, R>
where
F: Field<BasePrimeField = Fp<C, N>>,
H: DuplexHash<Fp<C, N>>,
R: RngCore + CryptoRng,
C: FpConfig<N>,
{
type Repr = ();

fn public_scalars(&mut self, input: &[Fp<C, N>]) -> ProofResult<Self::Repr> {
self.public_units(input)?;
fn public_scalars(&mut self, input: &[F]) -> ProofResult<Self::Repr> {
let flattened: Vec<_> = input
.into_iter()
.map(|f| f.to_base_prime_field_elements())
.flatten()
.collect();
self.public_units(&flattened)?;
Ok(())
}
}
Expand All @@ -126,15 +137,21 @@ where
//
//

impl<H, C, const N: usize> FieldPublic<Fp<C, N>> for Arthur<'_, H, Fp<C, N>>
impl<F, H, C, const N: usize> FieldPublic<F> for Arthur<'_, H, Fp<C, N>>
where
F: Field<BasePrimeField = Fp<C, N>>,
H: DuplexHash<Fp<C, N>>,
C: FpConfig<N>,
{
type Repr = ();

fn public_scalars(&mut self, input: &[Fp<C, N>]) -> ProofResult<Self::Repr> {
self.public_units(input)?;
fn public_scalars(&mut self, input: &[F]) -> ProofResult<Self::Repr> {
let flattened: Vec<_> = input
.into_iter()
.map(|f| f.to_base_prime_field_elements())
.flatten()
.collect();
self.public_units(&flattened)?;
Ok(())
}
}
Expand Down
25 changes: 18 additions & 7 deletions src/plugins/ark/iopattern.rs
Original file line number Diff line number Diff line change
@@ -1,34 +1,45 @@
use ark_ec::CurveGroup;
use ark_ff::{Fp, FpConfig, PrimeField};
use ark_ff::{Field, Fp, FpConfig, PrimeField};

use super::*;
use crate::plugins::{bytes_modp, bytes_uniform_modp};

impl<F, H> FieldIOPattern<F> for IOPattern<H>
where
F: PrimeField,
F: Field,
H: DuplexHash,
{
fn add_scalars(self, count: usize, label: &str) -> Self {
self.add_bytes(count * bytes_modp(F::MODULUS_BIT_SIZE), label)
self.add_bytes(
count
* F::extension_degree() as usize
* bytes_modp(F::BasePrimeField::MODULUS_BIT_SIZE),
label,
)
}

fn challenge_scalars(self, count: usize, label: &str) -> Self {
self.challenge_bytes(count * bytes_uniform_modp(F::MODULUS_BIT_SIZE), label)
self.challenge_bytes(
count
* F::extension_degree() as usize
* bytes_uniform_modp(F::BasePrimeField::MODULUS_BIT_SIZE),
label,
)
}
}

impl<C, H, const N: usize> FieldIOPattern<Fp<C, N>> for IOPattern<H, Fp<C, N>>
impl<F, C, H, const N: usize> FieldIOPattern<F> for IOPattern<H, Fp<C, N>>
where
F: Field<BasePrimeField = Fp<C, N>>,
C: FpConfig<N>,
H: DuplexHash<Fp<C, N>>,
{
fn add_scalars(self, count: usize, label: &str) -> Self {
self.absorb(count, label)
self.absorb(count * F::extension_degree() as usize, label)
}

fn challenge_scalars(self, count: usize, label: &str) -> Self {
self.squeeze(count, label)
self.squeeze(count * F::extension_degree() as usize, label)
}
}

Expand Down
71 changes: 69 additions & 2 deletions src/plugins/ark/tests.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,12 @@
#[cfg(feature = "ark-bls12-381")]
use super::poseidon::PoseidonHash;
use crate::{DefaultHash, DuplexHash, IOPattern, Unit, UnitTranscript};
use crate::{
ByteChallenges, ByteIOPattern, ByteReader, ByteWriter, DefaultHash, DuplexHash, IOPattern,
ProofResult, Unit, UnitTranscript,
};
#[cfg(feature = "ark-bls12-381")]
use ark_bls12_381::Fr;
use ark_bls12_381::{Fq2, Fr};
use ark_ff::Field;

/// Test that the algebraic hashes do use the IV generated from the IO Pattern.
fn check_iv_is_used<H: DuplexHash<F>, F: Unit + Copy + Default + Eq + core::fmt::Debug>() {
Expand Down Expand Up @@ -43,3 +47,66 @@ fn test_poseidon_basic() {
assert_ne!(challenge, F::from(0));
}
}

fn ark_iopattern<F, H>() -> IOPattern<H>
where
F: Field,
H: DuplexHash,
IOPattern<H>: super::FieldIOPattern<F> + ByteIOPattern,
{
use super::{ByteIOPattern, FieldIOPattern};

IOPattern::new("github.com/mmaker/nimue")
.add_scalars(3, "com")
.challenge_bytes(16, "chal")
.add_bytes(16, "resp")
.challenge_scalars(2, "chal")
}

fn test_arkworks_end_to_end<F: Field, H: DuplexHash>() -> ProofResult<()> {
use crate::plugins::ark::{FieldChallenges, FieldReader, FieldWriter};
use rand::Rng;

let mut rng = ark_std::test_rng();
// Generate elements for the transcript
let (f0, f1, f2) = (F::rand(&mut rng), F::rand(&mut rng), F::rand(&mut rng));
let mut b0 = [0; 16];
let mut c0 = [0; 16];
let b1: [u8; 16] = rng.gen();
let mut f3 = [F::ZERO; 2];
let mut g3 = [F::ZERO; 2];

let io_pattern = ark_iopattern::<F, H>();

let mut merlin = io_pattern.to_merlin();

merlin.add_scalars(&[f0, f1, f2])?;
merlin.fill_challenge_bytes(&mut b0)?;
merlin.add_bytes(&b1)?;
merlin.fill_challenge_scalars(&mut f3)?;

let mut arthur = io_pattern.to_arthur(merlin.transcript());
let [g0, g1, g2]: [F; 3] = arthur.next_scalars()?;
arthur.fill_challenge_bytes(&mut c0)?;
let c1: [u8; 16] = arthur.next_bytes()?;
arthur.fill_challenge_scalars(&mut g3)?;

assert_eq!(f0, g0);
assert_eq!(f1, g1);
assert_eq!(f2, g2);
assert_eq!(f3, g3);
assert_eq!(b0, c0);
assert_eq!(b1, c1);

Ok(())
}

#[cfg(feature = "ark-bls12-381")]
#[test]
fn test_arkworks() {
type F = Fr;
type F2 = Fq2;

test_arkworks_end_to_end::<F, DefaultHash>().unwrap();
test_arkworks_end_to_end::<F2, DefaultHash>().unwrap();
}
4 changes: 2 additions & 2 deletions src/plugins/ark/writer.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
use ark_ec::CurveGroup;
use ark_ff::{Fp, FpConfig, PrimeField};
use ark_ff::{Field, Fp, FpConfig};
use ark_serialize::CanonicalSerialize;
use rand::{CryptoRng, RngCore};

use super::{FieldPublic, FieldWriter, GroupPublic, GroupWriter};
use crate::{DuplexHash, Merlin, ProofResult, UnitTranscript};

impl<F: PrimeField, H: DuplexHash, R: RngCore + CryptoRng> FieldWriter<F> for Merlin<H, u8, R> {
impl<F: Field, H: DuplexHash, R: RngCore + CryptoRng> FieldWriter<F> for Merlin<H, u8, R> {
fn add_scalars(&mut self, input: &[F]) -> ProofResult<()> {
let serialized = self.public_scalars(input);
self.transcript.extend(serialized?);
Expand Down
Loading

0 comments on commit 9b3793f

Please sign in to comment.