diff --git a/benches/compressed-snark.rs b/benches/compressed-snark.rs index 32df2744a..b05f65fbd 100644 --- a/benches/compressed-snark.rs +++ b/benches/compressed-snark.rs @@ -62,8 +62,8 @@ fn bench_compressed_snark(c: &mut Criterion) { let mut group = c.benchmark_group(format!("CompressedSNARK-StepCircuitSize-{num_cons}")); group.sample_size(num_samples); - let mut c_primary = NonTrivialTestCircuit::new(num_cons); - let mut c_secondary = TrivialTestCircuit::default(); + let c_primary = NonTrivialTestCircuit::new(num_cons); + let c_secondary = TrivialTestCircuit::default(); // Produce public parameters let pp = PublicParams::::setup( @@ -81,8 +81,8 @@ fn bench_compressed_snark(c: &mut Criterion) { let num_steps = 3; let mut recursive_snark: RecursiveSNARK = RecursiveSNARK::new( &pp, - &mut c_primary, - &mut c_secondary, + &c_primary, + &c_secondary, vec![::Scalar::from(2u64)], vec![::Scalar::from(2u64)], ); @@ -90,8 +90,8 @@ fn bench_compressed_snark(c: &mut Criterion) { for i in 0..num_steps { let res = recursive_snark.prove_step( &pp, - &mut c_primary, - &mut c_secondary, + &c_primary, + &c_secondary, vec![::Scalar::from(2u64)], vec![::Scalar::from(2u64)], ); @@ -155,13 +155,13 @@ fn bench_compressed_snark_with_computational_commitments(c: &mut Criterion) { .sampling_mode(SamplingMode::Flat) .sample_size(num_samples); - let mut c_primary = NonTrivialTestCircuit::new(num_cons); - let mut c_secondary = TrivialTestCircuit::default(); + let c_primary = NonTrivialTestCircuit::new(num_cons); + let c_secondary = TrivialTestCircuit::default(); // Produce public parameters let pp = PublicParams::::setup( - &mut c_primary, - &mut c_secondary, + &c_primary, + &c_secondary, Some(SS1::commitment_key_floor()), Some(SS2::commitment_key_floor()), ) @@ -173,8 +173,8 @@ fn bench_compressed_snark_with_computational_commitments(c: &mut Criterion) { let num_steps = 3; let mut recursive_snark: RecursiveSNARK = RecursiveSNARK::new( &pp, - &mut c_primary, - &mut c_secondary, + 
&c_primary, + &c_secondary, vec![::Scalar::from(2u64)], vec![::Scalar::from(2u64)], ); @@ -182,8 +182,8 @@ fn bench_compressed_snark_with_computational_commitments(c: &mut Criterion) { for i in 0..num_steps { let res = recursive_snark.prove_step( &pp, - &mut c_primary, - &mut c_secondary, + &c_primary, + &c_secondary, vec![::Scalar::from(2u64)], vec![::Scalar::from(2u64)], ); @@ -258,7 +258,7 @@ where } fn synthesize>( - &mut self, + &self, cs: &mut CS, z: &[AllocatedNum], ) -> Result>, SynthesisError> { diff --git a/benches/compute-digest.rs b/benches/compute-digest.rs index 6db530ba8..0e8b9e423 100644 --- a/benches/compute-digest.rs +++ b/benches/compute-digest.rs @@ -63,7 +63,7 @@ where } fn synthesize>( - &mut self, + &self, cs: &mut CS, z: &[AllocatedNum], ) -> Result>, SynthesisError> { diff --git a/src/circuit.rs b/src/circuit.rs index ecd9b81ca..901b63115 100644 --- a/src/circuit.rs +++ b/src/circuit.rs @@ -90,15 +90,15 @@ pub struct NovaAugmentedCircuit<'a, G: Group, SC: StepCircuit> { params: &'a NovaAugmentedCircuitParams, ro_consts: ROConstantsCircuit, inputs: Option>, - step_circuit: &'a mut SC, // The function that is applied for each step + step_circuit: &'a SC, // The function that is applied for each step } impl<'a, G: Group, SC: StepCircuit> NovaAugmentedCircuit<'a, G, SC> { /// Create a new verification circuit for the input relaxed r1cs instances - pub fn new( + pub const fn new( params: &'a NovaAugmentedCircuitParams, inputs: Option>, - step_circuit: &'a mut SC, + step_circuit: &'a SC, ro_consts: ROConstantsCircuit, ) -> Self { Self { @@ -394,19 +394,19 @@ mod tests { G1: Group::Scalar>, G2: Group::Scalar>, { - let mut ttc1 = TrivialTestCircuit::default(); + let ttc1 = TrivialTestCircuit::default(); // Initialize the shape and ck for the primary let circuit1: NovaAugmentedCircuit<'_, G2, TrivialTestCircuit<::Base>> = - NovaAugmentedCircuit::new(primary_params, None, &mut ttc1, ro_consts1.clone()); + NovaAugmentedCircuit::new(primary_params, 
None, &ttc1, ro_consts1.clone()); let mut cs: TestShapeCS = TestShapeCS::new(); let _ = circuit1.synthesize(&mut cs); let (shape1, ck1) = cs.r1cs_shape_and_key(None); assert_eq!(cs.num_constraints(), num_constraints_primary); - let mut ttc2 = TrivialTestCircuit::default(); + let ttc2 = TrivialTestCircuit::default(); // Initialize the shape and ck for the secondary let circuit2: NovaAugmentedCircuit<'_, G1, TrivialTestCircuit<::Base>> = - NovaAugmentedCircuit::new(secondary_params, None, &mut ttc2, ro_consts2.clone()); + NovaAugmentedCircuit::new(secondary_params, None, &ttc2, ro_consts2.clone()); let mut cs: TestShapeCS = TestShapeCS::new(); let _ = circuit2.synthesize(&mut cs); let (shape2, ck2) = cs.r1cs_shape_and_key(None); @@ -425,7 +425,7 @@ mod tests { None, ); let circuit1: NovaAugmentedCircuit<'_, G2, TrivialTestCircuit<::Base>> = - NovaAugmentedCircuit::new(primary_params, Some(inputs1), &mut ttc1, ro_consts1); + NovaAugmentedCircuit::new(primary_params, Some(inputs1), &ttc1, ro_consts1); let _ = circuit1.synthesize(&mut cs1); let (inst1, witness1) = cs1.r1cs_instance_and_witness(&shape1, &ck1).unwrap(); // Make sure that this is satisfiable @@ -444,7 +444,7 @@ mod tests { None, ); let circuit2: NovaAugmentedCircuit<'_, G1, TrivialTestCircuit<::Base>> = - NovaAugmentedCircuit::new(secondary_params, Some(inputs2), &mut ttc2, ro_consts2); + NovaAugmentedCircuit::new(secondary_params, Some(inputs2), &ttc2, ro_consts2); let _ = circuit2.synthesize(&mut cs2); let (inst2, witness2) = cs2.r1cs_instance_and_witness(&shape2, &ck2).unwrap(); // Make sure that it is satisfiable diff --git a/src/gadgets/lookup.rs b/src/gadgets/lookup.rs index eea64f64f..eb10054d3 100644 --- a/src/gadgets/lookup.rs +++ b/src/gadgets/lookup.rs @@ -20,11 +20,12 @@ use super::utils::scalar_as_base; use super::utils::{alloc_one, conditionally_select2, le_bits_to_num}; /// rw trace +#[derive(Clone)] pub enum RWTrace { /// read - Read(T, T), + Read(T, T, T), // addr, read_value, read_counter 
/// write - Write(T, T), + Write(T, T, T, T), // addr, read_value, write_value, read_counter } /// Lookup in R1CS @@ -36,99 +37,17 @@ pub enum TableType { ReadWrite, } -/// for starting a transaction simulation -pub struct LookupTransactionSimulate<'a, G: Group> { - lookup: &'a mut Lookup, - rw_trace: Vec>, - map_aux: BTreeMap, -} - -impl<'a, G: Group> LookupTransactionSimulate<'a, G> { - /// start a new transaction simulated - pub fn start_transaction(lookup: &'a mut Lookup) -> LookupTransactionSimulate<'a, G> { - LookupTransactionSimulate { - lookup, - rw_trace: vec![], - map_aux: BTreeMap::new(), - } - } - - /// read value from table - pub fn read(&mut self, addr: G::Scalar) -> G::Scalar - where - ::Scalar: Ord, - { - let key = &addr; - let (value, _) = self.map_aux.entry(*key).or_insert_with(|| { - self - .lookup - .map_aux - .get(key) - .cloned() - .unwrap_or_else(|| (G::Scalar::from(0), G::Scalar::from(0))) - }); - self.rw_trace.push(RWTrace::Read(addr, *value)); // append read trace - *value - } - /// write value to lookup table - pub fn write(&mut self, addr: G::Scalar, value: G::Scalar) - where - ::Scalar: Ord, - { - let _ = self.map_aux.insert( - addr, - ( - value, - G::Scalar::ZERO, // zero counter doens't matter, real counter will be computed inside lookup table - ), - ); - self.rw_trace.push(RWTrace::Write(addr, value)); // append read trace - } - /// commit rw_trace to lookup - pub fn commit( - &mut self, - ro_consts: ROConstants, - prev_intermediate_gamma: G::Scalar, - ) -> G::Scalar - where - ::Scalar: Ord, - G: Group::Scalar>, - G2: Group::Scalar>, - { - let mut hasher = ::RO::new(ro_consts, 1 + self.rw_trace.len() * 3); - hasher.absorb(prev_intermediate_gamma); - - self.rw_trace.iter().for_each(|rwtrace| { - let (addr, (read_value, read_counter)) = match rwtrace { - RWTrace::Read(addr, value) => (addr, self.lookup.rw_operation(true, *addr, *value)), - RWTrace::Write(addr, value) => (addr, self.lookup.rw_operation(false, *addr, *value)), - }; - 
hasher.absorb(*addr); - hasher.absorb(read_value); - hasher.absorb(read_counter); - }); - let hash_bits = hasher.squeeze(NUM_CHALLENGE_BITS); - scalar_as_base::(hash_bits) - } -} - -/// for starting a transaction -pub struct LookupTransaction<'a, G: Group> { - lookup: &'a mut Lookup, - rw_trace: Vec>>, - map_aux: BTreeMap, +/// for build up a lookup trace +#[derive(Clone)] +pub struct LookupTrace { + expected_rw_trace: Vec>, + rw_trace_allocated_num: Vec>>, + max_cap_rwcounter_log2: usize, + table_type: TableType, + cursor: usize, } -impl<'a, G: Group> LookupTransaction<'a, G> { - /// start a new transaction - pub fn start_transaction(lookup: &'a mut Lookup) -> LookupTransaction<'a, G> { - LookupTransaction { - lookup, - rw_trace: vec![], - map_aux: BTreeMap::new(), - } - } - +impl LookupTrace { /// read value from table pub fn read::Scalar>>( &mut self, @@ -136,45 +55,95 @@ impl<'a, G: Group> LookupTransaction<'a, G> { addr: &AllocatedNum, ) -> Result, SynthesisError> where - ::Scalar: Ord, + ::Scalar: Ord + PartialEq + Eq, { + assert!( + self.cursor < self.expected_rw_trace.len(), + "cursor {} out of range with expected length {}", + self.cursor, + self.expected_rw_trace.len() + ); let key = &addr.get_value().unwrap_or_default(); - let (value, _) = self.map_aux.entry(*key).or_insert_with(|| { + if let RWTrace::Read(expected_addr, expected_read_value, expected_read_counter) = + self.expected_rw_trace[self.cursor] + { + assert!( + *key == expected_addr, + "read address {:?} mismatch with expected {:?}", + *key, + expected_addr + ); + let read_value = + AllocatedNum::alloc(cs.namespace(|| "read_value"), || Ok(expected_read_value))?; + let read_counter = AllocatedNum::alloc(cs.namespace(|| "read_counter"), || { + Ok(expected_read_counter) + })?; self - .lookup - .map_aux - .get(key) - .cloned() - .unwrap_or_else(|| (G::Scalar::from(0), G::Scalar::from(0))) - }); - let read_value = AllocatedNum::alloc(cs.namespace(|| "read_value"), || Ok(*value))?; - 
self.rw_trace.push(RWTrace::Read::>( - addr.clone(), - read_value.clone(), - )); // append read trace - Ok(read_value) + .rw_trace_allocated_num + .push(RWTrace::Read::>( + addr.clone(), + read_value.clone(), + read_counter, + )); // append read trace + + self.cursor += 1; + Ok(read_value) + } else { + Err(SynthesisError::AssignmentMissing) + } } /// write value to lookup table - pub fn write( + pub fn write::Scalar>>( &mut self, + mut cs: CS, addr: &AllocatedNum, value: &AllocatedNum, ) -> Result<(), SynthesisError> where ::Scalar: Ord, { - let _ = self.map_aux.insert( - addr.get_value().unwrap_or_default(), - ( - value.get_value().unwrap_or_default(), - G::Scalar::ZERO, // zero counter doens't matter, real counter will be computed inside lookup table - ), + assert!( + self.cursor < self.expected_rw_trace.len(), + "cursor {} out of range with expected length {}", + self.cursor, + self.expected_rw_trace.len() ); - self - .rw_trace - .push(RWTrace::Write(addr.clone(), value.clone())); // append read trace - Ok(()) + if let RWTrace::Write( + expected_addr, + expected_read_value, + expected_write_value, + expected_read_counter, + ) = self.expected_rw_trace[self.cursor] + { + assert!( + addr.get_value().unwrap_or_default() == expected_addr, + "write address {:?} mismatch with expected {:?}", + addr.get_value().unwrap_or_default(), + expected_addr + ); + assert!( + value.get_value().unwrap_or_default() == expected_write_value, + "write value {:?} mismatch with expected {:?}", + value.get_value().unwrap_or_default(), + expected_write_value + ); + let expected_read_value = + AllocatedNum::alloc(cs.namespace(|| "read_value"), || Ok(expected_read_value))?; + let expected_read_counter = AllocatedNum::alloc(cs.namespace(|| "read_counter"), || { + Ok(expected_read_counter) + })?; + self.rw_trace_allocated_num.push(RWTrace::Write( + addr.clone(), + expected_read_value, + value.clone(), + expected_read_counter, + )); // append write trace + self.cursor += 1; + Ok(()) + } else { 
+ Err(SynthesisError::AssignmentMissing) + } } /// commit rw_trace to lookup @@ -204,27 +173,28 @@ impl<'a, G: Group> LookupTransaction<'a, G> { { let mut ro = G2::ROCircuit::new( ro_const, - 1 + 3 * self.rw_trace.len(), // prev_challenge + [(address, value, counter)] + 1 + 3 * self.expected_rw_trace.len(), // prev_challenge + [(address, value, counter)] ); ro.absorb(prev_intermediate_gamma); - let (next_R, next_W, next_rw_counter) = self.rw_trace.iter().enumerate().try_fold( + let rw_trace_allocated_num = self.rw_trace_allocated_num.clone(); + let (next_R, next_W, next_rw_counter) = rw_trace_allocated_num.iter().enumerate().try_fold( (prev_R.clone(), prev_W.clone(), prev_rw_counter.clone()), |(prev_R, prev_W, prev_rw_counter), (i, rwtrace)| match rwtrace { - RWTrace::Read(addr, read_value) => { - let (next_R, next_W, next_rw_counter, read_value, read_counter) = - self.lookup.rw_operation_circuit( - cs.namespace(|| format!("{}th read ", i)), - true, - addr, - gamma, - read_value, - &prev_R, - &prev_W, - &prev_rw_counter, - )?; + RWTrace::Read(addr, read_value, expected_read_counter) => { + let (next_R, next_W, next_rw_counter) = self.rw_operation_circuit( + cs.namespace(|| format!("{}th read ", i)), + addr, + gamma, + read_value, + read_value, + &prev_R, + &prev_W, + expected_read_counter, + &prev_rw_counter, + )?; ro.absorb(addr); - ro.absorb(&read_value); - ro.absorb(&read_counter); + ro.absorb(read_value); + ro.absorb(expected_read_counter); Ok::< ( AllocatedNum, @@ -234,21 +204,21 @@ impl<'a, G: Group> LookupTransaction<'a, G> { SynthesisError, >((next_R, next_W, next_rw_counter)) } - RWTrace::Write(addr, write_value) => { - let (next_R, next_W, next_rw_counter, read_value, read_counter) = - self.lookup.rw_operation_circuit( - cs.namespace(|| format!("{}th write ", i)), - false, - addr, - gamma, - write_value, - &prev_R, - &prev_W, - &prev_rw_counter, - )?; + RWTrace::Write(addr, read_value, write_value, read_counter) => { + let (next_R, next_W, 
next_rw_counter) = self.rw_operation_circuit( + cs.namespace(|| format!("{}th write ", i)), + addr, + gamma, + read_value, + write_value, + &prev_R, + &prev_W, + read_counter, + &prev_rw_counter, + )?; ro.absorb(addr); - ro.absorb(&read_value); - ro.absorb(&read_counter); + ro.absorb(read_value); + ro.absorb(read_counter); Ok::< ( AllocatedNum, @@ -264,149 +234,30 @@ impl<'a, G: Group> LookupTransaction<'a, G> { let hash = le_bits_to_num(cs.namespace(|| "bits to hash"), &hash_bits)?; Ok((next_R, next_W, next_rw_counter, hash)) } -} - -/// Lookup in R1CS -#[derive(Clone, Debug)] -pub struct Lookup { - pub(crate) map_aux: BTreeMap, // (value, counter) - /// map_aux_dirty only include the modified fields of `map_aux`, thats why called dirty - map_aux_dirty: BTreeMap, // (value, counter) - rw_counter: F, - table_type: TableType, // read only or read-write - max_cap_rwcounter_log2: usize, // max cap for rw_counter operation in bits -} - -impl Lookup { - /// new lookup table - pub fn new( - max_cap_rwcounter: usize, - table_type: TableType, - initial_table: Vec<(F, F)>, - ) -> Lookup - where - F: Ord, - { - let max_cap_rwcounter_log2 = max_cap_rwcounter.log_2(); - Self { - map_aux: initial_table - .into_iter() - .map(|(addr, value)| (addr, (value, F::ZERO))) - .collect(), - map_aux_dirty: BTreeMap::new(), - rw_counter: F::ZERO, - table_type, - max_cap_rwcounter_log2, - } - } - - /// get table vector - /// very costly operation - pub fn get_table(&self) -> Vec<(F, F, F)> { - self - .map_aux - .iter() - .map(|(addr, (value, counter))| (*addr, *value, *counter)) - .collect() - } - - /// table size - pub fn table_size(&self) -> usize { - self.map_aux.len() - } - - fn rw_operation(&mut self, is_read: bool, addr: F, external_value: F) -> (F, F) - where - F: Ord, - { - // write operations - if !is_read { - debug_assert!(self.table_type == TableType::ReadWrite) // table need to set as rw - } - let (_read_value, _read_counter) = self - .map_aux - .get(&addr) - .cloned() - 
.unwrap_or((F::from(0), F::from(0))); - - let (write_value, write_counter) = ( - if is_read { _read_value } else { external_value }, - if self.table_type == TableType::ReadOnly { - _read_counter - } else { - max(self.rw_counter, _read_counter) - } + F::ONE, - ); - self.map_aux.insert(addr, (write_value, write_counter)); - self - .map_aux_dirty - .insert(addr, (write_value, write_counter)); - self.rw_counter = write_counter; - (_read_value, _read_counter) - } #[allow(clippy::too_many_arguments)] - fn rw_operation_circuit>( + fn rw_operation_circuit>( &mut self, mut cs: CS, - is_read: bool, addr: &AllocatedNum, // challenges: &(AllocatedNum, AllocatedNum), gamma: &AllocatedNum, - external_value: &AllocatedNum, + read_value: &AllocatedNum, + write_value: &AllocatedNum, prev_R: &AllocatedNum, prev_W: &AllocatedNum, + read_counter: &AllocatedNum, prev_rw_counter: &AllocatedNum, - ) -> Result< - ( - AllocatedNum, - AllocatedNum, - AllocatedNum, - AllocatedNum, - AllocatedNum, - ), - SynthesisError, - > + ) -> Result<(AllocatedNum, AllocatedNum, AllocatedNum), SynthesisError> where F: Ord, { - // extract challenge - // get content from map - // value are provided beforehand from outside, therefore here just constraints it - let (_read_value, _read_counter) = self - .map_aux - .get(&addr.get_value().unwrap_or_default()) - .cloned() - .unwrap_or((F::from(0), F::from(0))); - - let read_counter = AllocatedNum::alloc(cs.namespace(|| "counter"), || Ok(_read_counter))?; - - // external_read_value should match with _read_value - if is_read { - if let Some(external_read_value) = external_value.get_value() { - assert_eq!(external_read_value, _read_value) - } - }; - - // external_read_value should match with rw_counter witness - if let Some(external_rw_counter) = prev_rw_counter.get_value() { - assert_eq!(external_rw_counter, self.rw_counter) - } - - let one = F::ONE; - // update R let gamma_square = gamma.mul(cs.namespace(|| "gamme^2"), gamma)?; // read_value_term = gamma * value - 
let read_value = if is_read { - external_value.clone() - } else { - AllocatedNum::alloc(cs.namespace(|| "read_value"), || Ok(_read_value))? - }; - let read_value_term = gamma.mul(cs.namespace(|| "read_value_term"), &read_value)?; + let read_value_term = gamma.mul(cs.namespace(|| "read_value_term"), read_value)?; // counter_term = gamma^2 * counter - let read_counter_term = - gamma_square.mul(cs.namespace(|| "read_counter_term"), &read_counter)?; + let read_counter_term = gamma_square.mul(cs.namespace(|| "read_counter_term"), read_counter)?; // new_R = R * (gamma - (addr + gamma * value + gamma^2 * counter)) let new_R = AllocatedNum::alloc(cs.namespace(|| "new_R"), || { prev_R @@ -443,7 +294,7 @@ impl Lookup { // TODO optimise with `max` table lookup to save more constraints let lt = less_than( cs.namespace(|| "read_counter < a"), - &read_counter, + read_counter, prev_rw_counter, self.max_cap_rwcounter_log2, )?; @@ -452,7 +303,7 @@ impl Lookup { "write_counter = read_counter < prev_rw_counter ? prev_rw_counter: read_counter" }), prev_rw_counter, - &read_counter, + read_counter, <, )?; let write_counter_term = @@ -464,11 +315,7 @@ impl Lookup { // update W // write_value_term = gamma * value - let write_value_term = if is_read { - read_value_term - } else { - gamma.mul(cs.namespace(|| "write_value_term"), external_value)? 
- }; + let write_value_term = gamma.mul(cs.namespace(|| "write_value_term"), write_value)?; let new_W = AllocatedNum::alloc(cs.namespace(|| "new_W"), || { prev_W .get_value() @@ -498,30 +345,200 @@ impl Lookup { |lc| lc + new_W.get_variable(), ); - // update witness - self.map_aux.insert( - addr.get_value().unwrap_or_default(), - ( - external_value.get_value().unwrap_or_default(), - write_counter.get_value().unwrap_or_default() + one, - ), - ); - self.map_aux_dirty.insert( - addr.get_value().unwrap_or_default(), - ( - external_value.get_value().unwrap_or_default(), - write_counter.get_value().unwrap_or_default() + one, - ), - ); let new_rw_counter = add_allocated_num( cs.namespace(|| "new_rw_counter"), &write_counter, &alloc_num_one, )?; - if let Some(new_rw_counter) = new_rw_counter.get_value() { - self.rw_counter = new_rw_counter; + Ok((new_R, new_W, new_rw_counter)) + } +} + +/// for build up a lookup trace +pub struct LookupTraceBuilder<'a, G: Group> { + lookup: &'a mut Lookup, + rw_trace: Vec>, + map_aux: BTreeMap, +} + +impl<'a, G: Group> LookupTraceBuilder<'a, G> { + /// start a new transaction simulated + pub fn new(lookup: &'a mut Lookup) -> LookupTraceBuilder<'a, G> { + LookupTraceBuilder { + lookup, + rw_trace: vec![], + map_aux: BTreeMap::new(), + } + } + + /// read value from table + pub fn read(&mut self, addr: G::Scalar) -> G::Scalar + where + ::Scalar: Ord, + { + let key = &addr; + let (value, _) = self.map_aux.entry(*key).or_insert_with(|| { + self + .lookup + .map_aux + .get(key) + .cloned() + .unwrap_or((G::Scalar::ZERO, G::Scalar::ZERO)) + }); + self + .rw_trace + .push(RWTrace::Read(addr, *value, G::Scalar::ZERO)); + *value + } + /// write value to lookup table + pub fn write(&mut self, addr: G::Scalar, value: G::Scalar) + where + ::Scalar: Ord, + { + let _ = self.map_aux.insert( + addr, + ( + value, + G::Scalar::ZERO, // zero counter doens't matter, real counter will provided in snapshot stage + ), + ); + self.rw_trace.push(RWTrace::Write( + 
addr, + G::Scalar::ZERO, + value, + G::Scalar::ZERO, + )); // append read trace + } + + /// commit rw_trace to lookup + pub fn snapshot( + &mut self, + ro_consts: ROConstants, + prev_intermediate_gamma: G::Scalar, + ) -> (G::Scalar, LookupTrace) + where + ::Scalar: Ord, + G: Group::Scalar>, + G2: Group::Scalar>, + { + let mut hasher = ::RO::new(ro_consts, 1 + self.rw_trace.len() * 3); + hasher.absorb(prev_intermediate_gamma); + + self.rw_trace = self + .rw_trace + .iter() + .map(|rwtrace| { + let (addr, (read_value, read_counter)) = match rwtrace { + RWTrace::Read(addr, _, _) => (addr, self.lookup.rw_operation(*addr, None)), + RWTrace::Write(addr, _, write_value, _) => { + (addr, self.lookup.rw_operation(*addr, Some(*write_value))) + } + }; + hasher.absorb(*addr); + hasher.absorb(read_value); + hasher.absorb(read_counter); + match rwtrace { + RWTrace::Read(..) => RWTrace::Read(*addr, read_value, read_counter), + RWTrace::Write(_, _, write_value, _) => { + RWTrace::Write(*addr, read_value, *write_value, read_counter) + } + } + }) + .collect(); + let hash_bits = hasher.squeeze(NUM_CHALLENGE_BITS); + let rw_trace = self.rw_trace.to_vec(); + self.rw_trace.clear(); + let next_intermediate_gamma = scalar_as_base::(hash_bits); + ( + next_intermediate_gamma, + LookupTrace { + expected_rw_trace: rw_trace, + rw_trace_allocated_num: vec![], + cursor: 0, + max_cap_rwcounter_log2: self.lookup.max_cap_rwcounter_log2, + table_type: self.lookup.table_type.clone(), + }, + ) + } +} + +/// Lookup in R1CS +#[derive(Clone, Debug)] +pub struct Lookup { + pub(crate) map_aux: BTreeMap, // (value, counter) + /// map_aux_dirty only include the modified fields of `map_aux`, thats why called dirty + map_aux_dirty: BTreeMap, // (value, counter) + rw_counter: F, + pub(crate) table_type: TableType, // read only or read-write + pub(crate) max_cap_rwcounter_log2: usize, // max cap for rw_counter operation in bits +} + +impl Lookup { + /// new lookup table + pub fn new( + max_cap_rwcounter: usize, 
+ table_type: TableType, + initial_table: Vec<(F, F)>, + ) -> Lookup + where + F: Ord, + { + let max_cap_rwcounter_log2 = max_cap_rwcounter.log_2(); + Self { + map_aux: initial_table + .into_iter() + .map(|(addr, value)| (addr, (value, F::ZERO))) + .collect(), + map_aux_dirty: BTreeMap::new(), + rw_counter: F::ZERO, + table_type, + max_cap_rwcounter_log2, + } + } + + /// get table vector + /// very costly operation + pub fn get_table(&self) -> Vec<(F, F, F)> { + self + .map_aux + .iter() + .map(|(addr, (value, counter))| (*addr, *value, *counter)) + .collect() + } + + /// table size + pub fn table_size(&self) -> usize { + self.map_aux.len() + } + + fn rw_operation(&mut self, addr: F, external_value: Option) -> (F, F) + where + F: Ord, + { + // write operations + if external_value.is_some() { + debug_assert!(self.table_type == TableType::ReadWrite) // table need to set as rw } - Ok((new_R, new_W, new_rw_counter, read_value, read_counter)) + let (read_value, read_counter) = self + .map_aux + .get(&addr) + .cloned() + .unwrap_or((F::from(0), F::from(0))); + + let (write_value, write_counter) = ( + external_value.unwrap_or(read_value), + if self.table_type == TableType::ReadOnly { + read_counter + } else { + max(self.rw_counter, read_counter) + } + F::ONE, + ); + self.map_aux.insert(addr, (write_value, write_counter)); + self + .map_aux_dirty + .insert(addr, (write_value, write_counter)); + self.rw_counter = write_counter; + (read_value, read_counter) } // fn write(&mut self, addr: AllocatedNum, value: F) {} @@ -597,7 +614,7 @@ mod test { // bellpepper::test_shape_cs::TestShapeCS, constants::NUM_CHALLENGE_BITS, gadgets::{ - lookup::{LookupTransaction, LookupTransactionSimulate, TableType}, + lookup::{LookupTraceBuilder, TableType}, utils::{alloc_one, alloc_zero, scalar_as_base}, }, provider::poseidon::PoseidonConstantsCircuit, @@ -623,21 +640,21 @@ mod test { ]; let mut lookup = Lookup::<::Scalar>::new(1024, TableType::ReadWrite, initial_table); - let mut 
lookup_transaction = LookupTransactionSimulate::::start_transaction(&mut lookup); + let mut lookup_trace_builder = LookupTraceBuilder::::new(&mut lookup); let prev_intermediate_gamma = ::Scalar::ONE; - let read_value = lookup_transaction.read(::Scalar::ZERO); + let read_value = lookup_trace_builder.read(::Scalar::ZERO); assert_eq!(read_value, ::Scalar::ZERO); - let read_value = lookup_transaction.read(::Scalar::ONE); + let read_value = lookup_trace_builder.read(::Scalar::ONE); assert_eq!(read_value, ::Scalar::ONE); - lookup_transaction.write( + lookup_trace_builder.write( ::Scalar::ZERO, ::Scalar::from(111), ); - let read_value = lookup_transaction.read(::Scalar::ZERO); + let read_value = lookup_trace_builder.read(::Scalar::ZERO); assert_eq!(read_value, ::Scalar::from(111),); - let next_intermediate_gamma = - lookup_transaction.commit::(ro_consts.clone(), prev_intermediate_gamma); + let (next_intermediate_gamma, _) = + lookup_trace_builder.snapshot::(ro_consts.clone(), prev_intermediate_gamma); let mut hasher = ::RO::new(ro_consts, 1 + 3 * 4); hasher.absorb(prev_intermediate_gamma); @@ -674,7 +691,7 @@ mod test { (::Scalar::ONE, ::Scalar::ZERO), ]; let mut lookup = Lookup::<::Scalar>::new(1024, TableType::ReadOnly, initial_table); - let mut lookup_transaction = LookupTransaction::::start_transaction(&mut lookup); + let mut lookup_trace_builder = LookupTraceBuilder::::new(&mut lookup); let gamma = AllocatedNum::alloc(cs.namespace(|| "gamma"), || { Ok(::Scalar::from(2)) }) @@ -684,22 +701,33 @@ mod test { let prev_intermediate_gamma = &one; let prev_rw_counter = &zero; let addr = zero.clone(); - let read_value = lookup_transaction + let read_value = lookup_trace_builder.read(addr.get_value().unwrap()); + assert_eq!(read_value, ::Scalar::from(101)); + let read_value = lookup_trace_builder.read(addr.get_value().unwrap()); + assert_eq!(read_value, ::Scalar::from(101)); + let (_, mut lookup_trace) = lookup_trace_builder.snapshot::( + ro_consts.clone(), + 
prev_intermediate_gamma.get_value().unwrap(), + ); + + let read_value = lookup_trace .read(cs.namespace(|| "read_value1"), &addr) .unwrap(); assert_eq!( read_value.get_value(), Some(::Scalar::from(101)) ); - let read_value = lookup_transaction + + let read_value = lookup_trace .read(cs.namespace(|| "read_value2"), &addr) .unwrap(); assert_eq!( read_value.get_value(), Some(::Scalar::from(101)) ); + let (prev_W, prev_R) = (&one, &one); - let (next_R, next_W, next_rw_counter, next_intermediate_gamma) = lookup_transaction + let (next_R, next_W, next_rw_counter, next_intermediate_gamma) = lookup_trace .commit::( cs.namespace(|| "commit"), ro_consts.clone(), @@ -778,7 +806,7 @@ mod test { ]; let mut lookup = Lookup::<::Scalar>::new(1024, TableType::ReadWrite, initial_table); - let mut lookup_transaction = LookupTransaction::::start_transaction(&mut lookup); + let mut lookup_trace_builder = LookupTraceBuilder::::new(&mut lookup); let gamma = AllocatedNum::alloc(cs.namespace(|| "gamma"), || { Ok(::Scalar::from(2)) }) @@ -788,24 +816,34 @@ mod test { let prev_intermediate_gamma = &one; let prev_rw_counter = &zero; let addr = zero.clone(); - lookup_transaction - .write( - &addr, - &AllocatedNum::alloc(cs.namespace(|| "write value 1"), || { - Ok(::Scalar::from(101)) - }) - .unwrap(), - ) + let write_value_1 = AllocatedNum::alloc(cs.namespace(|| "write value 1"), || { + Ok(::Scalar::from(101)) + }) + .unwrap(); + lookup_trace_builder.write( + addr.get_value().unwrap(), + write_value_1.get_value().unwrap(), + ); + let read_value = lookup_trace_builder.read(addr.get_value().unwrap()); + // cs.namespace(|| "read_value 1"), + assert_eq!(read_value, ::Scalar::from(101)); + let (_, mut lookup_trace) = lookup_trace_builder.snapshot::( + ro_consts.clone(), + prev_intermediate_gamma.get_value().unwrap(), + ); + lookup_trace + .write(cs.namespace(|| "write_value 1"), &addr, &write_value_1) .unwrap(); - let read_value = lookup_transaction + let read_value = lookup_trace 
.read(cs.namespace(|| "read_value 1"), &addr) .unwrap(); assert_eq!( read_value.get_value(), Some(::Scalar::from(101)) ); + let (prev_W, prev_R) = (&one, &one); - let (next_R, next_W, next_rw_counter, next_intermediate_gamma) = lookup_transaction + let (next_R, next_W, next_rw_counter, next_intermediate_gamma) = lookup_trace .commit::( cs.namespace(|| "commit"), ro_consts.clone(), diff --git a/src/lib.rs b/src/lib.rs index 515c163ca..14ac9a5ce 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -128,11 +128,11 @@ where let ro_consts_circuit_secondary: ROConstantsCircuit = ROConstantsCircuit::::default(); // Initialize ck for the primary - let mut c_primary = c_primary.clone(); + let c_primary = c_primary.clone(); let circuit_primary: NovaAugmentedCircuit<'_, G2, C1> = NovaAugmentedCircuit::new( &augmented_circuit_params_primary, None, - &mut c_primary, + &c_primary, ro_consts_circuit_primary.clone(), ); let mut cs: ShapeCS = ShapeCS::new(); @@ -140,11 +140,11 @@ where let (r1cs_shape_primary, ck_primary) = cs.r1cs_shape_and_key(optfn1); // Initialize ck for the secondary - let mut c_secondary = c_secondary.clone(); + let c_secondary = c_secondary.clone(); let circuit_secondary: NovaAugmentedCircuit<'_, G1, C2> = NovaAugmentedCircuit::new( &augmented_circuit_params_secondary, None, - &mut c_secondary, + &c_secondary, ro_consts_circuit_secondary.clone(), ); let mut cs: ShapeCS = ShapeCS::new(); @@ -296,8 +296,8 @@ where /// Create new instance of recursive SNARK pub fn new( pp: &PublicParams, - c_primary: &mut C1, - c_secondary: &mut C2, + c_primary: &C1, + c_secondary: &C2, z0_primary: Vec, z0_secondary: Vec, ) -> Self { @@ -409,8 +409,8 @@ where pub fn prove_step( &mut self, pp: &PublicParams, - c_primary: &mut C1, - c_secondary: &mut C2, + c_primary: &C1, + c_secondary: &C2, z0_primary: Vec, z0_secondary: Vec, ) -> Result<(), NovaError> { @@ -913,9 +913,7 @@ type CE = ::CE; mod tests { use crate::bellpepper::test_shape_cs::TestShapeCS; use 
crate::constants::NUM_CHALLENGE_BITS; - use crate::gadgets::lookup::{ - less_than, Lookup, LookupTransaction, LookupTransactionSimulate, TableType, - }; + use crate::gadgets::lookup::{less_than, Lookup, LookupTrace, LookupTraceBuilder, TableType}; use crate::gadgets::utils::{alloc_const, alloc_one, conditionally_select2}; use crate::provider::bn256_grumpkin::{bn256, grumpkin}; use crate::provider::keccak::Keccak256Transcript; @@ -959,7 +957,7 @@ mod tests { } fn synthesize>( - &mut self, + &self, cs: &mut CS, z: &[AllocatedNum], ) -> Result>, SynthesisError> { @@ -1472,7 +1470,7 @@ mod tests { } fn synthesize>( - &mut self, + &self, cs: &mut CS, z: &[AllocatedNum], ) -> Result>, SynthesisError> { @@ -1644,7 +1642,7 @@ mod tests { test_ivc_base_with::(); } - fn print_constraints_name_on_error_index(err: &NovaError, mut c_primary: C1) + fn print_constraints_name_on_error_index(err: &NovaError, c_primary: &C1) where G1: Group::Scalar>, G2: Group::Scalar>, @@ -1660,7 +1658,7 @@ mod tests { let circuit_primary: NovaAugmentedCircuit<'_, G2, C1> = NovaAugmentedCircuit::new( &augmented_circuit_params_primary, None, - &mut c_primary, + c_primary, ro_consts_circuit_primary, ); // let mut cs: ShapeCS = ShapeCS::new(); @@ -1688,7 +1686,7 @@ mod tests { G1: Group::Scalar>, G2: Group::Scalar>, { - lookup: Lookup, + lookup_trace: LookupTrace, ro_consts: ROConstantsCircuit, max_value_bits: usize, _phantom: PhantomData, @@ -1700,30 +1698,64 @@ mod tests { G1: Group::Scalar>, G2: Group::Scalar>, { - fn new(initial_table: &Lookup, ro_consts: ROConstantsCircuit) -> Self { + fn new( + initial_table: &Lookup, + ro_consts_circuit: ROConstantsCircuit, + ) -> (Vec, Lookup, G1::Scalar) { let n = initial_table.table_size(); let initial_table = initial_table.clone(); + let initial_index = (n - 4) / 2; let max_value_bits = (n - 2).log_2() + 1; // + 1 as a buffer - HeapifyCircuit { - lookup: initial_table, - ro_consts, - max_value_bits, - _phantom: PhantomData:: {}, + let 
initial_intermediate_gamma = ::Scalar::from(1); + + let mut lookup = initial_table.clone(); + let num_steps = initial_index; + let mut intermediate_gamma = initial_intermediate_gamma; + // simulate folding step lookup io + let mut primary_circuits = vec![]; + let ro_consts = <::RO as ROTrait< + ::Base, + ::Scalar, + >>::Constants::default(); + for i in 0..num_steps + 1 { + let mut lookup_trace_builder = LookupTraceBuilder::::new(&mut lookup); + let addr = G1::Scalar::from((num_steps - i) as u64); + let parent = lookup_trace_builder.read(addr); + let left_child = lookup_trace_builder.read(G1::Scalar::from(2) * addr + G1::Scalar::ONE); + let right_child = + lookup_trace_builder.read(G1::Scalar::from(2) * addr + G1::Scalar::from(2)); + let tmp = if left_child < parent { + left_child + } else { + parent + }; + let tmp = if right_child < tmp { right_child } else { tmp }; + lookup_trace_builder.write(addr, tmp); + let res = lookup_trace_builder.snapshot::(ro_consts.clone(), intermediate_gamma); + intermediate_gamma = res.0; + let (_, lookup_trace) = res; + primary_circuits.push(Self { + lookup_trace, + ro_consts: ro_consts_circuit.clone(), + max_value_bits, + _phantom: PhantomData:: {}, + }); } + + (primary_circuits, lookup, intermediate_gamma) } - // get_z0 - // NOTE: challenge validation will be defered to CompressSNARK fn get_z0( ck: &CommitmentKey, - initial_table: &Lookup, - ) -> (Vec, Lookup) + final_table: &Lookup, + intermediate_gamma: G1::Scalar, + ) -> Vec where G1: Group::Scalar>, G2: Group::Scalar>, { - let n = initial_table.table_size(); + let n = final_table.table_size(); let initial_index = (n - 4) / 2; let (initial_intermediate_gamma, init_prev_R, init_prev_W, init_rw_counter) = ( ::Scalar::from(1), @@ -1737,35 +1769,12 @@ mod tests { ::Scalar, >>::Constants::default(); - let mut lookup = initial_table.clone(); - let num_steps = initial_index; - let mut intermediate_gamma = initial_intermediate_gamma; - // simulate folding step lookup io - for i in 
0..num_steps + 1 { - let mut lookup_transaction = - LookupTransactionSimulate::::start_transaction(&mut lookup); - let addr = G1::Scalar::from((num_steps - i) as u64); - let parent = lookup_transaction.read(addr); - let left_child = lookup_transaction.read(G1::Scalar::from(2) * addr + G1::Scalar::ONE); - let right_child = - lookup_transaction.read(G1::Scalar::from(2) * addr + G1::Scalar::from(2)); - let tmp = if left_child < parent { - left_child - } else { - parent - }; - let tmp = if right_child < tmp { right_child } else { tmp }; - lookup_transaction.write(addr, tmp); - intermediate_gamma = - lookup_transaction.commit::(ro_consts.clone(), intermediate_gamma) - } - - let final_values: Vec<::Scalar> = lookup + let final_values: Vec<::Scalar> = final_table .get_table() .iter() .map(|(_, value, _)| *value) .collect(); - let final_counters: Vec<::Scalar> = lookup + let final_counters: Vec<::Scalar> = final_table .get_table() .iter() .map(|(_, _, counter)| *counter) @@ -1795,17 +1804,14 @@ mod tests { let hash_bits = hasher.squeeze(NUM_CHALLENGE_BITS); let gamma = scalar_as_base::(hash_bits); - ( - vec![ - initial_intermediate_gamma, - gamma, - init_prev_R, - init_prev_W, - init_rw_counter, - G1::Scalar::from(initial_index as u64), - ], - lookup, - ) + vec![ + initial_intermediate_gamma, + gamma, + init_prev_R, + init_prev_W, + init_rw_counter, + G1::Scalar::from(initial_index as u64), + ] } } @@ -1821,11 +1827,11 @@ mod tests { } fn synthesize>( - &mut self, + &self, cs: &mut CS, z: &[AllocatedNum], ) -> Result>, SynthesisError> { - let mut lookup_transaction = LookupTransaction::::start_transaction(&mut self.lookup); + let mut lookup_trace = self.lookup_trace.clone(); let prev_intermediate_gamma = &z[0]; let gamma = &z[1]; let prev_R = &z[2]; @@ -1857,11 +1863,9 @@ mod tests { |lc| lc + CS::one(), |lc| lc + right_child_index.get_variable(), ); - let parent = lookup_transaction.read(cs.namespace(|| "parent"), index)?; - let left_child = - 
lookup_transaction.read(cs.namespace(|| "left_child"), &left_child_index)?; - let right_child = - lookup_transaction.read(cs.namespace(|| "right_child"), &right_child_index)?; + let parent = lookup_trace.read(cs.namespace(|| "parent"), index)?; + let left_child = lookup_trace.read(cs.namespace(|| "left_child"), &left_child_index)?; + let right_child = lookup_trace.read(cs.namespace(|| "right_child"), &right_child_index)?; let is_left_child_smaller = less_than( cs.namespace(|| "left_child < parent"), @@ -1891,11 +1895,11 @@ mod tests { &is_right_child_smaller, )?; - lookup_transaction.write(index, &smallest)?; + lookup_trace.write(cs.namespace(|| "write_parent"), index, &smallest)?; // commit the rw change - let (next_R, next_W, next_rw_counter, next_intermediate_gamma) = - lookup_transaction.commit::>::Root>>( + let (next_R, next_W, next_rw_counter, next_intermediate_gamma) = lookup_trace + .commit::>::Root>>( cs.namespace(|| "commit"), self.ro_consts.clone(), prev_intermediate_gamma, @@ -1943,7 +1947,7 @@ mod tests { } fn synthesize>( - &mut self, + &self, _cs: &mut CS, z: &[AllocatedNum], ) -> Result>, SynthesisError> { @@ -1967,11 +1971,12 @@ mod tests { Lookup::new(heap_size * 4, TableType::ReadWrite, initial_table) }; - let mut circuit_primary = HeapifyCircuit::new(&initial_table, ro_consts.clone()); + let (circuit_primaries, final_table, intermediate_gamma) = + HeapifyCircuit::new(&initial_table, ro_consts); // let mut circuit_primary = TrivialTestCircuit::default(); // let z0_primary = vec![::Scalar::ZERO; 6]; - let mut circuit_secondary = TrivialTestCircuit::default(); + let circuit_secondary = TrivialTestCircuit::default(); // let mut circuit_primary = TrivialTestCircuit::default(); // produce public parameters @@ -1980,11 +1985,11 @@ mod tests { G2, HeapifyCircuit, TrivialTestCircuit<::Scalar>, - >::setup(&circuit_primary, &circuit_secondary, None, None) + >::setup(&circuit_primaries[0], &circuit_secondary, None, None) .unwrap(); - let (z0_primary, 
final_table) = - HeapifyCircuit::::get_z0(&pp.ck_primary, &initial_table); + let z0_primary = + HeapifyCircuit::::get_z0(&pp.ck_primary, &initial_table, intermediate_gamma); // println!("num constraints {:?}", pp.num_constraints()); // 5th is initial index. @@ -2001,8 +2006,8 @@ mod tests { TrivialTestCircuit<::Scalar>, > = RecursiveSNARK::new( &pp, - &mut circuit_primary, - &mut circuit_secondary, + &circuit_primaries[0], + &circuit_secondary, z0_primary.clone(), z0_secondary.clone(), ); @@ -2011,8 +2016,8 @@ mod tests { println!("step i {}", i); let res = recursive_snark.prove_step( &pp, - &mut circuit_primary, - &mut circuit_secondary.clone(), + &circuit_primaries[i as usize + 1], + &circuit_secondary.clone(), z0_primary.clone(), z0_secondary.clone(), ); @@ -2027,7 +2032,7 @@ mod tests { res .clone() .map_err(|err| { - print_constraints_name_on_error_index::(&err, circuit_primary.clone()) + print_constraints_name_on_error_index::(&err, &circuit_primaries[0]) }) .unwrap(); assert!(res.is_ok()); diff --git a/src/spartan/direct.rs b/src/spartan/direct.rs index b7b2500b3..4825380ea 100644 --- a/src/spartan/direct.rs +++ b/src/spartan/direct.rs @@ -23,10 +23,7 @@ struct DirectCircuit> { } impl> Circuit for DirectCircuit { - fn synthesize>( - mut self, - cs: &mut CS, - ) -> Result<(), SynthesisError> { + fn synthesize>(self, cs: &mut CS) -> Result<(), SynthesisError> { // obtain the arity information let arity = self.sc.arity(); @@ -175,7 +172,7 @@ mod tests { } fn synthesize>( - &mut self, + &self, cs: &mut CS, z: &[AllocatedNum], ) -> Result>, SynthesisError> { diff --git a/src/spartan/lookupsnark.rs b/src/spartan/lookupsnark.rs index 10b921ccc..f9a26ab7d 100644 --- a/src/spartan/lookupsnark.rs +++ b/src/spartan/lookupsnark.rs @@ -1,3 +1,4 @@ +//! 
This module implements LookupSNARK, which leverages memory-offline-check techniques use crate::{ digest::{DigestBuilder, HasDigest, SimpleDigestible}, errors::NovaError, @@ -124,6 +125,7 @@ impl SumcheckEngine for MemoryOfflineSumcheckInstance { } #[allow(unused)] +/// A SNARK that proves the correctness of lookup operations via memory offline checking pub struct LookupSNARK> { a: PhantomData<(G, EE)>, @@ -161,6 +163,7 @@ impl> LookupSNARK where ::Repr: Abomonation, { + /// Produces the prover and verifier keys for `LookupSNARK` pub fn setup( ck: &CommitmentKey, initial_table: &Vec<(G::Scalar, G::Scalar, G::Scalar)>, @@ -188,10 +191,7 @@ where Ok((pk, vk)) } - // type ProverKey = ProverKey; - // type VerifierKey = VerifierKey; - - /// produces a succinct proof of satisfiability of a `RelaxedR1CS` instance + /// produces a succinct proof of satisfiability of a `LookupSNARK` instance #[tracing::instrument(skip_all, name = "LookupSNARK::prove")] pub fn prove( ck: &CommitmentKey, diff --git a/src/traits/circuit.rs b/src/traits/circuit.rs index 923ab7a58..aef43e8f2 100644 --- a/src/traits/circuit.rs +++ b/src/traits/circuit.rs @@ -14,7 +14,7 @@ pub trait StepCircuit: Send + Sync + Clone { /// Sythesize the circuit for a computation step and return variable /// that corresponds to the output of the step z_{i+1} fn synthesize>( - &mut self, + &self, cs: &mut CS, z: &[AllocatedNum], ) -> Result>, SynthesisError>; @@ -35,7 +35,7 @@ where } fn synthesize>( - &mut self, + &self, _cs: &mut CS, z: &[AllocatedNum], ) -> Result>, SynthesisError> {