Skip to content

Commit

Permalink
Merge branch 'main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
Nashtare authored Aug 21, 2024
2 parents c7b7b3e + bb8f388 commit 5b62e41
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 139 deletions.
108 changes: 0 additions & 108 deletions starky/src/get_challenges.rs
Original file line number Diff line number Diff line change
Expand Up @@ -308,111 +308,3 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
.get_challenges::<F, C>(builder, challenger, challenges, ignore_trace_cap, config)
}
}

// TODO: Deal with the compressed stuff.
// impl<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
// CompressedProofWithPublicInputs<F, C, D>
// {
// /// Computes all Fiat-Shamir challenges used in the Plonk proof.
// pub(crate) fn get_challenges(
// &self,
// common_data: &CommonCircuitData<F, C, D>,
// ) -> anyhow::Result<ProofChallenges<F, D>> {
// let CompressedProof {
// wires_cap,
// plonk_zs_partial_products_cap,
// quotient_polys_cap,
// openings,
// opening_proof:
// CompressedFriProof {
// commit_phase_merkle_caps,
// final_poly,
// pow_witness,
// ..
// },
// } = &self.proof;
//
// get_challenges(
// self.get_public_inputs_hash(),
// wires_cap,
// plonk_zs_partial_products_cap,
// quotient_polys_cap,
// openings,
// commit_phase_merkle_caps,
// final_poly,
// *pow_witness,
// common_data,
// )
// }
//
// /// Computes all coset elements that can be inferred in the FRI reduction steps.
// pub(crate) fn get_inferred_elements(
// &self,
// challenges: &ProofChallenges<F, D>,
// common_data: &CommonCircuitData<F, C, D>,
// ) -> FriInferredElements<F, D> {
// let ProofChallenges {
// plonk_zeta,
// fri_alpha,
// fri_betas,
// fri_query_indices,
// ..
// } = challenges;
// let mut fri_inferred_elements = Vec::new();
// // Holds the indices that have already been seen at each reduction depth.
// let mut seen_indices_by_depth =
// vec![HashSet::new(); common_data.fri_params.reduction_arity_bits.len()];
// let precomputed_reduced_evals = PrecomputedReducedOpenings::from_os_and_alpha(
// &self.proof.openings.to_fri_openings(),
// *fri_alpha,
// );
// let log_n = common_data.degree_bits + common_data.config.fri_config.rate_bits;
// // Simulate the proof verification and collect the inferred elements.
// // The content of the loop is basically the same as the `fri_verifier_query_round` function.
// for &(mut x_index) in fri_query_indices {
// let mut subgroup_x = F::MULTIPLICATIVE_GROUP_GENERATOR
// * F::primitive_root_of_unity(log_n).exp_u64(reverse_bits(x_index, log_n) as u64);
// let mut old_eval = fri_combine_initial::<F, C, D>(
// &common_data.get_fri_instance(*plonk_zeta),
// &self
// .proof
// .opening_proof
// .query_round_proofs
// .initial_trees_proofs[&x_index],
// *fri_alpha,
// subgroup_x,
// &precomputed_reduced_evals,
// &common_data.fri_params,
// );
// for (i, &arity_bits) in common_data
// .fri_params
// .reduction_arity_bits
// .iter()
// .enumerate()
// {
// let coset_index = x_index >> arity_bits;
// if !seen_indices_by_depth[i].insert(coset_index) {
// // If this index has already been seen, we can skip the rest of the reductions.
// break;
// }
// fri_inferred_elements.push(old_eval);
// let arity = 1 << arity_bits;
// let mut evals = self.proof.opening_proof.query_round_proofs.steps[i][&coset_index]
// .evals
// .clone();
// let x_index_within_coset = x_index & (arity - 1);
// evals.insert(x_index_within_coset, old_eval);
// old_eval = compute_evaluation(
// subgroup_x,
// x_index_within_coset,
// arity_bits,
// &evals,
// fri_betas[i],
// );
// subgroup_x = subgroup_x.exp_power_of_2(arity_bits);
// x_index = coset_index;
// }
// }
// FriInferredElements(fri_inferred_elements)
// }
// }
32 changes: 1 addition & 31 deletions starky/src/proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,7 @@ use alloc::{vec, vec::Vec};
use itertools::Itertools;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::fri::proof::{
CompressedFriProof, FriChallenges, FriChallengesTarget, FriProof, FriProofTarget,
};
use plonky2::fri::proof::{FriChallenges, FriChallengesTarget, FriProof, FriProofTarget};
use plonky2::fri::structure::{
FriOpeningBatch, FriOpeningBatchTarget, FriOpenings, FriOpeningsTarget,
};
Expand Down Expand Up @@ -144,34 +142,6 @@ pub struct StarkProofWithPublicInputsTarget<const D: usize> {
pub public_inputs: Vec<Target>,
}

/// A compressed proof format of a single STARK.
///
/// Unlike the uncompressed form, the FRI argument here is a
/// `CompressedFriProof`, which omits data that the verifier can re-infer.
///
/// NOTE(review): this commit removes the `CompressedFriProof` import from
/// this file and deletes this type — if this struct is kept anywhere, confirm
/// that import is restored, otherwise `opening_proof` will not resolve.
#[derive(Debug, Clone)]
pub struct CompressedStarkProof<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
> {
    /// Merkle cap of LDEs of trace values.
    pub trace_cap: MerkleCap<F, C::Hasher>,
    /// Purported values of each polynomial at the challenge point.
    pub openings: StarkOpeningSet<F, D>,
    /// A batch FRI argument for all openings.
    pub opening_proof: CompressedFriProof<F, C::Hasher, D>,
}

/// A compressed [`StarkProof`] format of a single STARK with its public inputs.
///
/// Pairs a [`CompressedStarkProof`] with the public inputs it was produced
/// against, mirroring the uncompressed `StarkProofWithPublicInputs` layout.
#[derive(Debug, Clone)]
pub struct CompressedStarkProofWithPublicInputs<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
> {
    /// A compressed STARK proof.
    pub proof: CompressedStarkProof<F, C, D>,
    /// Public inputs for this compressed STARK proof.
    /// NOTE(review): ordering presumably matches the STARK's declared public
    /// input layout — confirm against the prover that builds this.
    pub public_inputs: Vec<F>,
}

/// A [`StarkProof`] along with metadata about the initial Fiat-Shamir state, which is used when
/// creating a recursive wrapper proof around a STARK proof.
#[derive(Debug, Clone)]
Expand Down

0 comments on commit 5b62e41

Please sign in to comment.