diff --git a/crates/zk-por-cli/src/prover.rs b/crates/zk-por-cli/src/prover.rs index ac5c84d..aeab25c 100644 --- a/crates/zk-por-cli/src/prover.rs +++ b/crates/zk-por-cli/src/prover.rs @@ -6,11 +6,11 @@ use indicatif::ProgressBar; use plonky2::hash::hash_types::HashOut; use plonky2_field::types::PrimeField64; use rayon::{iter::ParallelIterator, prelude::*}; -use serde_json::json; + use std::{ fs, fs::File, - io::Write, + io::{BufWriter, Write}, path::PathBuf, str::FromStr, sync::{Arc, RwLock}, @@ -362,12 +362,14 @@ fn dump_proofs( let user_proof_output_dir_path = proof_output_dir_path.join(USER_PROOF_DIRNAME); // directory has been checked empty before. let global_proof_output_path = proof_output_dir_path.join(GLOBAL_PROOF_FILENAME); - let mut global_proof_file = + let global_proof_file = File::create(global_proof_output_path.clone()).map_err(|e| PoRError::Io(e))?; - global_proof_file - .write_all(json!(root_proof).to_string().as_bytes()) - .map_err(|e| return PoRError::Io(e))?; + let mut global_proof_writer = BufWriter::new(global_proof_file); + serde_json::to_writer(&mut global_proof_writer, &root_proof).expect( + format!("fail to dump global proof file to {:?}", global_proof_output_path).as_str(), + ); + global_proof_writer.flush()?; /////////////////////////////////////////////// let hash_offset = RecursiveTargets::::pub_input_hash_offset(); @@ -395,12 +397,14 @@ fn dump_proofs( }; let global_info_output_path = proof_output_dir_path.join(GLOBAL_INFO_FILENAME); - let mut global_info_file = + let global_info_file = File::create(global_info_output_path.clone()).map_err(|e| PoRError::Io(e))?; - global_info_file - .write_all(json!(info).to_string().as_bytes()) - .map_err(|e| return PoRError::Io(e))?; + let mut global_info_writer = BufWriter::new(global_info_file); + serde_json::to_writer(&mut global_info_writer, &info).expect( + format!("fail to dump global info file to {:?}", global_info_output_path).as_str(), + ); + global_info_writer.flush()?; 
/////////////////////////////////////////////// // generate and dump proof for each user @@ -447,15 +451,20 @@ fn dump_proofs( let user_proof_output_path = user_proof_output_dir_path.join(format!("{}.json", account.id)); - let mut user_proof_file = File::create(user_proof_output_path).expect( + let user_proof_file = File::create(user_proof_output_path).expect( format!("fail to create user proof file for account {}", user_proof.account.id) .as_str(), ); - user_proof_file.write_all(json!(user_proof).to_string().as_bytes()).expect( + let mut user_proof_writer = BufWriter::new(user_proof_file); + serde_json::to_writer(&mut user_proof_writer, &user_proof).expect( format!("fail to write user proof file for account {}", user_proof.account.id) .as_str(), ); + user_proof_writer.flush().expect( + format!("fail to write user proof file for account {}", user_proof.account.id) + .as_str(), + ) }); bar.inc(chunk.len() as u64); diff --git a/crates/zk-por-cli/src/verifier.rs b/crates/zk-por-cli/src/verifier.rs index d3c9f9a..8045a1b 100644 --- a/crates/zk-por-cli/src/verifier.rs +++ b/crates/zk-por-cli/src/verifier.rs @@ -70,7 +70,9 @@ pub fn verify_user( .map(|user_proof_path| { let merkle_path = File::open(&user_proof_path).unwrap(); let reader = std::io::BufReader::new(merkle_path); - let proof: MerkleProof = from_reader(reader).unwrap(); + let proof: MerkleProof = from_reader(reader).expect( + format!("fail to parse user proof from path {:?}", user_proof_path).as_str(), + ); let result = proof.verify_merkle_proof(root_hash); if verbose { bar.inc(1); diff --git a/crates/zk-por-core/src/account.rs b/crates/zk-por-core/src/account.rs index c811da4..8b6bad0 100644 --- a/crates/zk-por-core/src/account.rs +++ b/crates/zk-por-core/src/account.rs @@ -6,18 +6,78 @@ use plonky2::{ hash::{hash_types::HashOut, poseidon::PoseidonHash}, plonk::config::Hasher, }; -use plonky2_field::types::Field; +use plonky2_field::types::{Field, PrimeField64}; use rand::Rng; -use serde::{Deserialize, 
Serialize}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; /// A struct representing a users account. It represents their equity and debt as a Vector of goldilocks field elements. -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone)] pub struct Account { pub id: String, // 256 bit hex string pub equity: Vec, pub debt: Vec, } +impl Serialize for Account { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + use serde::ser::SerializeStruct; + let mut state = serializer.serialize_struct("Account", 3)?; + state.serialize_field("id", &self.id)?; + // Custom serialization for equity and debt to ensure they are serialized in a specific format if needed + let equity_as_strings: Vec = self + .equity + .iter() + .map(|e| { + let num = e.to_canonical_u64(); + num.to_string() + }) + .collect(); + state.serialize_field("equity", &equity_as_strings)?; + + let debt_as_strings: Vec = self + .debt + .iter() + .map(|e| { + let num = e.to_canonical_u64(); + num.to_string() + }) + .collect(); + state.serialize_field("debt", &debt_as_strings)?; + state.end() + } +} + +impl<'de> Deserialize<'de> for Account { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + struct InnerAccount { + id: String, + equity: Vec, + debt: Vec, + } + + let helper = InnerAccount::deserialize(deserializer)?; + let equity = helper + .equity + .iter() + .map(|e| F::from_canonical_u64(u64::from_str_radix(e, 10).unwrap())) + .collect(); + let debt = helper + .debt + .iter() + .map(|e| F::from_canonical_u64(u64::from_str_radix(e, 10).unwrap())) + .collect(); + + Ok(Account { id: helper.id, equity: equity, debt: debt }) + } +} + impl Account { /// Gets the account hash for a given account. 
pub fn get_hash(&self) -> HashOut { @@ -117,3 +177,30 @@ pub fn gen_empty_accounts(batch_size: usize, num_assets: usize) -> Vec ]; accounts } + +#[cfg(test)] +mod tests { + use super::*; + use serde_json; + + #[test] + fn test_account_json_marshalling() { + // Step 1: Create an instance of `Account` + let original_account = Account { + id: "1".to_owned(), // Assuming `id` is of type that implements `Serialize` and `Deserialize` + equity: vec![F::from_canonical_u64(0), F::from_canonical_u64(1)], + debt: vec![F::from_canonical_u64(0), F::from_canonical_u64(2)], + }; + + // Step 2: Serialize the `Account` instance to a JSON string + let json_string = serde_json::to_string(&original_account).unwrap(); + + // Step 3: Deserialize the JSON string back into an `Account` instance + let deserialized_account: Account = serde_json::from_str(&json_string).unwrap(); + + // Step 4: Assert that the original and deserialized instances are equal + assert_eq!(original_account.id, deserialized_account.id); + assert_eq!(original_account.equity, deserialized_account.equity); + assert_eq!(original_account.debt, deserialized_account.debt); + } +} diff --git a/crates/zk-por-core/src/merkle_proof.rs b/crates/zk-por-core/src/merkle_proof.rs index cc646ff..fb3ff6b 100644 --- a/crates/zk-por-core/src/merkle_proof.rs +++ b/crates/zk-por-core/src/merkle_proof.rs @@ -1,10 +1,10 @@ use itertools::Itertools; use plonky2::{ hash::{hash_types::HashOut, poseidon::PoseidonHash}, - plonk::config::Hasher, + plonk::config::{GenericHashOut, Hasher}, }; use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; -use serde::{Deserialize, Serialize}; +use serde::{self, Deserialize, Deserializer, Serialize, Serializer}; use crate::{ account::Account, @@ -111,12 +111,76 @@ pub fn get_recursive_siblings_index( /// We use this wrapper struct for the left and right hashes of our recursive siblings. This is needed so a user knows the position of /// their own hash when hashing. 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq)] pub struct RecursiveHashes { left_hashes: Vec>, right_hashes: Vec>, } +impl Serialize for RecursiveHashes { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + use serde::ser::SerializeStruct; + let mut state = serializer.serialize_struct("RecursiveHashes", 2)?; + + let left_hashes: Vec = self + .left_hashes + .iter() + .map(|e| { + let bytes = e.to_bytes(); + hex::encode(&bytes) + }) + .collect(); + state.serialize_field("left_hashes", &left_hashes)?; + + let right_hashes: Vec = self + .right_hashes + .iter() + .map(|e| { + let bytes = e.to_bytes(); + hex::encode(&bytes) + }) + .collect(); + state.serialize_field("right_hashes", &right_hashes)?; + state.end() + } +} + +impl<'de> Deserialize<'de> for RecursiveHashes { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + struct Inner { + left_hashes: Vec, + right_hashes: Vec, + } + + let helper = Inner::deserialize(deserializer)?; + let left_hashes = helper + .left_hashes + .iter() + .map(|e| { + let bytes = hex::decode(e).unwrap(); + HashOut::from_bytes(&bytes) + }) + .collect(); + let right_hashes = helper + .right_hashes + .iter() + .map(|e| { + let bytes = hex::decode(e).unwrap(); + HashOut::from_bytes(&bytes) + }) + .collect(); + + Ok(RecursiveHashes { left_hashes: left_hashes, right_hashes: right_hashes }) + } +} + impl RecursiveHashes { pub fn new_from_index(indexes: &RecursiveIndex, db: Arc) -> Self { let left_hashes = indexes @@ -146,7 +210,7 @@ impl RecursiveHashes { /// Hashes for a given users merkle proof of inclusion siblings in the Global Merkle Sum Tree, also includes account data as it is needed for the verification /// of the merkle proof (needed to calculate own hash) -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone)] pub struct MerkleProof { pub account: Account, pub index: usize, @@ -154,6 +218,63 @@ 
pub struct MerkleProof { pub recursive_tree_siblings: Vec, } +impl Serialize for MerkleProof { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + use serde::ser::SerializeStruct; + let mut state = serializer.serialize_struct("MerkleProof", 4)?; + state.serialize_field("account", &self.account)?; + state.serialize_field("index", &self.index)?; + + let sum_tree_siblings: Vec = self + .sum_tree_siblings + .iter() + .map(|e| { + let bytes = e.to_bytes(); + hex::encode(&bytes) + }) + .collect(); + + state.serialize_field("sum_tree_siblings", &sum_tree_siblings)?; + state.serialize_field("recursive_tree_siblings", &self.recursive_tree_siblings)?; + state.end() + } +} + +impl<'de> Deserialize<'de> for MerkleProof { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + #[derive(Deserialize)] + struct InnerMerkleProof { + account: Account, + index: usize, + sum_tree_siblings: Vec, + recursive_tree_siblings: Vec, + } + + let helper = InnerMerkleProof::deserialize(deserializer)?; + let sum_tree_siblings = helper + .sum_tree_siblings + .iter() + .map(|e| { + let bytes = hex::decode(e).unwrap(); + HashOut::from_bytes(&bytes) + }) + .collect(); + + Ok(MerkleProof { + account: helper.account, + index: helper.index, + sum_tree_siblings: sum_tree_siblings, + recursive_tree_siblings: helper.recursive_tree_siblings, + }) + } +} + impl MerkleProof { pub fn new_from_account( account: &Account, @@ -466,112 +587,83 @@ pub mod test { res.unwrap(); } - // THIS IS THE TEST DATA FOR VERIFY - // #[test] - // pub fn poseidon_hash() { - // let equity = vec![3,3,3,].iter().map(|x| F::from_canonical_u32(*x)).collect_vec(); - // let debt = vec![1,1,1,].iter().map(|x| F::from_canonical_u32(*x)).collect_vec(); - - // let accounts = vec![ - // Account{ - // id: "320b5ea99e653bc2b593db4130d10a4efd3a0b4cc2e1a6672b678d71dfbd33ad".to_string(), - // equity: equity.clone(), - // debt: debt.clone(), - // }, - // Account{ - // id: 
"320b5ea99e653bc2b593db4130d10a4efd3a0b4cc2e1a6672b678d71dfbd33ac".to_string(), - // equity: equity.clone(), - // debt: debt.clone(), - // }, - // Account{ - // id: "320b5ea99e653bc2b593db4130d10a4efd3a0b4cc2e1a6672b678d71dfbd33ab".to_string(), - // equity: equity.clone(), - // debt: debt.clone(), - // }, - // Account{ - // id: "320b5ea99e653bc2b593db4130d10a4efd3a0b4cc2e1a6672b678d71dfbd33aa".to_string(), - // equity: equity.clone(), - // debt: debt.clone(), - // }, - // Account{ - // id: "320b5ea99e653bc2b593db4130d10a4efd3a0b4cc2e1a6672b678d71dfbd33a1".to_string(), - // equity: equity.clone(), - // debt: debt.clone(), - // }, - // Account{ - // id: "320b5ea99e653bc2b593db4130d10a4efd3a0b4cc2e1a6672b678d71dfbd33a2".to_string(), - // equity: equity.clone(), - // debt: debt.clone(), - // }, - // Account{ - // id: "320b5ea99e653bc2b593db4130d10a4efd3a0b4cc2e1a6672b678d71dfbd33a3".to_string(), - // equity: equity.clone(), - // debt: debt.clone(), - // }, - // Account{ - // id: "320b5ea99e653bc2b593db4130d10a4efd3a0b4cc2e1a6672b678d71dfbd33a4".to_string(), - // equity: equity.clone(), - // debt: debt.clone(), - // } - // ]; - - // let msts: Vec = accounts - // .chunks(2) - // .map(|account_batch| MerkleSumTree::new_tree_from_accounts(&account_batch.to_vec())) - // .collect(); - - // let mst_hashes = msts.iter().map(|x| x.merkle_sum_tree.iter().map(|y| y.hash).collect_vec()).collect_vec(); - // println!("msts:{:?}", mst_hashes); - // let inputs = vec![ - // HashOut::from_vec( - // vec![ - // 8699257539652901730, - // 12847577670763395377, - // 14540605839220144846, - // 1921995570040415498, - // ] - // .iter() - // .map(|x| F::from_canonical_u64(*x)) - // .collect::>(), - // ), - // HashOut::from_vec( - // vec![ - // 15026394135096265436, - // 13313300609834454638, - // 10151802728958521275, - // 6200471959130767555, - // ] - // .iter() - // .map(|x| F::from_canonical_u64(*x)) - // .collect::>(), - // ), - // HashOut::from_vec( - // vec![ - // 2010803994799996791, - // 
568450490466247075, - // 18209684900543488748, - // 7678193912819861368, - // ] - // .iter() - // .map(|x| F::from_canonical_u64(*x)) - // .collect::>(), - // ), - // HashOut::from_vec( - // vec![ - // 13089029781628355232, - // 10704046654659337561, - // 15794212269117984095, - // 15948192230150472783, - // ] - // .iter() - // .map(|x| F::from_canonical_u64(*x)) - // .collect::>(), - // ), - // ]; - - // let hash = PoseidonHash::hash_no_pad( - // inputs.iter().map(|x| x.elements).flatten().collect_vec().as_slice(), - // ); - // println!("Hash: {:?}", hash); - // } + #[test] + pub fn test_json_merkle_proof() { + let _gmst = GlobalMst::new(GlobalConfig { + num_of_tokens: 3, + num_of_batches: 4, + batch_size: 2, + recursion_branchout_num: 4, + }); + + let equity = vec![0, 3, 3].iter().map(|x| F::from_canonical_u32(*x)).collect_vec(); + let debt = vec![1, 1, 1].iter().map(|x| F::from_canonical_u32(*x)).collect_vec(); + + let sum_tree_siblings = vec![HashOut::from_vec( + vec![ + 7609058119952049295, + 8895839458156070742, + 1052773619972611009, + 6038312163525827182, + ] + .iter() + .map(|x| F::from_canonical_u64(*x)) + .collect::>(), + )]; + + let recursive_tree_siblings = vec![RecursiveHashes { + left_hashes: vec![], + right_hashes: vec![ + HashOut::from_vec( + vec![ + 15026394135096265436, + 13313300609834454638, + 10151802728958521275, + 6200471959130767555, + ] + .iter() + .map(|x| F::from_canonical_u64(*x)) + .collect::>(), + ), + HashOut::from_vec( + vec![ + 2010803994799996791, + 568450490466247075, + 18209684900543488748, + 7678193912819861368, + ] + .iter() + .map(|x| F::from_canonical_u64(*x)) + .collect::>(), + ), + HashOut::from_vec( + vec![ + 13089029781628355232, + 10704046654659337561, + 15794212269117984095, + 15948192230150472783, + ] + .iter() + .map(|x| F::from_canonical_u64(*x)) + .collect::>(), + ), + ], + }]; + + let account = Account { + id: "320b5ea99e653bc2b593db4130d10a4efd3a0b4cc2e1a6672b678d71dfbd33ad".to_string(), + equity: equity.clone(), 
+ debt: debt.clone(), + }; + + let merkle_proof = + MerkleProof { account, sum_tree_siblings, recursive_tree_siblings, index: 0 }; + + let json_string = serde_json::to_string(&merkle_proof).unwrap(); + + // Step 3: Deserialize the JSON string back into an `Account` instance + let deserialized_merkle_proof: MerkleProof = serde_json::from_str(&json_string).unwrap(); + assert_eq!(merkle_proof.index, deserialized_merkle_proof.index); + assert_eq!(merkle_proof.sum_tree_siblings, deserialized_merkle_proof.sum_tree_siblings); + } } diff --git a/scripts/release.sh b/scripts/release_linux.sh similarity index 60% rename from scripts/release.sh rename to scripts/release_linux.sh index 39e4ab7..fe17ada 100755 --- a/scripts/release.sh +++ b/scripts/release_linux.sh @@ -1,9 +1,9 @@ rm -rf release mkdir -p release/{config,sample_data} -cargo build --release +RUSTFLAGS="-C target-feature=+crt-static" cargo build --release --target x86_64-unknown-linux-gnu -mv target/release/zk-por-cli release/zk-por-prover +mv target/x86_64-unknown-linux-gnu/release/zk-por-cli release/zk-por-prover -cargo build --features zk-por-core/verifier --release +RUSTFLAGS="-C target-feature=+crt-static" cargo build --features zk-por-core/verifier --release --target x86_64-unknown-linux-gnu -mv target/release/zk-por-cli release/zk-por-verifier +mv target/x86_64-unknown-linux-gnu/release/zk-por-cli release/zk-por-verifier mkdir -p release/config @@ -13,4 +13,4 @@ mkdir -p release/sample_data cp -r test-data/batch0.json release/sample_data cp docs/release.md release/README.md -tar -cvf zk-por.tar ./release/ \ No newline at end of file +tar -cvf zk-por-linux.tar ./release/ \ No newline at end of file