Skip to content

Commit

Permalink
add API for data directory
Browse files Browse the repository at this point in the history
  • Loading branch information
Hanting Zhang committed Feb 20, 2024
1 parent 8d2bb89 commit 1fbe4af
Show file tree
Hide file tree
Showing 5 changed files with 147 additions and 1 deletion.
1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ ref-cast = "1.0.20" # allocation-less conversion in multilinear polys
derive_more = "0.99.17" # lightens impl macros for pasta
static_assertions = "1.1.0"
rayon-scan = "0.1.0"
camino = "1.1.6"

[target.'cfg(any(target_arch = "x86_64", target_arch = "aarch64"))'.dependencies]
# grumpkin-msm has been patched to support MSMs for the pasta curve cycle
Expand Down
2 changes: 2 additions & 0 deletions examples/minroot.rs
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,8 @@ fn main() {
.with(EnvFilter::from_default_env())
.with(TeXRayLayer::new());
tracing::subscriber::set_global_default(subscriber).unwrap();
arecibo::data::set_write_data(true);

type C1 = MinRootCircuit<<Bn256EngineKZG as Engine>::GE>;

println!("Nova-based VDF with MinRoot delay function");
Expand Down
110 changes: 110 additions & 0 deletions src/data.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
//! Very minimal utilities for reading/writing general arecibo data on disk.
use camino::{Utf8Path, Utf8PathBuf};
use once_cell::sync::OnceCell;
use serde::{de::DeserializeOwned, Serialize};
use std::{
  collections::HashMap,
  fs::{self, File, OpenOptions},
  io::{BufReader, BufWriter, Write},
};

/// Global flag for writing config.
/// NOTE(review): this is an immutable `static`, so it only serves as the *default*
/// value of `DataConfig::write_data`; runtime toggling goes through `set_write_data`.
pub static WRITE: bool = false;

/// Path to the directory where Arecibo data will be stored.
/// Relative path, so it resolves against the process's current working directory.
pub static ARECIBO_DATA: &str = ".arecibo_data";

/// Global configuration for Arecibo data storage, including root directory and counters.
/// This configuration is initialized on first use.
/// NOTE(review): taking references into a `static mut` is unsound under concurrent
/// access (and is a hard error in the 2024 edition). Consider `OnceLock<Mutex<DataConfig>>`
/// instead — TODO confirm all callers are single-threaded before relying on this.
pub static mut ARECIBO_CONFIG: OnceCell<DataConfig> = OnceCell::new();

/// Configuration for managing Arecibo data files, including the root directory,
/// witness counter, and cross-term counter for organizing files.
#[derive(Debug, Clone)]
pub struct DataConfig {
  // Directory under which all sections/files are written (defaults to `ARECIBO_DATA`).
  root_dir: Utf8PathBuf,
  // Per-section monotonically increasing counters, used to make file names unique
  // across repeated writes with the same label.
  section_counters: HashMap<String, usize>,
  // Whether writes are enabled; queried via `write_data()`, toggled via `set_write_data()`.
  write_data: bool,
}

/// Initializes the global configuration for Arecibo data storage, setting up the root directory
/// and initializing counters. We create the root directory if it does not already exist.
/// Initializes the global configuration for Arecibo data storage.
///
/// Ensures the root directory (`ARECIBO_DATA`) exists on disk — creating it when
/// absent — and returns a fresh `DataConfig` with empty section counters and the
/// default `WRITE` flag.
pub fn init_config() -> DataConfig {
  let root_dir = Utf8PathBuf::from(ARECIBO_DATA);
  let missing = !root_dir.exists();
  if missing {
    fs::create_dir_all(&root_dir).expect("Failed to create arecibo data directory");
  }

  let section_counters = HashMap::new();
  DataConfig {
    root_dir,
    section_counters,
    write_data: WRITE,
  }
}

/// Writes Arecibo data to disk, organizing it into sections and labeling it with a unique identifier.
/// This function serializes the given payload and writes it into the appropriate section and file.
/// For now, we just increment the relevant counter to ensure uniqueness.
/// Writes Arecibo data to disk, organizing it into sections and labeling it with a unique identifier.
///
/// Serializes `payload` with bincode into `<root>/<section>/<label>_<counter>`,
/// creating the section directory on demand. The per-section counter is incremented
/// on every write so repeated writes with the same label never collide.
///
/// # Panics
/// Panics if the directory or file cannot be created, or if serialization fails.
pub fn write_arecibo_data<T: Serialize>(
  section: impl AsRef<Utf8Path>,
  label: impl AsRef<Utf8Path>,
  payload: &T,
) {
  // Initialize the global config on first use; `set` is a no-op if already set.
  let _ = unsafe { ARECIBO_CONFIG.set(init_config()) };
  // NOTE(review): unguarded `static mut` access — callers must not race here.
  let config = unsafe { ARECIBO_CONFIG.get_mut().unwrap() };

  let section_path = config.root_dir.join(section.as_ref());
  if !section_path.exists() {
    fs::create_dir_all(&section_path).expect("Failed to create section directory");
  }

  // Per-section counter keeps successive writes with the same label unique.
  let counter = config
    .section_counters
    .entry(section.as_ref().to_string())
    .or_insert(0);

  // Fix: format the counter with `{}` (Display) instead of `{:?}` — identical
  // output for `usize`, but Display is the intended idiom.
  let file_path = section_path.join(format!("{}_{}", label.as_ref().as_str(), counter));
  *counter += 1;

  // Fix: a plain write-only create replaces the original OpenOptions that also
  // requested never-used read access.
  let file = File::create(file_path).expect("Failed to create data file");
  let mut writer = BufWriter::new(file);
  bincode::serialize_into(&mut writer, payload).expect("Failed to write data");
  // Fix: flush explicitly — `Drop` on `BufWriter` silently swallows flush errors.
  writer.flush().expect("Failed to flush data file");
}

/// Reads and deserializes data from a specified section and label.
/// Reads and deserializes data from a specified section and label.
///
/// # Panics
/// Panics if the section directory or data file does not exist, or if the file
/// cannot be opened or deserialized.
pub fn read_arecibo_data<T: DeserializeOwned>(
  section: impl AsRef<Utf8Path>,
  label: impl AsRef<Utf8Path>,
) -> T {
  let config = unsafe { ARECIBO_CONFIG.get_or_init(init_config) };

  // Resolve `<root>/<section>/<label>` and insist that both levels already exist.
  // (The write-side counter suffix is ignored here: the label alone is assumed to
  // identify the file.)
  let section_path = config.root_dir.join(section.as_ref());
  assert!(section_path.exists(), "Section directory does not exist");
  let file_path = section_path.join(label.as_ref());
  assert!(file_path.exists(), "Data file does not exist");

  let reader = BufReader::new(File::open(file_path).expect("Failed to open data file"));
  bincode::deserialize_from(reader).expect("Failed to read data")
}

/// Are we configured to write data?
/// Are we configured to write data?
pub fn write_data() -> bool {
  // Lazily initialize the global config, then report its flag.
  unsafe { ARECIBO_CONFIG.get_or_init(init_config) }.write_data
}

/// Set the configuration for writing data.
/// Set the configuration for writing data.
pub fn set_write_data(write_data: bool) {
  unsafe {
    // Ensure the config exists (`set` is a no-op when already initialized),
    // then flip the flag in place.
    let _ = ARECIBO_CONFIG.set(init_config());
    ARECIBO_CONFIG.get_mut().unwrap().write_data = write_data;
  }
}
33 changes: 33 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ pub mod r1cs;
pub mod spartan;
pub mod traits;

pub mod data;
pub mod supernova;

use once_cell::sync::OnceCell;
Expand All @@ -37,6 +38,7 @@ use crate::{
shape_cs::ShapeCS,
solver::SatisfyingAssignment,
},
data::{write_arecibo_data, write_data},
r1cs::R1CSResult,
};
use abomonation::Abomonation;
Expand Down Expand Up @@ -349,6 +351,13 @@ pub struct ResourceBuffer<E: Engine> {
T: Vec<E::Scalar>,
}

/// A very simple config for [`RecursiveSNARK`] in charge of logging behavior.
/// To be fleshed out and extended in the future.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct RecursiveSNARKConfig {
  // When true, the proving step dumps witness and cross-term data to disk via
  // `write_arecibo_data`; initialized from the global `data::write_data()` flag.
  write_data: bool,
}

/// A SNARK that proves the correct execution of an incremental computation
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(bound = "")]
Expand All @@ -373,6 +382,8 @@ where
i: usize,
zi_primary: Vec<E1::Scalar>,
zi_secondary: Vec<<Dual<E1> as Engine>::Scalar>,

config: RecursiveSNARKConfig,
}

impl<E1> RecursiveSNARK<E1>
Expand Down Expand Up @@ -484,6 +495,10 @@ where
T: r1cs::default_T::<Dual<E1>>(r1cs_secondary.num_cons),
};

let config = RecursiveSNARKConfig {
write_data: write_data(),
};

Ok(Self {
z0_primary: z0_primary.to_vec(),
z0_secondary: z0_secondary.to_vec(),
Expand All @@ -496,9 +511,12 @@ where

buffer_primary,
buffer_secondary,

i: 0,
zi_primary,
zi_secondary,

config,
})
}

Expand Down Expand Up @@ -588,6 +606,21 @@ where
&mut self.buffer_primary.ABC_Z_2,
)?;

if self.config.write_data {
let W = l_w_primary.W;
write_arecibo_data(
format!("witness_{:?}", pp.digest()),
format!("len_{}", W.len()),
&W,
);
let T = &self.buffer_primary.T;
write_arecibo_data(
format!("cross_term_{:?}", pp.digest()),
format!("len_{}", T.len()),
&T,
);
}

let mut cs_secondary = SatisfyingAssignment::<Dual<E1>>::with_capacity(
pp.circuit_shape_secondary.r1cs_shape.num_io + 1,
pp.circuit_shape_secondary.r1cs_shape.num_vars,
Expand Down
2 changes: 1 addition & 1 deletion src/r1cs/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ pub struct R1CSResult<E: Engine> {
/// A type that holds a witness for a given R1CS instance
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct R1CSWitness<E: Engine> {
  // Assignment to the witness variables. Crate-visible (rather than private) so
  // the data-dumping code in `lib.rs` can read it directly when logging is enabled.
  pub(crate) W: Vec<E::Scalar>,
}

/// A type that holds an R1CS instance
Expand Down

0 comments on commit 1fbe4af

Please sign in to comment.