diff --git a/Cargo.toml b/Cargo.toml index 01796d6584..d4a3aaac1b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,7 @@ bellpepper-core = { workspace = true } bincode = { workspace = true } blstrs = { workspace = true } bytecount = "=0.6.4" -camino = { workspace = true } +camino = { workspace = true, features = ["serde1"] } clap = { workspace = true, features = ["derive"] } config = "0.13.3" dashmap = "5.5.0" @@ -34,7 +34,7 @@ itertools = "0.9" lurk-macros = { path = "lurk-macros" } lurk-metrics = { path = "lurk-metrics" } metrics = { workspace = true } -neptune = { workspace = true, features = ["arity2","arity4","arity8","arity16","pasta","bls"] } +neptune = { workspace = true, features = ["arity2", "arity4", "arity8", "arity16", "pasta", "bls"] } nom = "7.1.3" nom_locate = "4.1.0" nova = { workspace = true } @@ -58,7 +58,7 @@ serde_repr = "0.1.14" tap = "1.0.1" stable_deref_trait = "1.2.0" thiserror = { workspace = true } -abomonation = { workspace = true} +abomonation = { workspace = true } abomonation_derive = { git = "https://github.com/lurk-lab/abomonation_derive.git" } crossbeam = "0.8.2" byteorder = "1.4.3" @@ -69,7 +69,7 @@ ansi_term = "0.12.1" tracing = { workspace = true } tracing-texray = { workspace = true } tracing-subscriber = { workspace = true, features = ["env-filter"] } -elsa = { version = "1.9.0", git="https://github.com/lurk-lab/elsa", branch = "sync_index_map", features = ["indexmap"] } +elsa = { version = "1.9.0", git = "https://github.com/lurk-lab/elsa", branch = "sync_index_map", features = ["indexmap"] } arc-swap = "1.6.0" [target.'cfg(not(target_arch = "wasm32"))'.dependencies] @@ -78,7 +78,10 @@ pasta-msm = { workspace = true } proptest = { workspace = true } proptest-derive = { workspace = true } rand = "0.8.5" -rustyline = { version = "11.0", features = ["derive", "with-file-history"], default-features = false } +rustyline = { version = "11.0", features = [ + "derive", + "with-file-history", +], default-features = false } home = 
"0.5.5" [target.'cfg(target_arch = "wasm32")'.dependencies] @@ -110,12 +113,7 @@ vergen = { version = "8", features = ["build", "git", "gitcl"] } [workspace] resolver = "2" -members = [ - "clutch", - "fcomm", - "lurk-macros", - "lurk-metrics" -] +members = ["clutch", "fcomm", "lurk-macros", "lurk-metrics"] # Dependencies that should be kept in sync through the whole workspace [workspace.dependencies] @@ -177,10 +175,6 @@ harness = false name = "fibonacci" harness = false -[[bench]] -name = "fibonacci_lem" -harness = false - [[bench]] name = "synthesis" harness = false @@ -203,4 +197,4 @@ harness = false [patch.crates-io] # This is needed to ensure halo2curves, which imports pasta-curves, uses the *same* traits in bn256_grumpkin -pasta_curves = { git="https://github.com/lurk-lab/pasta_curves", branch="dev" } +pasta_curves = { git = "https://github.com/lurk-lab/pasta_curves", branch = "dev" } diff --git a/benches/common/mod.rs b/benches/common/mod.rs new file mode 100644 index 0000000000..5af042e054 --- /dev/null +++ b/benches/common/mod.rs @@ -0,0 +1,14 @@ +use camino::Utf8PathBuf; +use lurk::cli::paths::lurk_default_dir; +use lurk::config::lurk_config; +use once_cell::sync::Lazy; + +/// Edit this path to use a config file specific to benchmarking +/// E.g. 
`Utf8PathBuf::from("/home//lurk-rs/lurk-bench.toml");` +pub static BENCH_CONFIG_PATH: Lazy = + Lazy::new(|| lurk_default_dir().join("lurk.toml")); + +/// Sets the config settings with the given file +pub fn set_bench_config() { + lurk_config(Some(&BENCH_CONFIG_PATH), None); +} diff --git a/benches/end2end.rs b/benches/end2end.rs index 3459c864e3..2c10459093 100644 --- a/benches/end2end.rs +++ b/benches/end2end.rs @@ -1,8 +1,7 @@ -use camino::Utf8Path; use criterion::{ black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion, SamplingMode, }; -use pasta_curves::pallas::Scalar as Fr; +use pasta_curves::pallas::Scalar as Fq; use lurk::{ circuit::circuit_frame::MultiFrame, @@ -22,11 +21,12 @@ use lurk::{ state::State, store::Store, }; -use pasta_curves::pallas; use std::time::Duration; use std::{cell::RefCell, rc::Rc, sync::Arc}; -const PUBLIC_PARAMS_PATH: &str = "/var/tmp/lurk_benches/public_params"; +mod common; +use common::set_bench_config; + const DEFAULT_REDUCTION_COUNT: usize = 10; fn go_base(store: &Store, state: Rc>, a: u64, b: u64) -> Ptr { @@ -58,8 +58,9 @@ fn end2end_benchmark(c: &mut Criterion) { .measurement_time(Duration::from_secs(120)) .sample_size(10); + set_bench_config(); let limit = 1_000_000_000; - let lang_pallas = Lang::>::new(); + let lang_pallas = Lang::>::new(); let lang_pallas_rc = Arc::new(lang_pallas.clone()); let reduction_count = DEFAULT_REDUCTION_COUNT; @@ -75,11 +76,7 @@ fn end2end_benchmark(c: &mut Criterion) { true, Kind::NovaPublicParams, ); - let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>( - &instance, - Utf8Path::new(PUBLIC_PARAMS_PATH), - ) - .unwrap(); + let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); let size = (10, 0); let benchmark_id = BenchmarkId::new("end2end_go_base_nova", format!("_{}_{}", size.0, size.1)); @@ -88,7 +85,7 @@ fn end2end_benchmark(c: &mut Criterion) { group.bench_with_input(benchmark_id, &size, |b, &s| { b.iter(|| { 
- let ptr = go_base::(&store, state.clone(), s.0, s.1); + let ptr = go_base::(&store, state.clone(), s.0, s.1); let _result = prover .evaluate_and_prove(&pp, ptr, env, &store, limit, &lang_pallas_rc) .unwrap(); @@ -107,8 +104,7 @@ fn store_benchmark(c: &mut Criterion) { .measurement_time(Duration::from_secs(5)) .sample_size(60); - let bls12_store = Store::::default(); - let pallas_store = Store::::default(); + let pallas_store = Store::::default(); let state = State::init_lurk_state().rccell(); @@ -117,18 +113,10 @@ fn store_benchmark(c: &mut Criterion) { for size in sizes { let parameter_string = format!("_{}_{}", size.0, size.1); - let bls12_id = BenchmarkId::new("store_go_base_bls12", ¶meter_string); - group.bench_with_input(bls12_id, &size, |b, &s| { - b.iter(|| { - let result = go_base::(&bls12_store, state.clone(), s.0, s.1); - black_box(result) - }) - }); - let pasta_id = BenchmarkId::new("store_go_base_pallas", ¶meter_string); group.bench_with_input(pasta_id, &size, |b, &s| { b.iter(|| { - let result = go_base::(&pallas_store, state.clone(), s.0, s.1); + let result = go_base::(&pallas_store, state.clone(), s.0, s.1); black_box(result) }) }); @@ -146,8 +134,7 @@ fn hydration_benchmark(c: &mut Criterion) { .measurement_time(Duration::from_secs(5)) .sample_size(60); - let bls12_store = Store::::default(); - let pallas_store = Store::::default(); + let pallas_store = Store::::default(); let state = State::init_lurk_state().rccell(); @@ -156,18 +143,10 @@ fn hydration_benchmark(c: &mut Criterion) { for size in sizes { let parameter_string = format!("_{}_{}", size.0, size.1); - { - let benchmark_id = BenchmarkId::new("hydration_go_base_bls12", ¶meter_string); - group.bench_with_input(benchmark_id, &size, |b, &s| { - let _ptr = go_base::(&bls12_store, state.clone(), s.0, s.1); - b.iter(|| bls12_store.hydrate_scalar_cache()) - }); - } - { let benchmark_id = BenchmarkId::new("hydration_go_base_pallas", ¶meter_string); group.bench_with_input(benchmark_id, &size, |b, 
&s| { - let _ptr = go_base::(&pallas_store, state.clone(), s.0, s.1); + let _ptr = go_base::(&pallas_store, state.clone(), s.0, s.1); b.iter(|| pallas_store.hydrate_scalar_cache()) }); } @@ -186,10 +165,8 @@ fn eval_benchmark(c: &mut Criterion) { .sample_size(60); let limit = 1_000_000_000; - let lang_bls12 = Lang::>::new(); - let lang_pallas = Lang::>::new(); - let bls12_store = Store::::default(); - let pallas_store = Store::::default(); + let lang_pallas = Lang::>::new(); + let pallas_store = Store::::default(); let state = State::init_lurk_state().rccell(); @@ -198,27 +175,10 @@ fn eval_benchmark(c: &mut Criterion) { for size in sizes { let parameter_string = format!("_{}_{}", size.0, size.1); - { - let benchmark_id = BenchmarkId::new("eval_go_base_bls12", ¶meter_string); - group.bench_with_input(benchmark_id, &size, |b, &s| { - let ptr = go_base::(&bls12_store, state.clone(), s.0, s.1); - b.iter(|| { - Evaluator::new( - ptr, - empty_sym_env(&bls12_store), - &bls12_store, - limit, - &lang_bls12, - ) - .eval() - }) - }); - } - { let benchmark_id = BenchmarkId::new("eval_go_base_pallas", ¶meter_string); group.bench_with_input(benchmark_id, &size, |b, &s| { - let ptr = go_base::(&pallas_store, state.clone(), s.0, s.1); + let ptr = go_base::(&pallas_store, state.clone(), s.0, s.1); b.iter(|| { Evaluator::new( ptr, @@ -279,8 +239,9 @@ fn prove_benchmark(c: &mut Criterion) { .measurement_time(Duration::from_secs(120)) .sample_size(10); + set_bench_config(); let limit = 1_000_000_000; - let lang_pallas = Lang::>::new(); + let lang_pallas = Lang::>::new(); let lang_pallas_rc = Arc::new(lang_pallas.clone()); let store = Store::default(); let reduction_count = DEFAULT_REDUCTION_COUNT; @@ -289,20 +250,18 @@ fn prove_benchmark(c: &mut Criterion) { let benchmark_id = BenchmarkId::new("prove_go_base_nova", format!("_{}_{}", size.0, size.1)); let state = State::init_lurk_state().rccell(); + + // use cached public params let instance = Instance::new( reduction_count, 
lang_pallas_rc.clone(), true, Kind::NovaPublicParams, ); - let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>( - &instance, - Utf8Path::new(PUBLIC_PARAMS_PATH), - ) - .unwrap(); + let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); group.bench_with_input(benchmark_id, &size, |b, &s| { - let ptr = go_base::(&store, state.clone(), s.0, s.1); + let ptr = go_base::(&store, state.clone(), s.0, s.1); let prover = NovaProver::new(reduction_count, lang_pallas.clone()); let frames = MultiFrame::get_evaluation_frames( |count| prover.needs_frame_padding(count), @@ -331,8 +290,9 @@ fn prove_compressed_benchmark(c: &mut Criterion) { .measurement_time(Duration::from_secs(120)) .sample_size(10); + set_bench_config(); let limit = 1_000_000_000; - let lang_pallas = Lang::>::new(); + let lang_pallas = Lang::>::new(); let lang_pallas_rc = Arc::new(lang_pallas.clone()); let store = Store::default(); let reduction_count = DEFAULT_REDUCTION_COUNT; @@ -344,20 +304,18 @@ fn prove_compressed_benchmark(c: &mut Criterion) { ); let state = State::init_lurk_state().rccell(); + + // use cached public params let instance = Instance::new( reduction_count, lang_pallas_rc.clone(), true, Kind::NovaPublicParams, ); - let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>( - &instance, - Utf8Path::new(PUBLIC_PARAMS_PATH), - ) - .unwrap(); + let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); group.bench_with_input(benchmark_id, &size, |b, &s| { - let ptr = go_base::(&store, state.clone(), s.0, s.1); + let ptr = go_base::(&store, state.clone(), s.0, s.1); let prover = NovaProver::new(reduction_count, lang_pallas.clone()); let frames = prover .get_evaluation_frames( @@ -387,24 +345,23 @@ fn verify_benchmark(c: &mut Criterion) { .measurement_time(Duration::from_secs(10)) .sample_size(10); + set_bench_config(); let limit = 1_000_000_000; - let lang_pallas = Lang::>::new(); + let 
lang_pallas = Lang::>::new(); let lang_pallas_rc = Arc::new(lang_pallas.clone()); let store = Store::default(); let reduction_count = DEFAULT_REDUCTION_COUNT; let state = State::init_lurk_state().rccell(); + + // use cached public params let instance = Instance::new( reduction_count, lang_pallas_rc.clone(), true, Kind::NovaPublicParams, ); - let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>( - &instance, - Utf8Path::new(PUBLIC_PARAMS_PATH), - ) - .unwrap(); + let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); let sizes = vec![(10, 0)]; for size in sizes { @@ -449,24 +406,23 @@ fn verify_compressed_benchmark(c: &mut Criterion) { .measurement_time(Duration::from_secs(10)) .sample_size(10); + set_bench_config(); let limit = 1_000_000_000; - let lang_pallas = Lang::>::new(); + let lang_pallas = Lang::>::new(); let lang_pallas_rc = Arc::new(lang_pallas.clone()); let store = Store::default(); let reduction_count = DEFAULT_REDUCTION_COUNT; let state = State::init_lurk_state().rccell(); + + // use cached public params let instance = Instance::new( reduction_count, lang_pallas_rc.clone(), true, Kind::NovaPublicParams, ); - let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>( - &instance, - Utf8Path::new(PUBLIC_PARAMS_PATH), - ) - .unwrap(); + let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); let sizes = vec![(10, 0)]; for size in sizes { diff --git a/benches/end2end_lem.rs b/benches/end2end_lem.rs index be5ebb362d..4a7d4cf804 100644 --- a/benches/end2end_lem.rs +++ b/benches/end2end_lem.rs @@ -14,13 +14,17 @@ use lurk::{ pointers::Ptr, store::Store, }, - proof::{ - nova::{public_params, NovaProver, PublicParams}, - Prover, + proof::{nova::NovaProver, Prover}, + public_parameters::{ + self, + instance::{Instance, Kind}, }, state::State, }; +mod common; +use common::set_bench_config; + const DEFAULT_REDUCTION_COUNT: usize = 10; fn go_base(store: 
&Store, state: Rc>, a: u64, b: u64) -> Ptr { @@ -52,17 +56,26 @@ fn end2end_benchmark_lem(c: &mut Criterion) { .measurement_time(Duration::from_secs(120)) .sample_size(10); + set_bench_config(); let limit = 1_000_000_000; let reduction_count = DEFAULT_REDUCTION_COUNT; // setup - let lang = Arc::new(Lang::new()); + let lang_pallas = Lang::>::new(); + let lang_pallas_rc = Arc::new(lang_pallas.clone()); + let store = Store::default(); let prover: NovaProver<'_, Fq, Coproc, MultiFrame<'_, Fq, Coproc>> = - NovaProver::new(reduction_count, (*lang).clone()); - - let pp: PublicParams>> = - public_params(reduction_count, lang.clone()); + NovaProver::new(reduction_count, (*lang_pallas_rc).clone()); + + // use cached public params + let instance = Instance::new( + reduction_count, + lang_pallas_rc.clone(), + true, + Kind::NovaPublicParams, + ); + let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); let size = (10, 0); let benchmark_id = BenchmarkId::new("end2end_go_base_nova", format!("_{}_{}", size.0, size.1)); @@ -73,7 +86,7 @@ fn end2end_benchmark_lem(c: &mut Criterion) { b.iter(|| { let ptr = go_base::(&store, state.clone(), s.0, s.1); let (frames, _) = evaluate::>(None, ptr, &store, limit).unwrap(); - let _result = prover.prove(&pp, &frames, &store, &lang).unwrap(); + let _result = prover.prove(&pp, &frames, &store, &lang_pallas_rc).unwrap(); }) }); @@ -241,6 +254,7 @@ fn prove_benchmark_lem(c: &mut Criterion) { .measurement_time(Duration::from_secs(120)) .sample_size(10); + set_bench_config(); let limit = 1_000_000_000; let reduction_count = DEFAULT_REDUCTION_COUNT; @@ -251,9 +265,17 @@ fn prove_benchmark_lem(c: &mut Criterion) { let state = State::init_lurk_state().rccell(); - let lang = Arc::new(Lang::new()); - let pp: PublicParams>> = - public_params(reduction_count, lang.clone()); + let lang_pallas = Lang::>::new(); + let lang_pallas_rc = Arc::new(lang_pallas.clone()); + + // use cached public params + let instance = 
Instance::new( + reduction_count, + lang_pallas_rc.clone(), + true, + Kind::NovaPublicParams, + ); + let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); group.bench_with_input(benchmark_id, &size, |b, &s| { let ptr = go_base::(&store, state.clone(), s.0, s.1); @@ -262,7 +284,7 @@ fn prove_benchmark_lem(c: &mut Criterion) { let (frames, _) = evaluate::>(None, ptr, &store, limit).unwrap(); b.iter(|| { - let result = prover.prove(&pp, &frames, &store, &lang).unwrap(); + let result = prover.prove(&pp, &frames, &store, &lang_pallas_rc).unwrap(); black_box(result); }) }); @@ -278,6 +300,7 @@ fn prove_compressed_benchmark_lem(c: &mut Criterion) { .measurement_time(Duration::from_secs(120)) .sample_size(10); + set_bench_config(); let limit = 1_000_000_000; let store = Store::default(); let reduction_count = DEFAULT_REDUCTION_COUNT; @@ -290,9 +313,17 @@ fn prove_compressed_benchmark_lem(c: &mut Criterion) { let state = State::init_lurk_state().rccell(); - let lang = Arc::new(Lang::new()); - let pp: PublicParams>> = - public_params(reduction_count, lang.clone()); + let lang_pallas = Lang::>::new(); + let lang_pallas_rc = Arc::new(lang_pallas.clone()); + + // use cached public params + let instance = Instance::new( + reduction_count, + lang_pallas_rc.clone(), + true, + Kind::NovaPublicParams, + ); + let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); group.bench_with_input(benchmark_id, &size, |b, &s| { let ptr = go_base::(&store, state.clone(), s.0, s.1); @@ -300,7 +331,7 @@ fn prove_compressed_benchmark_lem(c: &mut Criterion) { let (frames, _) = evaluate::>(None, ptr, &store, limit).unwrap(); b.iter(|| { - let (proof, _, _, _) = prover.prove(&pp, &frames, &store, &lang).unwrap(); + let (proof, _, _, _) = prover.prove(&pp, &frames, &store, &lang_pallas_rc).unwrap(); let compressed_result = proof.compress(&pp).unwrap(); black_box(compressed_result); @@ -317,15 +348,24 @@ fn 
verify_benchmark_lem(c: &mut Criterion) { .measurement_time(Duration::from_secs(10)) .sample_size(10); + set_bench_config(); let limit = 1_000_000_000; let store = Store::default(); let reduction_count = DEFAULT_REDUCTION_COUNT; let state = State::init_lurk_state().rccell(); - let lang = Arc::new(Lang::new()); - let pp: PublicParams>> = - public_params(reduction_count, lang.clone()); + let lang_pallas = Lang::>::new(); + let lang_pallas_rc = Arc::new(lang_pallas.clone()); + + // use cached public params + let instance = Instance::new( + reduction_count, + lang_pallas_rc.clone(), + true, + Kind::NovaPublicParams, + ); + let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); let sizes = [(10, 0)]; for size in sizes { @@ -335,7 +375,8 @@ fn verify_benchmark_lem(c: &mut Criterion) { let ptr = go_base(&store, state.clone(), s.0, s.1); let prover = NovaProver::new(reduction_count, Lang::new()); let (frames, _) = evaluate::>(None, ptr, &store, limit).unwrap(); - let (proof, z0, zi, num_steps) = prover.prove(&pp, &frames, &store, &lang).unwrap(); + let (proof, z0, zi, num_steps) = + prover.prove(&pp, &frames, &store, &lang_pallas_rc).unwrap(); b.iter_batched( || z0.clone(), @@ -360,15 +401,24 @@ fn verify_compressed_benchmark_lem(c: &mut Criterion) { .measurement_time(Duration::from_secs(10)) .sample_size(10); + set_bench_config(); let limit = 1_000_000_000; let store = Store::default(); let reduction_count = DEFAULT_REDUCTION_COUNT; let state = State::init_lurk_state().rccell(); - let lang = Arc::new(Lang::new()); - let pp: PublicParams>> = - public_params(reduction_count, lang.clone()); + let lang_pallas = Lang::>::new(); + let lang_pallas_rc = Arc::new(lang_pallas.clone()); + + // use cached public params + let instance = Instance::new( + reduction_count, + lang_pallas_rc.clone(), + true, + Kind::NovaPublicParams, + ); + let pp = public_parameters::public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); let sizes = [(10, 
0)]; for size in sizes { @@ -378,7 +428,8 @@ fn verify_compressed_benchmark_lem(c: &mut Criterion) { let ptr = go_base(&store, state.clone(), s.0, s.1); let prover = NovaProver::new(reduction_count, Lang::new()); let (frames, _) = evaluate::>(None, ptr, &store, limit).unwrap(); - let (proof, z0, zi, num_steps) = prover.prove(&pp, &frames, &store, &lang).unwrap(); + let (proof, z0, zi, num_steps) = + prover.prove(&pp, &frames, &store, &lang_pallas_rc).unwrap(); let compressed_proof = proof.compress(&pp).unwrap(); diff --git a/benches/fibonacci.rs b/benches/fibonacci.rs index 6161082482..e393c158bd 100644 --- a/benches/fibonacci.rs +++ b/benches/fibonacci.rs @@ -1,140 +1,266 @@ use std::{cell::RefCell, rc::Rc, sync::Arc, time::Duration}; -use camino::Utf8Path; use criterion::{ black_box, criterion_group, criterion_main, measurement, BatchSize, BenchmarkGroup, BenchmarkId, Criterion, SamplingMode, }; -use pasta_curves::pallas; - use lurk::{ - circuit::circuit_frame::MultiFrame, - eval::{ - empty_sym_env, - lang::{Coproc, Lang}, - }, + eval::lang::{Coproc, Lang}, field::LurkField, - proof::nova::NovaProver, - proof::Prover, - ptr::Ptr, + proof::{ + nova::{NovaProver, Proof}, + Prover, + }, public_parameters::{ instance::{Instance, Kind}, public_params, }, state::State, - store::Store, }; +use pasta_curves::pallas; -const PUBLIC_PARAMS_PATH: &str = "/var/tmp/lurk_benches/public_params"; - -fn fib(store: &Store, state: Rc>, _a: u64) -> Ptr { - let program = r#" -(letrec ((next (lambda (a b) (next b (+ a b)))) - (fib (next 0 1))) - (fib)) -"#; - - store.read_with_state(state, program).unwrap() -} - -// The env output in the `fib_frame`th frame of the above, infinite Fibonacci computation will contain a binding of the -// nth Fibonacci number to `a`. -// means of computing it.] 
-fn fib_frame(n: usize) -> usize { - 11 + 16 * n -} +mod common; +use common::set_bench_config; -// Set the limit so the last step will be filled exactly, since Lurk currently only pads terminal/error continuations. -fn fib_limit(n: usize, rc: usize) -> usize { - let frame = fib_frame(n); - rc * (frame / rc + usize::from(frame % rc != 0)) +#[allow(clippy::upper_case_acronyms)] +#[derive(Copy, Debug, Clone, PartialEq, Eq)] +enum Version { + ALPHA, + LEM, } -struct ProveParams { - fib_n: usize, +pub struct ProveParams { + folding_steps: usize, reduction_count: usize, + version: Version, } impl ProveParams { fn name(&self) -> String { + format!("{:?},rc={}", self.version, self.reduction_count) + } +} + +mod alpha { + use lurk::{circuit::circuit_frame::MultiFrame, eval::empty_sym_env, ptr::Ptr, store::Store}; + + use super::*; + + fn fib(store: &Store, state: Rc>) -> Ptr { + let program = r#" + (letrec ((next (lambda (a b) (next b (+ a b)))) + (fib (next 0 1))) + (fib)) + "#; + + store.read_with_state(state, program).unwrap() + } + + pub fn prove( + prove_params: ProveParams, + c: &mut BenchmarkGroup<'_, M>, + state: Rc>, + ) { + let ProveParams { + folding_steps, + reduction_count, + version, + } = prove_params; + + assert_eq!(version, Version::ALPHA); + let limit = reduction_count * (folding_steps + 1); + + // Track the number of `folded iterations / sec` + c.throughput(criterion::Throughput::Elements( + (reduction_count * folding_steps) as u64, + )); + + let lang_pallas = Lang::>::new(); + let lang_rc = Arc::new(lang_pallas.clone()); + + // use cached public params + let instance = Instance::new( + reduction_count, + lang_rc.clone(), + true, + Kind::NovaPublicParams, + ); + let pp = public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); + let date = env!("VERGEN_GIT_COMMIT_DATE"); let sha = env!("VERGEN_GIT_SHA"); - format!("{date}:{sha}:Fibonacci-rc={}", self.reduction_count) + let parameter = format!("{},{},steps={}", date, sha, folding_steps); + + 
c.bench_with_input( + BenchmarkId::new(prove_params.name(), parameter), + &prove_params, + |b, prove_params| { + let store = Store::default(); + + let env = empty_sym_env(&store); + let ptr = fib::(&store, state.clone()); + let prover = NovaProver::new(prove_params.reduction_count, lang_pallas.clone()); + + let frames = &prover + .get_evaluation_frames(ptr, env, &store, limit, lang_rc.clone()) + .unwrap(); + + // Here we split the proving step by first generating the recursive snark, + // then have `criterion` only bench the rest of the folding steps + let (recursive_snark, circuits, z0, _zi, _num_steps) = prover + .recursive_snark(&pp, frames, &store, &lang_rc) + .unwrap(); + + b.iter_batched( + || (recursive_snark.clone(), z0.clone(), lang_rc.clone()), + |(recursive_snark, z0, lang_rc)| { + let result = Proof::prove_recursively( + &pp, + &store, + Some(recursive_snark), + &circuits, + reduction_count, + z0, + lang_rc, + ); + let _ = black_box(result); + }, + BatchSize::LargeInput, + ) + }, + ); } } -fn fibo_prove( - prove_params: ProveParams, - c: &mut BenchmarkGroup<'_, M>, - state: Rc>, -) { - let ProveParams { - fib_n, - reduction_count, - } = prove_params; - - let limit = fib_limit(fib_n, reduction_count); - let lang_pallas = Lang::>::new(); - let lang_rc = Arc::new(lang_pallas.clone()); - - // use cached public params - let instance = Instance::new( - reduction_count, - lang_rc.clone(), - true, - Kind::NovaPublicParams, - ); - let pp = - public_params::<_, _, MultiFrame<'_, _, _>>(&instance, Utf8Path::new(PUBLIC_PARAMS_PATH)) - .unwrap(); - - c.bench_with_input( - BenchmarkId::new(prove_params.name(), fib_n), - &prove_params, - |b, prove_params| { - let store = Store::default(); - - let env = empty_sym_env(&store); - let ptr = fib::( - &store, - state.clone(), - black_box(prove_params.fib_n as u64), - ); - let prover = NovaProver::new(prove_params.reduction_count, lang_pallas.clone()); - - let frames = &prover - .get_evaluation_frames(ptr, env, &store, 
limit, lang_rc.clone()) - .unwrap(); - - b.iter_batched( - || (frames, lang_rc.clone()), - |(frames, lang_rc)| { - let result = prover.prove(&pp, frames, &store, &lang_rc); - let _ = black_box(result); - }, - BatchSize::LargeInput, - ) - }, - ); +mod lem { + use lurk::lem::{eval::evaluate, multiframe::MultiFrame, pointers::Ptr, store::Store}; + + use super::*; + + fn fib(store: &Store, state: Rc>) -> Ptr { + let program = r#" +(letrec ((next (lambda (a b) (next b (+ a b)))) + (fib (next 0 1))) + (fib)) +"#; + + store.read(state, program).unwrap() + } + + pub fn prove( + prove_params: ProveParams, + c: &mut BenchmarkGroup<'_, M>, + state: Rc>, + ) { + let ProveParams { + folding_steps, + reduction_count, + version, + } = prove_params; + + assert_eq!(version, Version::LEM); + let limit = reduction_count * (folding_steps + 1); + + // Track the number of `folded iterations / sec` + c.throughput(criterion::Throughput::Elements( + (reduction_count * folding_steps) as u64, + )); + + let lang_pallas = Lang::>::new(); + let lang_rc = Arc::new(lang_pallas.clone()); + + // use cached public params + let instance: Instance< + '_, + pasta_curves::Fq, + Coproc, + MultiFrame<'_, pasta_curves::Fq, Coproc>, + > = Instance::new( + reduction_count, + lang_rc.clone(), + true, + Kind::NovaPublicParams, + ); + let pp = public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); + + let date = env!("VERGEN_GIT_COMMIT_DATE"); + let sha = env!("VERGEN_GIT_SHA"); + let parameter = format!("{},{},steps={}", date, sha, folding_steps); + + c.bench_with_input( + BenchmarkId::new(prove_params.name(), parameter), + &prove_params, + |b, prove_params| { + let store = Store::default(); + + let ptr = fib::(&store, state.clone()); + let prover = NovaProver::new(prove_params.reduction_count, lang_pallas.clone()); + + let frames = &evaluate::>( + None, ptr, &store, limit, + ) + .unwrap() + .0; + + // Here we split the proving step by first generating the recursive snark, + // then have `criterion` 
only bench the rest of the folding steps + let (recursive_snark, circuits, z0, _zi, _num_steps) = prover + .recursive_snark(&pp, frames, &store, &lang_rc) + .unwrap(); + + b.iter_batched( + || (recursive_snark.clone(), z0.clone(), lang_rc.clone()), + |(recursive_snark, z0, lang_rc)| { + let result = Proof::prove_recursively( + &pp, + &store, + Some(recursive_snark), + &circuits, + reduction_count, + z0, + lang_rc, + ); + let _ = black_box(result); + }, + BatchSize::LargeInput, + ) + }, + ); + } } -fn fibonacci_prove(c: &mut Criterion) { - tracing::debug!("{:?}", &*lurk::config::CONFIG); +fn fib_bench(c: &mut Criterion) { + set_bench_config(); + tracing::debug!("{:?}", lurk::config::LURK_CONFIG); let reduction_counts = [100, 600, 700, 800, 900]; - let batch_sizes = [100, 200]; - let mut group: BenchmarkGroup<'_, _> = c.benchmark_group("Prove"); + let folding_step_sizes = [2, 4, 8]; + + let mut group: BenchmarkGroup<'_, _> = c.benchmark_group("Fibonacci"); group.sampling_mode(SamplingMode::Flat); // This can take a *while* group.sample_size(10); + let state = State::init_lurk_state().rccell(); - for fib_n in batch_sizes.iter() { + for folding_steps in folding_step_sizes.iter() { + for reduction_count in reduction_counts.iter() { + let alpha_params = ProveParams { + folding_steps: *folding_steps, + reduction_count: *reduction_count, + version: Version::ALPHA, + }; + alpha::prove(alpha_params, &mut group, state.clone()); + } + } + + for folding_steps in folding_step_sizes.iter() { for reduction_count in reduction_counts.iter() { - let prove_params = ProveParams { - fib_n: *fib_n, + let lem_params = ProveParams { + folding_steps: *folding_steps, + reduction_count: *reduction_count, + version: Version::LEM, + }; - fibo_prove(prove_params, &mut group, state.clone()); + lem::prove(lem_params, &mut group, state.clone()); } } } @@ -148,7 +274,7 @@ cfg_if::cfg_if!
{ .sample_size(10) .with_profiler(pprof::criterion::PProfProfiler::new(100, pprof::criterion::Output::Flamegraph(None))); targets = - fibonacci_prove, + fib_bench, } } else { criterion_group! { @@ -157,7 +283,7 @@ cfg_if::cfg_if! { .measurement_time(Duration::from_secs(120)) .sample_size(10); targets = - fibonacci_prove, + fib_bench, } } } diff --git a/benches/fibonacci_lem.rs b/benches/fibonacci_lem.rs deleted file mode 100644 index 5a760faea9..0000000000 --- a/benches/fibonacci_lem.rs +++ /dev/null @@ -1,160 +0,0 @@ -use std::{cell::RefCell, rc::Rc, sync::Arc, time::Duration}; - -use camino::Utf8Path; -use criterion::{ - black_box, criterion_group, criterion_main, measurement, BatchSize, BenchmarkGroup, - BenchmarkId, Criterion, SamplingMode, -}; - -use pasta_curves::pallas; - -use lurk::{ - eval::lang::{Coproc, Lang}, - field::LurkField, - lem::{eval::evaluate, multiframe::MultiFrame, pointers::Ptr, store::Store}, - proof::nova::NovaProver, - proof::Prover, - public_parameters::{ - instance::{Instance, Kind}, - public_params, - }, - state::State, -}; - -const PUBLIC_PARAMS_PATH: &str = "/var/tmp/lurk_benches/public_params"; - -fn fib(store: &Store, state: Rc>, _a: u64) -> Ptr { - let program = r#" -(letrec ((next (lambda (a b) (next b (+ a b)))) - (fib (next 0 1))) - (fib)) -"#; - - store.read(state, program).unwrap() -} - -// The env output in the `fib_frame`th frame of the above, infinite Fibonacci computation will contain a binding of the -// nth Fibonacci number to `a`. -// means of computing it.] -fn fib_frame(n: usize) -> usize { - 11 + 16 * n -} - -// Set the limit so the last step will be filled exactly, since Lurk currently only pads terminal/error continuations. 
-fn fib_limit(n: usize, rc: usize) -> usize { - let frame = fib_frame(n); - rc * (frame / rc + usize::from(frame % rc != 0)) -} - -struct ProveParams { - fib_n: usize, - reduction_count: usize, -} - -impl ProveParams { - fn name(&self) -> String { - let date = env!("VERGEN_GIT_COMMIT_DATE"); - let sha = env!("VERGEN_GIT_SHA"); - format!("{date}:{sha}:Fibonacci-LEM-rc={}", self.reduction_count) - } -} - -fn fibo_prove( - prove_params: ProveParams, - c: &mut BenchmarkGroup<'_, M>, - state: Rc>, -) { - let ProveParams { - fib_n, - reduction_count, - } = prove_params; - - let limit = fib_limit(fib_n, reduction_count); - let lang_pallas = Lang::>::new(); - let lang_rc = Arc::new(lang_pallas.clone()); - - // use cached public params - let instance = Instance::new( - reduction_count, - lang_rc.clone(), - true, - Kind::NovaPublicParams, - ); - let pp = - public_params::<_, _, MultiFrame<'_, _, _>>(&instance, Utf8Path::new(PUBLIC_PARAMS_PATH)) - .unwrap(); - - c.bench_with_input( - BenchmarkId::new(prove_params.name(), fib_n), - &prove_params, - |b, prove_params| { - let store = Store::default(); - - let ptr = fib::( - &store, - state.clone(), - black_box(prove_params.fib_n as u64), - ); - let prover = NovaProver::new(prove_params.reduction_count, lang_pallas.clone()); - - let frames = - &evaluate::>(None, ptr, &store, limit) - .unwrap() - .0; - - b.iter_batched( - || (frames, lang_rc.clone()), - |(frames, lang_rc)| { - let result = prover.prove(&pp, frames, &store, &lang_rc); - let _ = black_box(result); - }, - BatchSize::LargeInput, - ) - }, - ); -} - -fn fibonacci_prove(c: &mut Criterion) { - tracing::debug!("{:?}", &*lurk::config::CONFIG); - let reduction_counts = [100, 600, 700, 800, 900]; - let batch_sizes = [100, 200]; - let mut group: BenchmarkGroup<'_, _> = c.benchmark_group("Prove"); - group.sampling_mode(SamplingMode::Flat); // This can take a *while* - group.sample_size(10); - let state = State::init_lurk_state().rccell(); - - for fib_n in batch_sizes.iter() { - 
for reduction_count in reduction_counts.iter() { - let prove_params = ProveParams { - fib_n: *fib_n, - reduction_count: *reduction_count, - }; - fibo_prove(prove_params, &mut group, state.clone()); - } - } -} - -cfg_if::cfg_if! { - if #[cfg(feature = "flamegraph")] { - criterion_group! { - name = benches; - config = Criterion::default() - .measurement_time(Duration::from_secs(120)) - .sample_size(10) - .with_profiler(pprof::criterion::PProfProfiler::new(100, pprof::criterion::Output::Flamegraph(None))); - targets = - fibonacci_prove, - } - } else { - criterion_group! { - name = benches; - config = Criterion::default() - .measurement_time(Duration::from_secs(120)) - .sample_size(10); - targets = - fibonacci_prove, - } - } -} - -criterion_main!(benches); diff --git a/benches/sha256.rs b/benches/sha256.rs index 382ec30f34..9902821fd5 100644 --- a/benches/sha256.rs +++ b/benches/sha256.rs @@ -5,8 +5,6 @@ //! //! Note: The example [example/sha256_ivc.rs] is this same benchmark but as an example //! that's easier to play with and run. 
- -use camino::Utf8Path; use criterion::{ black_box, criterion_group, criterion_main, measurement, BatchSize, BenchmarkGroup, BenchmarkId, Criterion, SamplingMode, @@ -29,7 +27,8 @@ use lurk::{ store::Store, }; -const PUBLIC_PARAMS_PATH: &str = "/var/tmp/lurk_benches/public_params"; +mod common; +use common::set_bench_config; fn sha256_ivc( store: &Store, @@ -108,7 +107,6 @@ fn sha256_ivc_prove( let lang_rc = Arc::new(lang.clone()); // use cached public params - let instance: Instance< '_, pasta_curves::Fq, @@ -120,9 +118,7 @@ fn sha256_ivc_prove( true, Kind::NovaPublicParams, ); - let pp = - public_params::<_, _, MultiFrame<'_, _, _>>(&instance, Utf8Path::new(PUBLIC_PARAMS_PATH)) - .unwrap(); + let pp = public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); c.bench_with_input( BenchmarkId::new(prove_params.name(), arity), @@ -156,7 +152,8 @@ fn sha256_ivc_prove( } fn ivc_prove_benchmarks(c: &mut Criterion) { - tracing::debug!("{:?}", &*lurk::config::CONFIG); + set_bench_config(); + tracing::debug!("{:?}", &lurk::config::LURK_CONFIG); let reduction_counts = [10, 100]; let batch_sizes = [1, 2, 5, 10, 20]; let mut group: BenchmarkGroup<'_, _> = c.benchmark_group("prove"); @@ -205,9 +202,7 @@ fn sha256_ivc_prove_compressed( true, Kind::NovaPublicParams, ); - let pp = - public_params::<_, _, MultiFrame<'_, _, _>>(&instance, Utf8Path::new(PUBLIC_PARAMS_PATH)) - .unwrap(); + let pp = public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); c.bench_with_input( BenchmarkId::new(prove_params.name(), arity), @@ -243,7 +238,8 @@ fn sha256_ivc_prove_compressed( } fn ivc_prove_compressed_benchmarks(c: &mut Criterion) { - tracing::debug!("{:?}", &*lurk::config::CONFIG); + set_bench_config(); + tracing::debug!("{:?}", &lurk::config::LURK_CONFIG); let reduction_counts = [10, 100]; let batch_sizes = [1, 2, 5, 10, 20]; let mut group: BenchmarkGroup<'_, _> = c.benchmark_group("prove_compressed"); @@ -292,11 +288,7 @@ fn sha256_nivc_prove( true, 
Kind::SuperNovaAuxParams, ); - let pp = supernova_public_params::<_, _, MultiFrame<'_, _, _>>( - &instance, - Utf8Path::new(PUBLIC_PARAMS_PATH), - ) - .unwrap(); + let pp = supernova_public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); c.bench_with_input( BenchmarkId::new(prove_params.name(), arity), @@ -330,7 +322,8 @@ fn sha256_nivc_prove( } fn nivc_prove_benchmarks(c: &mut Criterion) { - tracing::debug!("{:?}", &*lurk::config::CONFIG); + set_bench_config(); + tracing::debug!("{:?}", &lurk::config::LURK_CONFIG); let reduction_counts = [10, 100]; let batch_sizes = [1, 2, 5, 10, 20]; let mut group: BenchmarkGroup<'_, _> = c.benchmark_group("prove"); diff --git a/benches/sha256_lem.rs b/benches/sha256_lem.rs index 67bbd05903..aa6c5c5655 100644 --- a/benches/sha256_lem.rs +++ b/benches/sha256_lem.rs @@ -6,7 +6,6 @@ //! Note: The example [example/sha256_ivc.rs] is this same benchmark but as an example //! that's easier to play with and run. -use camino::Utf8Path; use criterion::{ black_box, criterion_group, criterion_main, measurement, BatchSize, BenchmarkGroup, BenchmarkId, Criterion, SamplingMode, @@ -32,7 +31,8 @@ use lurk::{ state::{user_sym, State}, }; -const PUBLIC_PARAMS_PATH: &str = "/var/tmp/lurk_benches/public_params"; +mod common; +use common::set_bench_config; fn sha256_ivc( store: &Store, @@ -111,16 +111,13 @@ fn sha256_ivc_prove( let lurk_step = make_eval_step_from_lang(&lang, true); // use cached public params - let instance: Instance<'_, Fr, Sha256Coproc, MultiFrame<'_, _, _>> = Instance::new( reduction_count, lang_rc.clone(), true, Kind::NovaPublicParams, ); - let pp = - public_params::<_, _, MultiFrame<'_, _, _>>(&instance, Utf8Path::new(PUBLIC_PARAMS_PATH)) - .unwrap(); + let pp = public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); c.bench_with_input( BenchmarkId::new(prove_params.name(), arity), @@ -153,7 +150,8 @@ fn sha256_ivc_prove( } fn ivc_prove_benchmarks(c: &mut Criterion) { - tracing::debug!("{:?}", 
&*lurk::config::CONFIG); + set_bench_config(); + tracing::debug!("{:?}", &lurk::config::LURK_CONFIG); let reduction_counts = [10, 100]; let batch_sizes = [1, 2, 5, 10, 20]; let mut group: BenchmarkGroup<'_, _> = c.benchmark_group("prove"); @@ -202,9 +200,7 @@ fn sha256_ivc_prove_compressed( true, Kind::NovaPublicParams, ); - let pp = - public_params::<_, _, MultiFrame<'_, _, _>>(&instance, Utf8Path::new(PUBLIC_PARAMS_PATH)) - .unwrap(); + let pp = public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); c.bench_with_input( BenchmarkId::new(prove_params.name(), arity), @@ -239,7 +235,8 @@ fn sha256_ivc_prove_compressed( } fn ivc_prove_compressed_benchmarks(c: &mut Criterion) { - tracing::debug!("{:?}", &*lurk::config::CONFIG); + set_bench_config(); + tracing::debug!("{:?}", &lurk::config::LURK_CONFIG); let reduction_counts = [10, 100]; let batch_sizes = [1, 2, 5, 10, 20]; let mut group: BenchmarkGroup<'_, _> = c.benchmark_group("prove_compressed"); @@ -288,11 +285,7 @@ fn sha256_nivc_prove( true, Kind::SuperNovaAuxParams, ); - let pp = supernova_public_params::<_, _, MultiFrame<'_, _, _>>( - &instance, - Utf8Path::new(PUBLIC_PARAMS_PATH), - ) - .unwrap(); + let pp = supernova_public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); c.bench_with_input( BenchmarkId::new(prove_params.name(), arity), @@ -325,7 +318,8 @@ fn sha256_nivc_prove( } fn nivc_prove_benchmarks(c: &mut Criterion) { - tracing::debug!("{:?}", &*lurk::config::CONFIG); + set_bench_config(); + tracing::debug!("{:?}", &lurk::config::LURK_CONFIG); let reduction_counts = [10, 100]; let batch_sizes = [1, 2, 5, 10, 20]; let mut group: BenchmarkGroup<'_, _> = c.benchmark_group("prove"); diff --git a/clutch/src/lib.rs b/clutch/src/lib.rs index f59d2b1dbc..49c8c4ca36 100644 --- a/clutch/src/lib.rs +++ b/clutch/src/lib.rs @@ -1,12 +1,10 @@ #![doc = include_str!("../README.md")] use anyhow::{anyhow, bail, Context, Error, Result}; -use camino::Utf8PathBuf; use clap::{Arg, ArgAction, Command}; use 
fcomm::{ - committed_expression_store, file_map::data_dir, nova_proof_cache, Claim, Commitment, - CommittedExpression, CommittedExpressionMap, LurkCont, LurkPtr, NovaProofCache, Opening, Proof, - PtrEvaluation, + committed_expression_store, nova_proof_cache, Claim, Commitment, CommittedExpression, + CommittedExpressionMap, LurkCont, LurkPtr, NovaProofCache, Opening, Proof, PtrEvaluation, }; use lurk::circuit::circuit_frame::MultiFrame; use lurk::lurk_sym_ptr; @@ -41,10 +39,6 @@ use std::thread; const DEFAULT_REDUCTION_COUNT: usize = 10; -pub fn public_param_dir() -> Utf8PathBuf { - data_dir().join("public_params") -} - #[derive(Clone, Debug)] struct Demo { inputs: Vec, @@ -145,9 +139,7 @@ impl ReplTrait> for ClutchState> { let lang_rc = Arc::new(lang.clone()); let instance = Instance::new(reduction_count, lang_rc, true, Kind::NovaPublicParams); // Load params from disk cache, or generate them in the background. - thread::spawn(move || { - public_params::<_, _, MultiFrame<'_, _, Coproc<_>>>(&instance, &public_param_dir()) - }); + thread::spawn(move || public_params::<_, _, MultiFrame<'_, _, Coproc<_>>>(&instance)); Self { repl_state: ReplState::new(s, limit, command, lang), @@ -527,7 +519,7 @@ impl ClutchState> { true, Kind::NovaPublicParams, ); - let pp = public_params(&instance, &public_param_dir())?; + let pp = public_params(&instance)?; let proof = if rest.is_nil() { self.last_claim @@ -592,7 +584,7 @@ impl ClutchState> { true, Kind::NovaPublicParams, ); - let pp = public_params(&instance, &public_param_dir())?; + let pp = public_params(&instance)?; let result = proof.verify(&pp, &self.lang()).unwrap(); if result.verified { diff --git a/examples/circom.rs b/examples/circom.rs index 08c8666ca1..f644e17bc2 100644 --- a/examples/circom.rs +++ b/examples/circom.rs @@ -32,19 +32,20 @@ use std::marker::PhantomData; use std::sync::Arc; use std::time::Instant; -use lurk::circuit::circuit_frame::MultiFrame; use lurk::circuit::gadgets::circom::CircomGadget; use 
lurk::circuit::gadgets::pointer::AllocatedPtr; +use lurk::lem::multiframe::MultiFrame; #[cfg(not(target_arch = "wasm32"))] use lurk::coprocessor::circom::non_wasm::CircomCoprocessor; -use lurk::eval::{empty_sym_env, lang::Lang}; +use lurk::eval::lang::Lang; use lurk::field::LurkField; +use lurk::lem::{pointers::Ptr as LEMPtr, store::Store as LEMStore}; use lurk::proof::{nova::NovaProver, Prover}; use lurk::ptr::Ptr; use lurk::public_parameters::instance::{Instance, Kind}; -use lurk::public_parameters::{public_params, public_params_default_dir}; +use lurk::public_parameters::public_params; use lurk::store::Store; use lurk::{Num, Symbol}; use lurk_macros::Coproc; @@ -89,6 +90,16 @@ impl CircomGadget for CircomSha256 { ); s.intern_num(expected) } + + fn simple_evaluate_lem(&self, _s: &LEMStore, _args: &[LEMPtr]) -> LEMPtr { + // TODO: actually use the lurk inputs + LEMPtr::num( + F::from_str_vartime( + "55165702627807990590530466439275329993482327026534454077267643456", + ) + .unwrap(), + ) + } } #[derive(Clone, Debug, Coproc)] @@ -100,21 +111,14 @@ enum Sha256Coproc { /// `cargo run --release -- circom --name sha256_2 examples/sha256/` /// `cargo run --release --example circom` fn main() { - let store = &mut Store::::new(); + let store = &LEMStore::::default(); let sym_str = Symbol::new(&[".circom_sha256_2"], false); // two inputs - let circom_sha256 = CircomSha256::new(0); - let lang = Lang::>::new_with_bindings( - store, - vec![( - sym_str.clone(), - CircomCoprocessor::new(circom_sha256).into(), - )], - ); + let circom_sha256: CircomSha256 = CircomSha256::new(0); + let mut lang = Lang::>::new(); + lang.add_coprocessor_lem(sym_str, CircomCoprocessor::new(circom_sha256), store); - let coproc_expr = format!("{sym_str}"); - - let expr = format!("({coproc_expr})"); - let ptr = store.read(&expr).unwrap(); + let expr = "(.circom_sha256_2)".to_string(); + let ptr = store.read_with_default_state(&expr).unwrap(); let nova_prover = NovaProver::, MultiFrame<'_, _, _>>::new( 
REDUCTION_COUNT, @@ -131,8 +135,7 @@ fn main() { true, Kind::NovaPublicParams, ); - let pp = public_params::<_, _, MultiFrame<'_, _, _>>(&instance, &public_params_default_dir()) - .unwrap(); + let pp = public_params::<_, _, MultiFrame<'_, _, _>>(&instance).unwrap(); let pp_end = pp_start.elapsed(); println!("Public parameters took {pp_end:?}"); @@ -141,7 +144,7 @@ fn main() { let proof_start = Instant::now(); let (proof, z0, zi, num_steps) = nova_prover - .evaluate_and_prove(&pp, ptr, empty_sym_env(store), store, 10000, &lang_rc) + .evaluate_and_prove(&pp, ptr, store.intern_nil(), store, 10000, &lang_rc) .unwrap(); let proof_end = proof_start.elapsed(); diff --git a/examples/sha256_ivc.rs b/examples/sha256_ivc.rs index b0452c9ba5..0e2e035883 100644 --- a/examples/sha256_ivc.rs +++ b/examples/sha256_ivc.rs @@ -12,7 +12,7 @@ use lurk::{ ptr::Ptr, public_parameters::{ instance::{Instance, Kind}, - public_params, public_params_default_dir, + public_params, }, state::user_sym, store::Store, @@ -88,7 +88,7 @@ fn main() { Kind::NovaPublicParams, ); // see the documentation on `with_public_params` - let pp = public_params(&instance, &public_params_default_dir()).unwrap(); + let pp = public_params(&instance).unwrap(); let pp_end = pp_start.elapsed(); println!("Public parameters took {:?}", pp_end); diff --git a/examples/sha256_nivc.rs b/examples/sha256_nivc.rs index 30818ef9d6..8c5f8ae25b 100644 --- a/examples/sha256_nivc.rs +++ b/examples/sha256_nivc.rs @@ -12,7 +12,7 @@ use lurk::{ ptr::Ptr, public_parameters::{ instance::{Instance, Kind}, - public_params_default_dir, supernova_public_params, + supernova_public_params, }, state::user_sym, store::Store, @@ -87,11 +87,7 @@ fn main() { true, Kind::SuperNovaAuxParams, ); - let pp = supernova_public_params::<_, _, MultiFrame<'_, _, _>>( - &instance_primary, - &public_params_default_dir(), - ) - .unwrap(); + let pp = supernova_public_params::<_, _, MultiFrame<'_, _, _>>(&instance_primary).unwrap(); let pp_end = 
pp_start.elapsed(); println!("Running claim parameters took {:?}", pp_end); diff --git a/examples/sha256_nivc_lem.rs b/examples/sha256_nivc_lem.rs index 4abfb29652..def64ef586 100644 --- a/examples/sha256_nivc_lem.rs +++ b/examples/sha256_nivc_lem.rs @@ -16,7 +16,7 @@ use lurk::{ proof::{supernova::SuperNovaProver, Prover}, public_parameters::{ instance::{Instance, Kind}, - public_params_default_dir, supernova_public_params, + supernova_public_params, }, state::user_sym, }; @@ -91,11 +91,7 @@ fn main() { true, Kind::SuperNovaAuxParams, ); - let pp = supernova_public_params::<_, _, MultiFrame<'_, _, _>>( - &instance_primary, - &public_params_default_dir(), - ) - .unwrap(); + let pp = supernova_public_params::<_, _, MultiFrame<'_, _, _>>(&instance_primary).unwrap(); let pp_end = pp_start.elapsed(); println!("Running claim parameters took {:?}", pp_end); diff --git a/fcomm/src/bin/fcomm.rs b/fcomm/src/bin/fcomm.rs index 497cf286ed..41c3bedba8 100644 --- a/fcomm/src/bin/fcomm.rs +++ b/fcomm/src/bin/fcomm.rs @@ -1,4 +1,5 @@ use abomonation::Abomonation; +use fcomm::file_map::data_dir; use lurk::circuit::circuit_frame::MultiFrame; use lurk::lurk_sym_ptr; use lurk::proof::nova::{CurveCycleEquipped, G1, G2}; @@ -31,9 +32,9 @@ use clap::{Args, Parser, Subcommand}; use clap_verbosity_flag::{Verbosity, WarnLevel}; use fcomm::{ - committed_expression_store, error::Error, evaluate, file_map::FileStore, public_param_dir, - Claim, Commitment, CommittedExpression, Evaluation, Expression, LurkPtr, Opening, - OpeningRequest, Proof, ReductionCount, S1, + committed_expression_store, error::Error, evaluate, file_map::FileStore, Claim, Commitment, + CommittedExpression, Evaluation, Expression, LurkPtr, Opening, OpeningRequest, Proof, + ReductionCount, S1, }; use lurk::public_parameters::public_params; @@ -239,7 +240,7 @@ impl Open { ); let lang_rc = Arc::new(lang.clone()); let instance = Instance::new(rc.count(), lang_rc, true, Kind::NovaPublicParams); - let pp = 
public_params(&instance, &public_param_dir()).expect("public params"); + let pp = public_params(&instance).expect("public params"); let function_map = committed_expression_store(); let handle_proof = |out_path, proof: Proof<'_, S1>| { @@ -347,7 +348,7 @@ impl Prove { ); let lang_rc = Arc::new(lang.clone()); let instance = Instance::new(rc.count(), lang_rc.clone(), true, Kind::NovaPublicParams); - let pp = public_params(&instance, &public_param_dir()).unwrap(); + let pp = public_params(&instance).unwrap(); let proof = match &self.claim { Some(claim) => { @@ -399,7 +400,7 @@ impl Verify { true, Kind::NovaPublicParams, ); - let pp = public_params(&instance, &public_param_dir()).unwrap(); + let pp = public_params(&instance).unwrap(); let result = proof.verify(&pp, lang).unwrap(); serde_json::to_writer(io::stdout(), &result).unwrap(); @@ -536,6 +537,8 @@ fn main() { .with(EnvFilter::from_default_env()); tracing::subscriber::set_global_default(subscriber).unwrap(); + std::env::set_var("LURK_PUBLIC_PARAMS_DIR", data_dir().join("public_params")); + // TODO: make this properly configurable, e.g. 
allowing coprocessors let lang = Lang::new(); diff --git a/fcomm/src/lib.rs b/fcomm/src/lib.rs index d16b698572..09d282389c 100644 --- a/fcomm/src/lib.rs +++ b/fcomm/src/lib.rs @@ -41,7 +41,6 @@ use lurk_macros::serde_test; #[cfg(not(target_arch = "wasm32"))] use lurk::z_data; -use camino::Utf8PathBuf; use lurk::{error::ReductionError, proof::nova::CurveCycleEquipped}; use once_cell::sync::OnceCell; use pasta_curves::pallas; @@ -49,7 +48,7 @@ use rand::rngs::OsRng; use serde::de::DeserializeOwned; use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use crate::file_map::{data_dir, FileMap}; +use crate::file_map::FileMap; pub mod error; pub mod file_map; @@ -86,10 +85,6 @@ pub fn committed_expression_store() -> CommittedExpressionMap { FileMap::, CommittedExpression>::new("committed_expressions").unwrap() } -pub fn public_param_dir() -> Utf8PathBuf { - data_dir().join("public_params") -} - // Number of circuit reductions per step, equivalent to `chunk_frame_count` #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)] pub enum ReductionCount { @@ -1190,10 +1185,15 @@ mod test { let tmp_dir = Builder::new().prefix("tmp").tempdir().expect("tmp dir"); let tmp_dir_path = Utf8Path::from_path(tmp_dir.path()).unwrap(); let fcomm_path_val = tmp_dir_path.join("fcomm_data"); - std::env::set_var(fcomm_path_key, fcomm_path_val.clone()); + + std::env::set_var(fcomm_path_key, &fcomm_path_val); + std::env::set_var( + "LURK_PUBLIC_PARAMS_DIR", + fcomm_path_val.join("public_params"), + ); assert_eq!( - std::env::var(fcomm_path_key), - Ok(fcomm_path_val.clone().into_string()) + std::env::var(fcomm_path_key).unwrap(), + fcomm_path_val.as_str() ); let function_source = "(letrec ((secret 12345) (a (lambda (acc x) (let ((acc (+ acc x))) (cons acc (hide secret (a acc))))))) (a 0))"; @@ -1210,8 +1210,7 @@ mod test { let lang_rc = Arc::new(lang.clone()); let rc = ReductionCount::One; let instance = Instance::new(rc.count(), lang_rc.clone(), true, 
Kind::NovaPublicParams); - let pp = - public_params(&instance, &fcomm_path_val.join("public_params")).expect("public params"); + let pp = public_params(&instance).expect("public params"); let chained = true; let s = &mut Store::::default(); diff --git a/src/circuit/circuit_frame.rs b/src/circuit/circuit_frame.rs index 1e0566f0f4..392ae4c83e 100644 --- a/src/circuit/circuit_frame.rs +++ b/src/circuit/circuit_frame.rs @@ -15,7 +15,7 @@ use crate::{ data::GlobalAllocations, pointer::{AllocatedContPtr, AllocatedPtr, AsAllocatedHashComponents}, }, - config::CONFIG, + config::lurk_config, eval::{empty_sym_env, lang::Lang}, field::LurkField, hash::HashConst, @@ -481,7 +481,13 @@ impl<'a, F: LurkField, C: Coprocessor> MultiFrame<'a, F, C> { frames: &[CircuitFrame<'_, F, C>], g: &GlobalAllocations, ) -> (AllocatedPtr, AllocatedPtr, AllocatedContPtr) { - if cs.is_witness_generator() && CONFIG.parallelism.synthesis.is_parallel() { + if cs.is_witness_generator() + && lurk_config(None, None) + .perf + .parallelism + .synthesis + .is_parallel() + { self.synthesize_frames_parallel(cs, store, input_expr, input_env, input_cont, frames, g) } else { self.synthesize_frames_sequential( @@ -586,14 +592,16 @@ impl<'a, F: LurkField, C: Coprocessor> MultiFrame<'a, F, C> { g: &GlobalAllocations, ) -> (AllocatedPtr, AllocatedPtr, AllocatedContPtr) { assert!(cs.is_witness_generator()); - assert!(CONFIG.parallelism.synthesis.is_parallel()); + let config = lurk_config(None, None); + assert!(config.perf.parallelism.synthesis.is_parallel()); // TODO: this probably belongs in config, perhaps per-Flow. 
const MIN_CHUNK_SIZE: usize = 10; let num_frames = frames.len(); - let chunk_size = CONFIG + let chunk_size = config + .perf .parallelism .synthesis .chunk_size(num_frames, MIN_CHUNK_SIZE); @@ -652,7 +660,7 @@ impl<'a, F: LurkField, C: Coprocessor> MultiFrame<'a, F, C> { }; } - if CONFIG.parallelism.poseidon_witnesses.is_parallel() { + if config.perf.parallelism.poseidon_witnesses.is_parallel() { chunk.par_iter().map(f!()).collect::>() } else { chunk.iter().map(f!()).collect::>() diff --git a/src/circuit/gadgets/circom/mod.rs b/src/circuit/gadgets/circom/mod.rs index c3912f898d..8f756ca66f 100644 --- a/src/circuit/gadgets/circom/mod.rs +++ b/src/circuit/gadgets/circom/mod.rs @@ -2,7 +2,12 @@ //! //! See `examples/circom.rs` for a quick example of how to declare a circom coprocessor. -use crate::{field::LurkField, ptr::Ptr, store::Store}; +use crate::{ + field::LurkField, + lem::{pointers::Ptr as LEMPtr, store::Store as LEMStore}, + ptr::Ptr, + store::Store, +}; use super::pointer::AllocatedPtr; @@ -23,4 +28,6 @@ pub trait CircomGadget: Send + Sync + Clone { fn into_circom_input(self, input: &[AllocatedPtr]) -> Vec<(String, Vec)>; fn simple_evaluate(&self, s: &Store, args: &[Ptr]) -> Ptr; + + fn simple_evaluate_lem(&self, s: &LEMStore, args: &[LEMPtr]) -> LEMPtr; } diff --git a/src/circuit/gadgets/hashes.rs b/src/circuit/gadgets/hashes.rs index 38f3bb3c46..5020fdc95d 100644 --- a/src/circuit/gadgets/hashes.rs +++ b/src/circuit/gadgets/hashes.rs @@ -8,7 +8,7 @@ use neptune::circuit2_witness::{poseidon_hash_allocated_witness, poseidon_hash_s use crate::circuit::gadgets::pointer::{AllocatedPtr, AsAllocatedHashComponents}; -use crate::config::CONFIG; +use crate::config::lurk_config; use crate::field::{FWrap, LurkField}; use crate::hash::{HashConst, HashConstants}; use crate::hash_witness::{ @@ -280,12 +280,16 @@ impl<'a, F: LurkField> AllocatedConsWitness<'a, F> { let names_and_ptrs = cons_circuit_witness.names_and_ptrs(s); let cons_constants: HashConst<'_, F> = 
s.poseidon_constants().constants(4.into()); - let circuit_witness_blocks = - if cs.is_witness_generator() && CONFIG.witness_generation.precompute_neptune { - Some(cons_circuit_witness.circuit_witness_blocks(s, cons_constants)) - } else { - None - }; + let circuit_witness_blocks = if cs.is_witness_generator() + && lurk_config(None, None) + .perf + .witness_generation + .precompute_neptune + { + Some(cons_circuit_witness.circuit_witness_blocks(s, cons_constants)) + } else { + None + }; for (i, (name, spr)) in names_and_ptrs.iter().enumerate() { let cs = &mut cs.namespace(|| format!("slot-{i}")); @@ -403,12 +407,16 @@ impl<'a, F: LurkField> AllocatedContWitness<'a, F> { let names_and_ptrs = cont_circuit_witness.names_and_ptrs(s); let cont_constants: HashConst<'_, F> = s.poseidon_constants().constants(8.into()); - let circuit_witness_blocks = - if cs.is_witness_generator() && CONFIG.witness_generation.precompute_neptune { - Some(cont_circuit_witness.circuit_witness_blocks(s, cont_constants)) - } else { - None - }; + let circuit_witness_blocks = if cs.is_witness_generator() + && lurk_config(None, None) + .perf + .witness_generation + .precompute_neptune + { + Some(cont_circuit_witness.circuit_witness_blocks(s, cont_constants)) + } else { + None + }; for (i, (name, spr)) in names_and_ptrs.iter().enumerate() { let cs = &mut cs.namespace(|| format!("slot-{i}")); diff --git a/src/cli/backend.rs b/src/cli/backend.rs index a2bc397805..77f7e69137 100644 --- a/src/cli/backend.rs +++ b/src/cli/backend.rs @@ -1,8 +1,12 @@ use anyhow::{bail, Result}; +use clap::ValueEnum; +use serde::Deserialize; use crate::field::LanguageField; +#[derive(Clone, Default, Debug, Deserialize, ValueEnum, PartialEq, Eq)] pub enum Backend { + #[default] Nova, } @@ -15,12 +19,6 @@ impl std::fmt::Display for Backend { } impl Backend { - pub(crate) fn default_field(&self) -> LanguageField { - match self { - Self::Nova => LanguageField::Pallas, - } - } - fn compatible_fields(&self) -> Vec { use 
LanguageField::{Pallas, Vesta}; match self { diff --git a/src/cli/config.rs b/src/cli/config.rs new file mode 100644 index 0000000000..7d2cdc5acc --- /dev/null +++ b/src/cli/config.rs @@ -0,0 +1,181 @@ +//! Global config for the CLI +//! Includes settings for cache locations and proof backend +use std::collections::HashMap; + +use crate::config::{lurk_config_file, Settings, LURK_CONFIG}; +use crate::field::LanguageField; +use camino::Utf8PathBuf; +use config::{Config, ConfigError, Environment, File}; +use once_cell::sync::OnceCell; +use serde::Deserialize; + +use super::backend::Backend; +use super::paths::{circom_default_dir, commits_default_dir, proofs_default_dir}; + +/// Global config varable for `CliSettings` +pub static CLI_CONFIG: OnceCell = OnceCell::new(); + +/// Gets the `CLI_CONFIG` settings. If uninitialized, sets the global variable +/// in the following order (greatest to least precedence): +/// - `settings` map if provided, e.g. with key ("proofs", "$HOME/lurk-rs/proofs") +/// This contains any CLI args, set e.g. by `lurk --proofs-dir /path/to/proofs_dir` +/// - Env var per setting, e.g. `LURK_PROOFS_DIR` +/// - Config file, which also has a configurable location (see `lurk_config_file()`), +/// and has the following syntax for e.g. TOML: +/// ```toml +/// proofs_dir = "/path/to/proofs_dir" +/// ``` +/// Other file formats are supported by the `config` crate, but only TOML is tested +/// - Default values, e.g. `$HOME/.lurk/proofs` +pub fn cli_config( + config_file: Option<&Utf8PathBuf>, + settings: Option<&HashMap<&str, String>>, +) -> &'static CliSettings { + LURK_CONFIG + .set(Settings::from_config(lurk_config_file(config_file), settings).unwrap_or_default()) + .unwrap_or(()); + CLI_CONFIG.get_or_init(|| { + CliSettings::from_config(lurk_config_file(config_file), settings).unwrap_or_default() + }) +} + +/// Contains the CLI configuration settings +// NOTE: Config settings share the same file for both the Lurk library and the CLI. 
+// It's good practice to avoid duplication of shared settings like `public_params_dir` +// in downstream configs like these to prevent conflicts. +#[derive(Debug, Deserialize)] +pub struct CliSettings { + /// Cache directory for proofs + pub proofs_dir: Utf8PathBuf, + /// Cache directory for commitments + pub commits_dir: Utf8PathBuf, + /// Cache directory for Circom files + pub circom_dir: Utf8PathBuf, + /// Proof generation and verification system + pub backend: Backend, + /// Finite field used for evaluation and proving + pub field: LanguageField, + /// Reduction count, which is the number of circuit reductions per step + pub rc: usize, + /// Iteration limit for the program, which is arbitrary to user preferences + /// Used mainly as a safety check, similar to default stack size + pub limit: usize, +} + +impl CliSettings { + /// Loads config settings from a file or env var, or CLI arg if applicable + pub fn from_config( + config_file: &Utf8PathBuf, + cli_settings: Option<&HashMap<&str, String>>, + ) -> Result { + let (proofs, commits, circom, backend, field, rc, limit) = ( + "proofs_dir", + "commits_dir", + "circom_dir", + "backend", + "field", + "rc", + "limit", + ); + Config::builder() + .set_default(proofs, proofs_default_dir().to_string())? + .set_default(commits, commits_default_dir().to_string())? + .set_default(circom, circom_default_dir().to_string())? + .set_default(backend, Backend::default().to_string())? + .set_default(field, LanguageField::default().to_string())? + .set_default(rc, 10)? + .set_default(limit, 100_000_000)? + .add_source(File::with_name(config_file.as_str()).required(false)) + // Then overwrite with any `LURK` environment variables + .add_source(Environment::with_prefix("LURK")) + // TODO: Derive config::Source for `cli_settings` and use `add_source` instead + .set_override_option(proofs, cli_settings.and_then(|s| s.get(proofs).map(|v| v.to_owned())))? 
+ .set_override_option(commits, cli_settings.and_then(|s| s.get(commits).map(|v| v.to_owned())))? + .set_override_option(circom, cli_settings.and_then(|s| s.get(circom).map(|v| v.to_owned())))? + .set_override_option(backend, cli_settings.and_then(|s| s.get(backend).map(|v| v.to_owned())))? + .set_override_option(field, cli_settings.and_then(|s| s.get(field).map(|v| v.to_owned())))? + .set_override_option(rc, cli_settings.and_then(|s| s.get(rc).map(|v| v.to_owned())))? + .set_override_option(limit, cli_settings.and_then(|s| s.get(limit).map(|v| v.to_owned())))? + .build() + .and_then(|c| c.try_deserialize()) + } +} + +impl Default for CliSettings { + fn default() -> Self { + Self { + proofs_dir: proofs_default_dir(), + commits_dir: commits_default_dir(), + circom_dir: circom_default_dir(), + backend: Backend::default(), + field: LanguageField::default(), + rc: 10, + limit: 100_000_000, + } + } +} + +#[cfg(test)] +mod tests { + use camino::Utf8Path; + use std::io::prelude::*; + use tempfile::Builder; + + use crate::cli::backend::Backend; + use crate::cli::config::CliSettings; + use crate::config::Settings; + use crate::field::LanguageField; + + // Tests a generic config file with identical syntax to that used in `CLI_CONFIG` + #[test] + fn test_config_cli() { + let tmp_dir = Builder::new().prefix("tmp").tempdir().unwrap(); + let tmp_dir = Utf8Path::from_path(tmp_dir.path()).unwrap(); + let config_dir = tmp_dir.join("lurk.toml"); + let public_params_dir = tmp_dir.join("public_params").into_string(); + let proofs_dir = tmp_dir.join("proofs").into_string(); + let commits_dir = tmp_dir.join("commits").into_string(); + let circom_dir = tmp_dir.join("circom").into_string(); + let backend = "Nova"; + let field = "Pallas"; + let rc = 100; + let limit = 100_000; + + let mut config_file = std::fs::File::create(config_dir.clone()).unwrap(); + config_file + .write_all(format!("public_params_dir = \"{public_params_dir}\"\n").as_bytes()) + .unwrap(); + config_file + 
.write_all(format!("proofs_dir = \"{proofs_dir}\"\n").as_bytes()) + .unwrap(); + config_file + .write_all(format!("commits_dir = \"{commits_dir}\"\n").as_bytes()) + .unwrap(); + config_file + .write_all(format!("circom_dir = \"{circom_dir}\"\n").as_bytes()) + .unwrap(); + config_file + .write_all(format!("backend = \"{backend}\"\n").as_bytes()) + .unwrap(); + config_file + .write_all(format!("field = \"{field}\"\n").as_bytes()) + .unwrap(); + config_file + .write_all(format!("rc = {rc}\n").as_bytes()) + .unwrap(); + config_file + .write_all(format!("limit = {limit}\n").as_bytes()) + .unwrap(); + + let cli_config = CliSettings::from_config(&config_dir, None).unwrap(); + let lurk_config = Settings::from_config(&config_dir, None).unwrap(); + assert_eq!(lurk_config.public_params_dir, public_params_dir); + assert_eq!(cli_config.proofs_dir, proofs_dir); + assert_eq!(cli_config.commits_dir, commits_dir); + assert_eq!(cli_config.circom_dir, circom_dir); + assert_eq!(cli_config.backend, Backend::Nova); + assert_eq!(cli_config.field, LanguageField::Pallas); + assert_eq!(cli_config.rc, rc); + assert_eq!(cli_config.limit, limit); + } +} diff --git a/src/cli/lurk_proof.rs b/src/cli/lurk_proof.rs index 04a830e68d..f1e085b6da 100644 --- a/src/cli/lurk_proof.rs +++ b/src/cli/lurk_proof.rs @@ -26,7 +26,7 @@ use crate::{ use super::{ field_data::{dump, load, HasFieldModulus}, - paths::{proof_meta_path, proof_path, public_params_dir}, + paths::{proof_meta_path, proof_path}, zstore::ZStore, }; @@ -144,7 +144,7 @@ where } => { tracing::info!("Loading public parameters"); let instance = Instance::new(rc, Arc::new(lang), true, Kind::NovaPublicParams); - let pp = public_params(&instance, &public_params_dir())?; + let pp = public_params(&instance)?; Ok(proof.verify(&pp, num_steps, &public_inputs, &public_outputs)?) 
} } diff --git a/src/cli/mod.rs b/src/cli/mod.rs index 9f77fb9476..87fb93fa25 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -1,6 +1,7 @@ pub mod backend; mod circom; mod commitment; +pub mod config; mod field_data; mod lurk_proof; pub mod paths; @@ -10,7 +11,6 @@ mod zstore; use anyhow::{bail, Context, Result}; use camino::Utf8PathBuf; use clap::{Args, Parser, Subcommand}; -use config::{Config, Environment, File}; use pasta_curves::pallas; use std::{ @@ -24,20 +24,19 @@ use crate::{ eval::lang::Coproc, field::{LanguageField, LurkField}, lem::{multiframe::MultiFrame, store::Store}, + public_parameters::disk_cache::public_params_dir, public_parameters::instance::Metadata, }; use crate::cli::{ - paths::set_lurk_dirs, + backend::Backend, + config::cli_config, + paths::create_lurk_dirs, repl::{validate_non_zero, Repl}, zstore::ZStore, }; -use self::{backend::Backend, field_data::load, paths::public_params_dir}; - -const DEFAULT_LIMIT: usize = 100_000_000; -const DEFAULT_RC: usize = 10; -const DEFAULT_BACKEND: Backend = Backend::Nova; +use self::field_data::load; #[derive(Parser, Debug)] #[clap(version)] @@ -89,12 +88,12 @@ struct LoadArgs { limit: Option, /// Prover backend (defaults to "Nova") - #[clap(long, value_parser)] - backend: Option, + #[clap(long, value_enum)] + backend: Option, /// Arithmetic field (defaults to the backend's standard field) - #[clap(long, value_parser)] - field: Option, + #[clap(long, value_enum)] + field: Option, /// Path to public parameters directory #[clap(long, value_parser)] @@ -133,11 +132,11 @@ struct LoadCli { #[clap(long, value_parser)] limit: Option, - #[clap(long, value_parser)] - backend: Option, + #[clap(long, value_enum)] + backend: Option, - #[clap(long, value_parser)] - field: Option, + #[clap(long, value_enum)] + field: Option, #[clap(long, value_parser)] public_params_dir: Option, @@ -173,14 +172,14 @@ impl LoadArgs { #[derive(Args, Debug)] struct ReplArgs { - /// ZStore to be preloaded before entering the REPL (and 
loading a file) - #[clap(long, value_parser)] - zstore: Option, - /// Optional file to be loaded before entering the REPL #[clap(long, value_parser)] load: Option, + /// ZStore to be preloaded before entering the REPL (and loading a file) + #[clap(long, value_parser)] + zstore: Option, + /// Config file, containing the lowest precedence parameters #[clap(long, value_parser)] config: Option, @@ -194,12 +193,12 @@ struct ReplArgs { limit: Option, /// Prover backend (defaults to "Nova") - #[clap(long, value_parser)] - backend: Option, + #[clap(long, value_enum)] + backend: Option, /// Arithmetic field (defaults to the backend's standard field) - #[clap(long, value_parser)] - field: Option, + #[clap(long, value_enum)] + field: Option, /// Path to public parameters directory #[clap(long, value_parser)] @@ -235,11 +234,11 @@ struct ReplCli { #[clap(long, value_parser)] limit: Option, - #[clap(long, value_parser)] - backend: Option, + #[clap(long, value_enum)] + backend: Option, - #[clap(long, value_parser)] - field: Option, + #[clap(long, value_enum)] + field: Option, #[clap(long, value_parser)] public_params_dir: Option, @@ -272,21 +271,6 @@ impl ReplArgs { } } -fn parse_backend(backend_str: &String) -> Result { - match backend_str.to_lowercase().as_str() { - "nova" => Ok(Backend::Nova), - _ => bail!("Backend not supported: {backend_str}"), - } -} - -fn parse_field(field_str: &String) -> Result { - match field_str.to_lowercase().as_str() { - "pallas" => Ok(LanguageField::Pallas), - "vesta" => Ok(LanguageField::Vesta), - _ => bail!("Field not supported: {field_str}"), - } -} - fn parse_filename(file: &str) -> Result { if file == "help" { bail!("help is not a valid filename. 
printing help console instead"); @@ -295,50 +279,6 @@ fn parse_filename(file: &str) -> Result { Ok(path) } -fn get_parsed_usize( - param_name: &str, - arg: &Option, - config: &HashMap, - default: usize, -) -> Result { - match arg { - Some(arg) => Ok(*arg), - None => match config.get(param_name) { - None => Ok(default), - Some(arg_str) => Ok(arg_str.parse::()?), - }, - } -} - -fn get_parsed( - param_name: &str, - arg: &Option, - config: &HashMap, - parse_fn: fn(&String) -> Result, - default: T, -) -> Result { - match arg { - Some(arg) => parse_fn(arg), - None => match config.get(param_name) { - None => Ok(default), - Some(arg) => parse_fn(arg), - }, - } -} - -pub fn get_config(config_path: &Option) -> Result> { - // First load from the config file - let builder = match config_path { - Some(config_path) if config_path.exists() => { - Config::builder().add_source(File::with_name(config_path.as_str())) - } - _ => Config::builder(), - }; - // Then potentially overwrite with environment variables - let builder = builder.add_source(Environment::with_prefix("LURK")); - Ok(builder.build()?.try_deserialize()?) -} - fn get_store serde::de::Deserialize<'a>>( z_store_path: &Option, ) -> Result> { @@ -369,35 +309,41 @@ impl ReplCli { repl.start() }}; } - let config = get_config(&self.config)?; - tracing::info!("Configured variables: {:?}", config); - set_lurk_dirs( - &config, - &self.public_params_dir, - &self.proofs_dir, - &self.commits_dir, - &self.circom_dir, + macro_rules! 
map_insert { + ( $map:expr, $( $field:ident ),* ) => { + $( + if let Some(val) = &self.$field { + $map.insert(stringify!($field), val.to_string()); + } + )* + }; + } + let mut cli_settings: HashMap<&str, String> = HashMap::new(); + map_insert!( + &mut cli_settings, + public_params_dir, + proofs_dir, + commits_dir, + circom_dir, + backend, + field, + rc, + limit ); - let rc = get_parsed_usize("rc", &self.rc, &config, DEFAULT_RC)?; - let limit = get_parsed_usize("limit", &self.limit, &config, DEFAULT_LIMIT)?; - let backend = get_parsed( - "backend", - &self.backend, - &config, - parse_backend, - DEFAULT_BACKEND, - )?; - let field = get_parsed( - "field", - &self.field, - &config, - parse_field, - backend.default_field(), - )?; + + // Initializes CLI config with CLI arguments as overrides + let config = cli_config(self.config.as_ref(), Some(&cli_settings)); + + create_lurk_dirs().unwrap(); + + let rc = config.rc; + let limit = config.limit; + let backend = &config.backend; + let field = &config.field; validate_non_zero("rc", rc)?; - backend.validate_field(&field)?; + backend.validate_field(field)?; match field { - LanguageField::Pallas => repl!(rc, limit, pallas::Scalar, backend), + LanguageField::Pallas => repl!(rc, limit, pallas::Scalar, backend.clone()), // LanguageField::Vesta => repl!(rc, limit, vesta::Scalar, backend), LanguageField::Vesta => todo!(), LanguageField::BLS12_381 => todo!(), @@ -419,35 +365,41 @@ impl LoadCli { Ok(()) }}; } - let config = get_config(&self.config)?; - tracing::info!("Configured variables: {:?}", config); - set_lurk_dirs( - &config, - &self.public_params_dir, - &self.proofs_dir, - &self.commits_dir, - &self.circom_dir, + macro_rules! 
map_insert { + ( $map:expr, $( $field:ident ),* ) => { + $( + if let Some(val) = &self.$field { + $map.insert(stringify!($field), val.to_string()); + } + )* + }; + } + let mut cli_settings: HashMap<&str, String> = HashMap::new(); + map_insert!( + &mut cli_settings, + public_params_dir, + proofs_dir, + commits_dir, + circom_dir, + backend, + field, + rc, + limit ); - let rc = get_parsed_usize("rc", &self.rc, &config, DEFAULT_RC)?; - let limit = get_parsed_usize("limit", &self.limit, &config, DEFAULT_LIMIT)?; - let backend = get_parsed( - "backend", - &self.backend, - &config, - parse_backend, - DEFAULT_BACKEND, - )?; - let field = get_parsed( - "field", - &self.field, - &config, - parse_field, - backend.default_field(), - )?; + + // Initializes CLI config with CLI arguments as overrides + let config = cli_config(self.config.as_ref(), Some(&cli_settings)); + + create_lurk_dirs().unwrap(); + + let rc = config.rc; + let limit = config.limit; + let backend = &config.backend; + let field = &config.field; validate_non_zero("rc", rc)?; - backend.validate_field(&field)?; + backend.validate_field(field)?; match field { - LanguageField::Pallas => load!(rc, limit, pallas::Scalar, backend), + LanguageField::Pallas => load!(rc, limit, pallas::Scalar, backend.clone()), // LanguageField::Vesta => load!(rc, limit, vesta::Scalar, backend), LanguageField::Vesta => todo!(), LanguageField::BLS12_381 => todo!(), @@ -463,10 +415,6 @@ struct VerifyArgs { #[clap(value_parser)] proof_id: String, - /// Config file, containing the lowest precedence parameters - #[clap(long, value_parser)] - config: Option, - /// Path to public parameters directory #[clap(long, value_parser)] public_params_dir: Option, @@ -474,6 +422,10 @@ struct VerifyArgs { /// Path to proofs directory #[clap(long, value_parser)] proofs_dir: Option, + + /// Config file, containing the lowest precedence parameters + #[clap(long, value_parser)] + config: Option, } /// To setup a new circom gadget ``, place your circom files in 
a designated folder and @@ -495,13 +447,13 @@ struct CircomArgs { #[clap(long, value_parser)] name: String, - /// Config file, containing the lowest precedence parameters - #[clap(long, value_parser)] - config: Option, - /// Path to circom directory #[clap(long, value_parser)] circom_dir: Option, + + /// Config file, containing the lowest precedence parameters + #[clap(long, value_parser)] + config: Option, } #[derive(Args, Debug)] @@ -509,23 +461,26 @@ struct PublicParamArgs { /// Lists all the cached params #[arg(long)] list: bool, + /// Clears everything #[arg(long)] clean: bool, + /// Remove specified params from cache #[clap(long, value_parser)] remove: Option, + /// Show specified params configurations #[clap(long, value_parser)] show: Option, - /// Config file, containing the lowest precedence parameters - #[clap(long, value_parser)] - config: Option, - /// Path to public params directory #[clap(long, value_parser)] public_params_dir: Option, + + /// Config file, containing the lowest precedence parameters + #[clap(long, value_parser)] + config: Option, } impl PublicParamArgs { @@ -609,15 +564,15 @@ impl Cli { #[allow(unused_variables)] Command::Verify(verify_args) => { use crate::cli::lurk_proof::LurkProof; - let config = get_config(&verify_args.config)?; - tracing::info!("Configured variables: {:?}", config); - set_lurk_dirs( - &config, - &verify_args.public_params_dir, - &verify_args.proofs_dir, - &None, - &None, - ); + let mut cli_settings = HashMap::new(); + if let Some(dir) = verify_args.public_params_dir { + cli_settings.insert("public_params_dir", dir.to_string()); + } + if let Some(dir) = verify_args.proofs_dir { + cli_settings.insert("proofs_dir", dir.to_string()); + } + cli_config(verify_args.config.as_ref(), Some(&cli_settings)); + LurkProof::<_, _, MultiFrame<'_, _, Coproc>>::verify_proof( &verify_args.proof_id, )?; @@ -628,24 +583,24 @@ impl Cli { if circom_args.name == "main" { bail!("Circom gadget name cannot be `main`, see circom 
documentation") } - - let config = get_config(&circom_args.config)?; - tracing::info!("Configured variables: {:?}", config); - set_lurk_dirs(&config, &None, &None, &None, &circom_args.circom_dir); + let mut cli_settings = HashMap::new(); + if let Some(dir) = circom_args.circom_dir { + cli_settings.insert("circom_dir", dir.to_string()); + } + cli_config(circom_args.config.as_ref(), Some(&cli_settings)); create_circom_gadget(circom_args.circom_folder, circom_args.name)?; Ok(()) } Command::PublicParams(public_params_args) => { - let config = get_config(&public_params_args.config)?; - tracing::info!("Configured variables: {:?}", config); - set_lurk_dirs( - &config, - &public_params_args.public_params_dir, - &None, - &None, - &None, - ); + let mut cli_settings = HashMap::new(); + if let Some(dir) = public_params_args.public_params_dir.clone() { + cli_settings.insert("public_params_dir", dir.to_string()); + } + + cli_config(public_params_args.config.as_ref(), Some(&cli_settings)); + + create_lurk_dirs().unwrap(); public_params_args.run()?; Ok(()) } diff --git a/src/cli/paths.rs b/src/cli/paths.rs index 5469ded3aa..683c00a011 100644 --- a/src/cli/paths.rs +++ b/src/cli/paths.rs @@ -1,20 +1,10 @@ use anyhow::Result; use camino::{Utf8Path, Utf8PathBuf}; -use once_cell::sync::OnceCell; -use std::collections::HashMap; use std::fs; -use crate::public_parameters::public_params_default_dir; - -pub(crate) static LURK_DIRS: OnceCell = OnceCell::new(); - -pub(crate) struct LurkDirs { - public_params: Utf8PathBuf, - proofs: Utf8PathBuf, - commits: Utf8PathBuf, - circom: Utf8PathBuf, -} +use crate::cli::config::cli_config; +use crate::public_parameters::disk_cache::public_params_dir; #[cfg(not(target_arch = "wasm32"))] fn home_dir() -> Utf8PathBuf { @@ -23,81 +13,40 @@ fn home_dir() -> Utf8PathBuf { } #[cfg(not(target_arch = "wasm32"))] -fn lurk_dir() -> Utf8PathBuf { +pub fn lurk_default_dir() -> Utf8PathBuf { home_dir().join(Utf8Path::new(".lurk")) } #[cfg(target_arch = "wasm32")] 
-pub(crate) fn lurk_dir() -> Utf8PathBuf { +pub fn lurk_default_dir() -> Utf8PathBuf { Utf8PathBuf::from(".lurk") } -#[cfg(not(target_arch = "wasm32"))] -pub(crate) fn proofs_default_dir() -> Utf8PathBuf { - let home = home::home_dir().unwrap(); - Utf8PathBuf::from_path_buf(home.join(".lurk/proofs")).expect("path contains invalid Unicode") -} - -#[cfg(target_arch = "wasm32")] pub(crate) fn proofs_default_dir() -> Utf8PathBuf { - Utf8PathBuf::from(".lurk/public_params") -} - -#[cfg(not(target_arch = "wasm32"))] -pub(crate) fn commits_default_dir() -> Utf8PathBuf { - let home = home::home_dir().unwrap(); - Utf8PathBuf::from_path_buf(home.join(".lurk/commits")).expect("path contains invalid Unicode") + lurk_default_dir().join("proofs") } -#[cfg(target_arch = "wasm32")] pub(crate) fn commits_default_dir() -> Utf8PathBuf { - Utf8PathBuf::from(".lurk/commits") -} - -#[cfg(not(target_arch = "wasm32"))] -pub(crate) fn circom_default_dir() -> Utf8PathBuf { - let home = home::home_dir().unwrap(); - Utf8PathBuf::from_path_buf(home.join(".lurk/circom")).expect("path contains invalid Unicode") + lurk_default_dir().join("commits") } -#[cfg(target_arch = "wasm32")] pub(crate) fn circom_default_dir() -> Utf8PathBuf { - Utf8PathBuf::from(".lurk/circom") -} - -pub(crate) fn public_params_dir() -> Utf8PathBuf { - LURK_DIRS - .get() - .expect("failed to initialize beforehand with `set_lurk_dirs()`") - .public_params - .to_owned() + lurk_default_dir().join("circom") } -pub(crate) fn proofs_dir() -> Utf8PathBuf { - LURK_DIRS - .get() - .expect("failed to initialize beforehand with `set_lurk_dirs()`") - .proofs - .to_owned() +pub(crate) fn proofs_dir() -> &'static Utf8PathBuf { + &cli_config(None, None).proofs_dir } -pub(crate) fn commits_dir() -> Utf8PathBuf { - LURK_DIRS - .get() - .expect("failed to initialize beforehand with `set_lurk_dirs()`") - .commits - .to_owned() +pub(crate) fn commits_dir() -> &'static Utf8PathBuf { + &cli_config(None, None).commits_dir } -pub(crate) fn 
circom_dir() -> Utf8PathBuf { - LURK_DIRS - .get() - .expect("failed to initialize beforehand with `set_lurk_dirs()`") - .circom - .to_owned() +pub(crate) fn circom_dir() -> &'static Utf8PathBuf { + &cli_config(None, None).circom_dir } -fn lurk_leaf_dirs() -> [Utf8PathBuf; 4] { +fn lurk_leaf_dirs() -> [&'static Utf8PathBuf; 4] { [ proofs_dir(), commits_dir(), @@ -106,41 +55,9 @@ fn lurk_leaf_dirs() -> [Utf8PathBuf; 4] { ] } -pub(crate) fn set_lurk_dirs( - config: &HashMap, - public_params_dir: &Option, - proofs_dir: &Option, - commits_dir: &Option, - circom_dir: &Option, -) { - let get_path = |given_path: &Option, config_key: &str, default: Utf8PathBuf| { - given_path.clone().unwrap_or_else(|| { - config - .get(config_key) - .map_or_else(|| default, Utf8PathBuf::from) - }) - }; - - let public_params = get_path( - public_params_dir, - "public_params", - public_params_default_dir(), - ); - let proofs = get_path(proofs_dir, "proofs", proofs_default_dir()); - let commits = get_path(commits_dir, "commits", commits_default_dir()); - let circom = get_path(circom_dir, "circom", circom_default_dir()); - - LURK_DIRS.get_or_init(|| LurkDirs { - public_params, - proofs, - commits, - circom, - }); - - create_lurk_dirs().unwrap(); -} - -/// Must call this function after setting `LURK_DIRS` via the `set_lurk_dirs()` function +// Creates dirs for public params, proofs, commits, and circom +// NOTE: call this function after `cli_config()` or `lurk_config()` if non-default +// config settings are desired, as it will initialize them if unset pub(crate) fn create_lurk_dirs() -> Result<()> { for dir in lurk_leaf_dirs() { fs::create_dir_all(dir)?; @@ -150,7 +67,7 @@ pub(crate) fn create_lurk_dirs() -> Result<()> { // Not currently configurable pub(crate) fn repl_history() -> Utf8PathBuf { - lurk_dir().join(Utf8Path::new("repl-history")) + lurk_default_dir().join(Utf8Path::new("repl-history")) } pub(crate) fn commitment_path(name: &str) -> Utf8PathBuf { diff --git a/src/cli/repl.rs 
b/src/cli/repl.rs index 813cd469b3..16560b24ea 100644 --- a/src/cli/repl.rs +++ b/src/cli/repl.rs @@ -13,7 +13,7 @@ use std::{cell::RefCell, collections::HashMap, fs::read_to_string, rc::Rc, sync: use tracing::info; use crate::{ - cli::paths::{proof_path, public_params_dir}, + cli::paths::proof_path, eval::lang::{Coproc, Lang}, field::LurkField, lem::{ @@ -244,7 +244,7 @@ impl Repl { info!("Loading public parameters"); let instance = Instance::new(self.rc, self.lang.clone(), true, Kind::NovaPublicParams); - let pp = public_params(&instance, &public_params_dir())?; + let pp = public_params(&instance)?; let prover = NovaProver::, MultiFrame<'_, F, Coproc>>::new( self.rc, diff --git a/src/config.rs b/src/config.rs index 645ff8b571..aa0a86d52f 100644 --- a/src/config.rs +++ b/src/config.rs @@ -1,80 +1,127 @@ -//! Global config for parallelism. -use anyhow::bail; -use once_cell::sync::Lazy; +//! Global config for Lurk +//! Includes settings for cache locations, public parameters, and parallelism. +use std::collections::HashMap; -pub static CONFIG: Lazy = Lazy::new(init_config); +use camino::Utf8PathBuf; +use config::{Config, ConfigError, Environment, File}; +use once_cell::sync::OnceCell; +use serde::Deserialize; -fn canned_config_from_env() -> Option { - if let Ok(x) = std::env::var("LURK_CANNED_CONFIG") { - let canned = CannedConfig::try_from(x.as_str()).ok(); +use crate::cli::paths::lurk_default_dir; - tracing::debug!("{:?}", &canned); +/// Global config variable for `Settings` +pub static LURK_CONFIG: OnceCell = OnceCell::new(); - canned - } else { - None - } +/// Global path variable for configuration file +pub static LURK_CONFIG_FILE: OnceCell = OnceCell::new(); + +/// Gets the `LURK_CONFIG` settings. If uninitialized, sets the global variable +/// in the following order (greatest to least precedence): +/// - `settings` map if provided, e.g. with key ("public_params_dir", "$HOME/lurk-rs/public_params") +/// - Env var per setting, e.g. 
`LURK_PUBLIC_PARAMS_DIR` +/// - Config file, which also has a configurable location (see `lurk_config_file()`), +/// and has the following syntax for e.g. TOML: +/// ```toml +/// public_params_dir = "/path/to/params" +/// ``` +/// Other file formats are supported by the `config` crate, but only TOML is tested +/// - Default values, e.g. `$HOME/.lurk/public_params` +pub fn lurk_config( + file: Option<&Utf8PathBuf>, + settings: Option<&HashMap<&str, String>>, +) -> &'static Settings { + LURK_CONFIG + .get_or_init(|| Settings::from_config(lurk_config_file(file), settings).unwrap_or_default()) } -#[derive(Default, Debug)] -pub enum Flow { - #[default] - Sequential, - Parallel, // Try to be smart. - ParallelN(usize), // How many threads to use? (Advisory, might be ignored.) +/// Gets the `LURK_CONFIG_FILE` path. If uninitialized, sets the global variable +/// in the following order (greatest to least precedence): +/// - `config_file` parameter if provided, e.g. "$HOME/lurk-rs/lurk-local.toml" +/// - `LURK_CONFIG_FILE` env var +/// - Default location at `$HOME/.lurk/lurk.toml` or `/.config/lurk.toml` on WASM. 
+pub fn lurk_config_file(config_file: Option<&Utf8PathBuf>) -> &'static Utf8PathBuf { + LURK_CONFIG_FILE.get_or_init(|| { + if let Some(file) = config_file { + file.clone() + } else if let Ok(file) = std::env::var("LURK_CONFIG_FILE") { + Utf8PathBuf::from(file) + } else { + lurk_default_dir().join("lurk.toml") + } + }) } -impl Flow { - pub fn is_sequential(&self) -> bool { - matches!(self, Self::Sequential) - } +/// Contains the Lurk config settings +/// The `public_params_dir` setting can also be overriden by a CLI arg if in use +#[derive(Debug, Deserialize, PartialEq, Eq)] +pub struct Settings { + /// Public parameter disk cache location + pub public_params_dir: Utf8PathBuf, - pub fn is_parallel(&self) -> bool { - !self.is_sequential() - } + /// Parallelism & witness gen configs + pub perf: PerfConfig, +} - pub fn num_threads(&self) -> usize { - match self { - Self::Sequential => 1, - Self::Parallel => num_cpus::get(), - Self::ParallelN(threads) => *threads, - } +impl Settings { + /// Loads config settings from a file or env vars + /// The public parameter disk cache can also be overriden by a CLI arg if applicable + pub fn from_config( + config_file: &Utf8PathBuf, + settings: Option<&HashMap<&str, String>>, + ) -> Result { + let public_params = "public_params_dir"; + // Settings are read first to last, in order of increasing precedence. + // Hence, default values must come first so they are overriden by all other methods. + Config::builder() + // Default settings if unspecified in the config file + .set_default(public_params, public_params_default_dir().to_string())? + .set_default("perf", "fully-sequential".to_string())? + .add_source(File::with_name(config_file.as_str()).required(false)) + // Then override with any `LURK` environment variables + .add_source(Environment::with_prefix("LURK")) + // Optionally override if settings were specified via CLI arg + .set_override_option(public_params, settings.and_then(|s| s.get(public_params).map(|v| v.to_owned())))? 
+ .build() + .and_then(|c| c.try_deserialize()) } +} - pub fn chunk_size(&self, total_n: usize, min_chunk_size: usize) -> usize { - if self.is_sequential() { - total_n - } else { - let num_threads = self.num_threads(); - let divides_evenly = total_n % num_threads == 0; - - ((total_n / num_threads) + usize::from(!divides_evenly)).max(min_chunk_size) +impl Default for Settings { + fn default() -> Self { + Self { + public_params_dir: public_params_default_dir(), + perf: PerfConfig::default(), } } } -#[derive(Default, Debug)] -pub struct ParallelConfig { - pub recursive_steps: Flow, // Multiple `StepCircuit`s. - pub synthesis: Flow, // Synthesis (within one `StepCircuit`) - pub poseidon_witnesses: Flow, // The poseidon witness part of synthesis. +pub fn public_params_default_dir() -> Utf8PathBuf { + #[cfg(not(target_arch = "wasm32"))] + let params_path = home_dir(); + #[cfg(target_arch = "wasm32")] + let params_path = Utf8PathBuf::new(); + params_path.join(".lurk/public_params") } -/// Should we use optimized witness-generation when possible? -#[derive(Debug, Default)] -pub struct WitnessGeneration { - // NOTE: Neptune itself *will* do this transparently at the level of individual hashes, where possible. - // so this configuration is only required for higher-level decisions. - pub precompute_neptune: bool, +// TODO: Should we crash if the user has no home dir? 
+/// Returns the home directory used by `cargo`` and `rustup` +#[cfg(not(target_arch = "wasm32"))] +pub fn home_dir() -> Utf8PathBuf { + Utf8PathBuf::from_path_buf(home::home_dir().expect("missing home directory")) + .expect("path contains invalid Unicode") } -#[derive(Default, Debug)] -pub struct Config { +/// Performance-related configuration settings +#[derive(Default, Debug, PartialEq, Eq, Deserialize)] +#[serde(from = "CannedConfig")] +pub struct PerfConfig { + /// Parallelism settings pub parallelism: ParallelConfig, + /// Witness generation settings pub witness_generation: WitnessGeneration, } -impl Config { +impl PerfConfig { fn fully_sequential() -> Self { Self { parallelism: ParallelConfig { @@ -128,15 +175,82 @@ impl Config { } } -#[derive(Debug)] +/// Parallel configuration settings +#[derive(Default, Debug, PartialEq, Eq)] +pub struct ParallelConfig { + /// Multiple `StepCircuit`s. + pub recursive_steps: Flow, + /// Synthesis (within one `StepCircuit`) + pub synthesis: Flow, + /// The poseidon witness part of synthesis. + pub poseidon_witnesses: Flow, +} + +/// Should we use optimized witness-generation when possible? +#[derive(Debug, Default, PartialEq, Eq)] +pub struct WitnessGeneration { + /// NOTE: Neptune itself *will* do this transparently at the level of individual hashes, where possible. + /// so this configuration is only required for higher-level decisions. + pub precompute_neptune: bool, +} + +/// The level of parallelism used when synthesizing the Lurk circuit +#[derive(Default, Debug, PartialEq, Eq)] +pub enum Flow { + /// Runs without parallelism (default) + #[default] + Sequential, + /// Try to be smart about thread management based on # of cpus + Parallel, + /// How many threads to use? (Advisory, might be ignored.) 
+ ParallelN(usize), +} + +impl Flow { + /// Returns `true` on `Flow::Sequential` + pub fn is_sequential(&self) -> bool { + matches!(self, Self::Sequential) + } + + /// Returns `true` on `Flow::Parallel` or `Flow::ParallelN` + pub fn is_parallel(&self) -> bool { + !self.is_sequential() + } + + /// Returns the number of parallel threads to run + pub fn num_threads(&self) -> usize { + match self { + Self::Sequential => 1, + Self::Parallel => num_cpus::get(), + Self::ParallelN(threads) => *threads, + } + } + + /// Returns the number of parallel steps to run per thread with `rayon::prelude::par_chunks()` + pub fn chunk_size(&self, total_n: usize, min_chunk_size: usize) -> usize { + if self.is_sequential() { + total_n + } else { + let num_threads = self.num_threads(); + let divides_evenly = total_n % num_threads == 0; + + ((total_n / num_threads) + usize::from(!divides_evenly)).max(min_chunk_size) + } + } +} + +/// Shortcut to easily set `PerfConfig` +#[derive(Debug, Default, Deserialize)] +#[serde(rename_all = "kebab-case")] enum CannedConfig { + #[default] FullySequential, MaxParallelSimple, ParallelStepsOnly, ParallelSynthesis, } -impl From for Config { +impl From for PerfConfig { fn from(canned: CannedConfig) -> Self { match canned { CannedConfig::FullySequential => Self::fully_sequential(), @@ -147,20 +261,84 @@ impl From for Config { } } -impl TryFrom<&str> for CannedConfig { - type Error = anyhow::Error; +#[cfg(test)] +mod tests { + use camino::Utf8Path; + use std::io::prelude::*; + use std::{collections::HashMap, fs::File}; + use tempfile::Builder; - fn try_from(s: &str) -> Result { - match s { - "FULLY-SEQUENTIAL" => Ok(Self::FullySequential), - "MAX-PARALLEL-SIMPLE" => Ok(Self::MaxParallelSimple), - "PARALLEL-SYNTHESIS" => Ok(Self::ParallelSynthesis), - "PARALLEL-STEPS-ONLY" => Ok(Self::ParallelStepsOnly), - _ => bail!("Invalid CannedConfig: {s}"), - } + use crate::config::{CannedConfig, PerfConfig, Settings}; + + // Tests a generic config file with identical 
syntax to that used in `LURK_CONFIG` + // Doesn't test `OnceCell` behavior as the tests seem to share memory + #[test] + fn test_config_lurk() { + let tmp_dir = Builder::new().prefix("tmp").tempdir().unwrap(); + let tmp_dir = Utf8Path::from_path(tmp_dir.path()).unwrap(); + let config_dir = tmp_dir.join("lurk.toml"); + let public_params_dir = tmp_dir.join("public_params").into_string(); + let perf_config = PerfConfig::from(CannedConfig::MaxParallelSimple); + + let mut config_file = File::create(config_dir.clone()).unwrap(); + config_file + .write_all(format!("public_params_dir = \"{public_params_dir}\"\n").as_bytes()) + .unwrap(); + config_file + .write_all("perf = \"max-parallel-simple\"\n".as_bytes()) + .unwrap(); + + let config = Settings::from_config(&config_dir, None).unwrap(); + + assert_eq!(config.public_params_dir, public_params_dir); + assert_eq!(config.perf, perf_config); } -} -fn init_config() -> Config { - canned_config_from_env().map_or_else(Config::fully_sequential, |x| x.into()) + // Tests overwriting the config file and CLI argument + // Doesn't test env var as it can overwrite other tests when run in parallel + #[test] + fn test_config_override() { + let tmp_dir = Builder::new().prefix("tmp").tempdir().unwrap(); + let tmp_dir = Utf8Path::from_path(tmp_dir.path()).unwrap(); + let config_dir = tmp_dir.join("lurk.toml"); + let public_params_dir = tmp_dir.join("public_params").into_string(); + + let mut config_file = File::create(config_dir.clone()).unwrap(); + config_file + .write_all(format!("public_params_dir = \"{public_params_dir}\"\n").as_bytes()) + .unwrap(); + config_file + .write_all("perf = \"parallel-steps-only\"\n".as_bytes()) + .unwrap(); + + // Overwrite public params dir to simulate CLI setting + let public_params_dir_cli = tmp_dir.join("public_params_cli"); + let mut overrides = HashMap::new(); + overrides.insert("public_params_dir", public_params_dir_cli.to_string()); + + let config = Settings::from_config(&config_dir, 
Some(&overrides)).unwrap(); + + assert_eq!(config.public_params_dir, public_params_dir_cli); + assert_eq!(config.perf, PerfConfig::parallel_steps_only()); + } + + // Tests that duplicate config keys result in an error + #[test] + fn test_config_duplicate() { + let tmp_dir = Builder::new().prefix("tmp").tempdir().unwrap(); + let tmp_dir = Utf8Path::from_path(tmp_dir.path()).unwrap(); + let config_dir = tmp_dir.join("lurk.toml"); + let public_params_dir = tmp_dir.join("public_params").into_string(); + let public_params_dir_dup = tmp_dir.join("public_params_dup").into_string(); + + let mut config_file = File::create(config_dir.clone()).unwrap(); + config_file + .write_all(format!("public_params_dir = \"{public_params_dir}\"\n").as_bytes()) + .unwrap(); + config_file + .write_all(format!("public_params_dir = \"{public_params_dir_dup}\"\n").as_bytes()) + .unwrap(); + + assert!(Settings::from_config(&config_dir, None).is_err()) + } } diff --git a/src/coprocessor/circom.rs b/src/coprocessor/circom.rs index d50e6fdfea..aadec9a6d7 100644 --- a/src/coprocessor/circom.rs +++ b/src/coprocessor/circom.rs @@ -6,7 +6,7 @@ #[cfg(not(target_arch = "wasm32"))] pub mod non_wasm { use core::fmt::Debug; - use std::{collections::HashMap, fs::read_dir}; + use std::fs::read_dir; use ansi_term::Colour::Red; use anyhow::{bail, Result}; @@ -20,9 +20,10 @@ pub mod non_wasm { data::GlobalAllocations, pointer::{AllocatedContPtr, AllocatedPtr}, }, - cli::paths::{circom_dir, set_lurk_dirs}, + cli::paths::circom_dir, coprocessor::{CoCircuit, Coprocessor}, field::LurkField, + lem::{pointers::Ptr as LEMPtr, store::Store as LEMStore, Tag}, ptr::Ptr, store::Store, }; @@ -48,9 +49,6 @@ Then run `lurk coprocessor --name {name} <{}_FOLDER>` to instantiate a new gadge } fn validate_gadget>(gadget: &C) -> Result<()> { - // TODO: This is a temporary hack, see: https://github.com/lurk-lab/lurk-rs/issues/621 - set_lurk_dirs(&HashMap::new(), &None, &None, &None, &None); - if !circom_dir().exists() { 
std::fs::create_dir_all(circom_dir())?; return print_error(gadget.name(), vec![]); @@ -135,6 +133,29 @@ Then run `lurk coprocessor --name {name} <{}_FOLDER>` to instantiate a new gadge Ok((res, input_env.clone(), input_cont.clone())) } + + fn synthesize_lem_simple>( + &self, + cs: &mut CS, + g: &crate::lem::circuit::GlobalAllocator, + _s: &LEMStore, + _not_dummy: &bellpepper_core::boolean::Boolean, + args: &[AllocatedPtr], + ) -> std::result::Result, SynthesisError> { + let input = self.gadget.clone().into_circom_input(args); + let witness = + circom_scotia::calculate_witness(&self.config, input, true).map_err(|e| { + eprintln!("{:?}", e); + SynthesisError::Unsatisfiable + })?; + let output = circom_scotia::synthesize(cs, self.config.r1cs.clone(), Some(witness))?; + let num_tag = g + .get_allocated_const(Tag::Expr(crate::tag::ExprTag::Num).to_field()) + .expect("Num tag should have been allocated"); + let res = AllocatedPtr::from_parts(num_tag.clone(), output); + + Ok(res) + } } impl + Debug> Coprocessor for CircomCoprocessor { @@ -147,6 +168,10 @@ Then run `lurk coprocessor --name {name} <{}_FOLDER>` to instantiate a new gadge self.gadget.simple_evaluate(s, args) } + fn evaluate_lem_simple(&self, s: &LEMStore, args: &[LEMPtr]) -> LEMPtr { + self.gadget.simple_evaluate_lem(s, args) + } + fn has_circuit(&self) -> bool { true } diff --git a/src/field.rs b/src/field.rs index f9f26a7bab..cf64480cdf 100644 --- a/src/field.rs +++ b/src/field.rs @@ -4,6 +4,7 @@ //! This defines the LurkField trait used pervasively in the code base //! as an extension of the ff::PrimeField trait, with conveniance methods //! relating this field to the expresions of the language. 
+use clap::ValueEnum; use ff::{PrimeField, PrimeFieldBits}; use nova::provider::bn256_grumpkin::bn256; use serde::{Deserialize, Serialize}; @@ -31,11 +32,12 @@ use crate::tag::{ContTag, ExprTag, Op1, Op2}; /// Because confusion on this point, perhaps combined with cargo-cult copying of incorrect previous usage has led to /// inconsistencies and inaccuracies in the code base, please prefer the named Scalar forms when correspondence to a /// named `LanguageField` is important. -#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, ValueEnum)] #[cfg_attr(not(target_arch = "wasm32"), derive(Arbitrary))] #[cfg_attr(not(target_arch = "wasm32"), serde_test)] pub enum LanguageField { /// The Pallas field, + #[default] Pallas, /// The Vesta field, Vesta, diff --git a/src/lem/eval.rs b/src/lem/eval.rs index 0730d5cdcc..ec2c6fd91e 100644 --- a/src/lem/eval.rs +++ b/src/lem/eval.rs @@ -309,13 +309,18 @@ fn car_cdr() -> Func { /// return (cproc, env, cont); /// } fn run_cproc(cproc_sym: Symbol, arity: usize) -> Func { - let evaluated_args = Var::new("evaluated_args"); + let evaluated_args = if arity == 0 { + Var::new("_evaluated_args") + } else { + Var::new("evaluated_args") + }; let expr = Var::new("expr"); let env = Var::new("env"); let cont = Var::new("cont"); let nil = Var::new("nil"); let is_nil = Var::new("is_nil"); let cproc = Var::new("cproc"); + let cproc_name = Var::new("cproc_name"); let cproc_out = vec![expr.clone(), env.clone(), cont.clone()]; let func_out = vec![expr, env.clone(), cont.clone()]; let err_block = Block { @@ -324,7 +329,7 @@ fn run_cproc(cproc_sym: Symbol, arity: usize) -> Func { }; let def_block = Block { ops: vec![], - ctrl: ctrl!(return (cproc, env, err)), + ctrl: ctrl!(return (cproc, env, cont)), }; let mut cproc_inp = (0..arity) .map(|i| Var(format!("x{i}").into())) @@ -361,9 +366,12 @@ fn run_cproc(cproc_sym: Symbol, arity: usize) -> Func { let mut 
match_symbol_map = IndexMap::default(); match_symbol_map.insert(cproc_sym, block); block = Block { - ops: vec![op!(let (cproc_name, evaluated_args) = decons2(cproc))], + ops: vec![Op::Decons2( + [cproc_name.clone(), evaluated_args], + cproc.clone(), + )], ctrl: Ctrl::MatchSymbol( - Var::new("cproc_name"), + cproc_name, match_symbol_map, Some(Box::new(def_block.clone())), ), @@ -373,11 +381,15 @@ fn run_cproc(cproc_sym: Symbol, arity: usize) -> Func { let mut match_tag_map = IndexMap::default(); match_tag_map.insert(Tag::Expr(Cproc), block); block = Block { - ops: vec![ - op!(let err: Cont::Error), - op!(let nil = Symbol("nil")), - op!(let nil = cast(nil, Expr::Nil)), - ], + ops: if arity == 0 { + vec![] + } else { + vec![ + op!(let err: Cont::Error), + op!(let nil = Symbol("nil")), + op!(let nil = cast(nil, Expr::Nil)), + ] + }, ctrl: Ctrl::MatchTag(cproc.clone(), match_tag_map, Some(Box::new(def_block))), }; let func_inp = vec![cproc, env, cont]; @@ -481,8 +493,13 @@ fn is_cproc(cprocs: &[(&Symbol, usize)]) -> Func { /// return (cproc_name, env, err, errctrl); /// } fn match_and_run_cproc(cprocs: &[(&Symbol, usize)]) -> Func { + let max_arity = cprocs.iter().fold(0, |acc, (_, a)| std::cmp::max(acc, *a)); let cproc_name = Var::new("cproc_name"); - let evaluated_args = Var::new("evaluated_args"); + let evaluated_args = if max_arity == 0 { + Var::new("_evaluated_args") + } else { + Var::new("evaluated_args") + }; let expr = Var::new("expr"); let env = Var::new("env"); let cont = Var::new("cont"); @@ -537,13 +554,21 @@ fn match_and_run_cproc(cprocs: &[(&Symbol, usize)]) -> Func { let def = Some(Box::new(err_block)); let ctrl = Ctrl::MatchSymbol(cproc_name.clone(), match_symbol_map, def); let func_inp = vec![cproc_name, evaluated_args, env, cont]; - let ops = vec![ - op!(let err: Cont::Error), - op!(let nil = Symbol("nil")), - op!(let nil = cast(nil, Expr::Nil)), - op!(let makethunk = Symbol("make-thunk")), - op!(let errctrl = Symbol("error")), - ]; + let ops = if 
max_arity == 0 { + vec![ + op!(let err: Cont::Error), + op!(let makethunk = Symbol("make-thunk")), + op!(let errctrl = Symbol("error")), + ] + } else { + vec![ + op!(let err: Cont::Error), + op!(let makethunk = Symbol("make-thunk")), + op!(let errctrl = Symbol("error")), + op!(let nil = Symbol("nil")), + op!(let nil = cast(nil, Expr::Nil)), + ] + }; Func::new( "match_and_run_cproc".into(), func_inp, diff --git a/src/lem/multiframe.rs b/src/lem/multiframe.rs index 7407fe7ecf..e7ba4e088e 100644 --- a/src/lem/multiframe.rs +++ b/src/lem/multiframe.rs @@ -9,7 +9,7 @@ use std::sync::Arc; use crate::{ circuit::gadgets::pointer::AllocatedPtr, - config::CONFIG, + config::lurk_config, coprocessor::Coprocessor, error::{ProofError, ReductionError}, eval::{lang::Lang, Meta}, @@ -133,13 +133,18 @@ impl<'a, F: LurkField, C: Coprocessor> MultiFrame<'a, F, C> { slots_witnesses: &[SlotsWitness], ) -> Result>, SynthesisError> { assert!(cs.is_witness_generator()); - assert!(CONFIG.parallelism.synthesis.is_parallel()); + assert!(lurk_config(None, None) + .perf + .parallelism + .synthesis + .is_parallel()); assert_eq!(frames.len(), slots_witnesses.len()); const MIN_CHUNK_SIZE: usize = 10; let num_frames = frames.len(); - let chunk_size = CONFIG + let chunk_size = lurk_config(None, None) + .perf .parallelism .synthesis .chunk_size(num_frames, MIN_CHUNK_SIZE); @@ -352,12 +357,22 @@ impl<'a, F: LurkField, C: Coprocessor + 'a> MultiFrameTrait<'a, F, C> for Mul allocations, } }; - let slots_witnesses = if CONFIG.parallelism.poseidon_witnesses.is_parallel() { + let slots_witnesses = if lurk_config(None, None) + .perf + .parallelism + .poseidon_witnesses + .is_parallel() + { frames.par_iter().map(gen_slots_witness).collect::>() } else { frames.iter().map(gen_slots_witness).collect::>() }; - if CONFIG.parallelism.synthesis.is_parallel() { + if lurk_config(None, None) + .perf + .parallelism + .synthesis + .is_parallel() + { self.synthesize_frames_parallel(cs, g, store, input, frames, 
&slots_witnesses) } else { self.synthesize_frames_sequential( diff --git a/src/proof/nova.rs b/src/proof/nova.rs index cfabefe48b..ecff6c9f51 100644 --- a/src/proof/nova.rs +++ b/src/proof/nova.rs @@ -32,7 +32,7 @@ use crate::{ }, CircuitFrame, MultiFrame, }, - config::CONFIG, + config::lurk_config, coprocessor::Coprocessor, error::ProofError, eval::{lang::Lang, Meta}, @@ -306,17 +306,13 @@ where store: &'a M::Store, lang: &Arc>, ) -> Result<(Proof<'a, F, C, M>, Vec, Vec, usize), ProofError> { - store.hydrate_z_cache(); - let z0 = M::io_to_scalar_vector(store, frames[0].input()).map_err(|e| e.into())?; - let zi = - M::io_to_scalar_vector(store, frames.last().unwrap().output()).map_err(|e| e.into())?; - let folding_config = Arc::new(FoldingConfig::new_ivc(lang.clone(), self.reduction_count())); - let circuits = M::from_frames(self.reduction_count(), frames, store, folding_config); + let (recursive_snark, circuits, z0, zi, num_steps) = + self.recursive_snark(pp, frames, store, lang)?; - let num_steps = circuits.len(); let proof = Proof::prove_recursively( pp, store, + Some(recursive_snark), &circuits, self.reduction_count, z0.clone(), @@ -346,6 +342,53 @@ where )?; self.prove(pp, &frames, store, lang) } + + /// Returns the first step of the [RecursiveSNARK] to be proved + pub fn recursive_snark( + &self, + pp: &PublicParams, + frames: &[M::EvalFrame], + store: &'a M::Store, + lang: &Arc>, + ) -> Result< + ( + RecursiveSNARK, G2, M, C2>, + Vec, + Vec, + Vec, + usize, + ), + ProofError, + > { + store.hydrate_z_cache(); + let z0 = M::io_to_scalar_vector(store, frames[0].input()).map_err(|e| e.into())?; + let zi = + M::io_to_scalar_vector(store, frames.last().unwrap().output()).map_err(|e| e.into())?; + + let folding_config = Arc::new(FoldingConfig::new_ivc(lang.clone(), self.reduction_count())); + let circuits = M::from_frames(self.reduction_count(), frames, store, folding_config); + + assert!(!circuits.is_empty()); + assert_eq!(circuits[0].arity(), z0.len()); + let 
num_steps = circuits.len(); + + let z0_primary = &z0; + let z0_secondary = Proof::::z0_secondary(); + + Ok(( + RecursiveSNARK::new( + &pp.pp, + &circuits[0], + &TrivialCircuit::default(), + z0_primary.clone(), + z0_secondary.clone(), + ), + circuits, + z0, + zi, + num_steps, + )) + } } impl<'a, F: CurveCycleEquipped, C: Coprocessor, M: MultiFrameTrait<'a, F, C>> Proof<'a, F, C, M> @@ -358,6 +401,7 @@ where pub fn prove_recursively( pp: &PublicParams, store: &M::Store, + recursive_snark: Option, G2, M, C2>>, circuits: &[M], num_iters_per_step: usize, z0: Vec, @@ -378,10 +422,15 @@ where tracing::debug!("circuits.len: {}", circuits.len()); // produce a recursive SNARK - let mut recursive_snark: Option, G2, M, C2>> = None; + let mut recursive_snark: Option, G2, M, C2>> = recursive_snark; // the shadowing here is voluntary - let recursive_snark = if CONFIG.parallelism.recursive_steps.is_parallel() { + let recursive_snark = if lurk_config(None, None) + .perf + .parallelism + .recursive_steps + .is_parallel() + { let cc = circuits .iter() .map(|c| Mutex::new(c.clone())) diff --git a/src/proof/supernova.rs b/src/proof/supernova.rs index 5e9678ac15..94f9ebb3dc 100644 --- a/src/proof/supernova.rs +++ b/src/proof/supernova.rs @@ -124,6 +124,7 @@ where #[tracing::instrument(skip_all, name = "supernova::prove_recursively")] pub fn prove_recursively( pp: &PublicParams, + recursive_snark: Option, G2>>, _store: &M::Store, nivc_steps: &[M], z0: Vec, @@ -131,7 +132,7 @@ where // Is this assertion strictly necessary? 
assert!(!nivc_steps.is_empty()); - let mut recursive_snark_option: Option, G2>> = None; + let mut recursive_snark_option: Option, G2>> = recursive_snark; let z0_primary = z0; let z0_secondary = Self::z0_secondary(); @@ -281,7 +282,7 @@ where let num_steps = nivc_steps.len(); let (proof, last_running_claim) = - Proof::prove_recursively(pp, store, &nivc_steps, z0.clone())?; + Proof::prove_recursively(pp, None, store, &nivc_steps, z0.clone())?; Ok((proof, z0, zi, num_steps, last_running_claim)) } @@ -307,6 +308,45 @@ where info!("got {} evaluation frames", frames.len()); self.prove(pp, &frames, store, lang) } + + /// Returns the first step of the [RecursiveSNARK] to be proved + pub fn recursive_snark( + &self, + pp: &PublicParams, + frames: &[M::EvalFrame], + store: &'a M::Store, + lang: &Arc>, + ) -> Result<(RecursiveSNARK, G2>, Vec, Vec, usize), ProofError> { + store.hydrate_z_cache(); + let z0 = M::io_to_scalar_vector(store, frames[0].input()).map_err(|e| e.into())?; + let zi = + M::io_to_scalar_vector(store, frames.last().unwrap().output()).map_err(|e| e.into())?; + let folding_config = Arc::new(FoldingConfig::new_ivc(lang.clone(), self.reduction_count())); + + let nivc_steps = M::from_frames(self.reduction_count(), frames, store, folding_config); + + let num_steps = nivc_steps.len(); + + let z0_primary = &z0; + let z0_secondary = Proof::::z0_secondary(); + + let augmented_circuit_index: &usize = &nivc_steps[0].circuit_index(); + let program_counter = F::from(*augmented_circuit_index as u64); + + let recursive_snark = RecursiveSNARK::iter_base_step( + &pp.pp, + *augmented_circuit_index, + &nivc_steps[0], + &nivc_steps[0].secondary_circuit(), + Some(program_counter), + *augmented_circuit_index, + nivc_steps[0].num_circuits(), + z0_primary, + &z0_secondary, + ) + .unwrap(); + Ok((recursive_snark, z0, zi, num_steps)) + } } #[derive(Clone, Debug)] diff --git a/src/public_parameters/disk_cache.rs b/src/public_parameters/disk_cache.rs index 33a1a7b1c6..c9fa560405 100644 
--- a/src/public_parameters/disk_cache.rs +++ b/src/public_parameters/disk_cache.rs @@ -6,6 +6,7 @@ use abomonation::{encode, Abomonation}; use camino::{Utf8Path, Utf8PathBuf}; use nova::traits::Group; +use crate::config::lurk_config; use crate::coprocessor::Coprocessor; use crate::proof::nova::{CurveCycleEquipped, PublicParams, G1, G2}; use crate::proof::MultiFrameTrait; @@ -13,6 +14,12 @@ use crate::public_parameters::error::Error; use super::instance::Instance; +/// Returns the public parameter disk cache directory, which has +/// either been configured or defaults to `$HOME/.lurk/public_params` +pub(crate) fn public_params_dir() -> &'static Utf8PathBuf { + &lurk_config(None, None).public_params_dir +} + pub(crate) struct DiskCache<'a, F, C, M> where F: CurveCycleEquipped, diff --git a/src/public_parameters/mem_cache.rs b/src/public_parameters/mem_cache.rs index 4c181ffc58..142c66bb03 100644 --- a/src/public_parameters/mem_cache.rs +++ b/src/public_parameters/mem_cache.rs @@ -4,7 +4,6 @@ use std::{ }; use abomonation::{decode, Abomonation}; -use camino::Utf8Path; use nova::traits::Group; use once_cell::sync::Lazy; use tap::TapFallible; @@ -17,7 +16,10 @@ use crate::{ }; use crate::{proof::nova::CurveCycleEquipped, public_parameters::error::Error}; -use super::{disk_cache::DiskCache, instance::Instance}; +use super::{ + disk_cache::{public_params_dir, DiskCache}, + instance::Instance, +}; type AnyMap = anymap::Map; type PublicParamMap = HashMap<(usize, bool), Arc>>; @@ -47,14 +49,13 @@ impl PublicParamMemCache { &'static self, instance: &Instance<'static, F, C, M>, default: Fn, - disk_cache_path: &Utf8Path, ) -> Result>, Error> where < as Group>::Scalar as ff::PrimeField>::Repr: Abomonation, < as Group>::Scalar as ff::PrimeField>::Repr: Abomonation, { // subdirectory search - let disk_cache = DiskCache::new(disk_cache_path).unwrap(); + let disk_cache = DiskCache::new(public_params_dir()).unwrap(); // read the file if it exists, otherwise initialize if 
instance.abomonated { @@ -108,7 +109,6 @@ impl PublicParamMemCache { &'static self, instance: &Instance<'static, F, C, M>, default: Fn, - disk_cache_path: &Utf8Path, ) -> Result>, Error> where F::CK1: Sync + Send, @@ -125,8 +125,7 @@ impl PublicParamMemCache { match param_entry.entry((instance.rc, instance.abomonated)) { Entry::Occupied(o) => Ok(o.into_mut()), Entry::Vacant(v) => { - let val = - self.get_from_disk_cache_or_update_with(instance, default, disk_cache_path)?; + let val = self.get_from_disk_cache_or_update_with(instance, default)?; Ok(v.insert(val)) } } diff --git a/src/public_parameters/mod.rs b/src/public_parameters/mod.rs index e8462ca187..6cdf04da8d 100644 --- a/src/public_parameters/mod.rs +++ b/src/public_parameters/mod.rs @@ -1,6 +1,5 @@ use ::nova::traits::Group; use abomonation::{decode, Abomonation}; -use camino::{Utf8Path, Utf8PathBuf}; use std::sync::Arc; use crate::coprocessor::Coprocessor; @@ -14,30 +13,18 @@ pub mod instance; mod mem_cache; use crate::proof::supernova::{self, SuperNovaAuxParams, SuperNovaPublicParams}; +use crate::public_parameters::disk_cache::public_params_dir; use crate::public_parameters::error::Error; use self::disk_cache::DiskCache; use self::instance::Instance; -#[cfg(not(target_arch = "wasm32"))] -pub fn public_params_default_dir() -> Utf8PathBuf { - let home = home::home_dir().unwrap(); - Utf8PathBuf::from_path_buf(home.join(".lurk/public_params")) - .expect("path contains invalid Unicode") -} - -#[cfg(target_arch = "wasm32")] -pub fn public_params_default_dir() -> Utf8PathBuf { - Utf8PathBuf::from(".lurk/public_params") -} - pub fn public_params< F: CurveCycleEquipped, C: Coprocessor + 'static, M: MultiFrameTrait<'static, F, C>, >( instance: &Instance<'static, F, C, M>, - disk_cache_path: &Utf8Path, ) -> Result>, Error> where F::CK1: Sync + Send, @@ -48,11 +35,7 @@ where let f = |instance: &Instance<'static, F, C, M>| { Arc::new(nova::public_params(instance.rc, instance.lang())) }; - 
mem_cache::PUBLIC_PARAM_MEM_CACHE.get_from_mem_cache_or_update_with( - instance, - f, - disk_cache_path, - ) + mem_cache::PUBLIC_PARAM_MEM_CACHE.get_from_mem_cache_or_update_with(instance, f) } /// Attempts to extract abomonated public parameters. @@ -62,7 +45,6 @@ where /// rely on a closure to capture the data and continue the computation in `bind`. pub fn with_public_params<'a, F, C, M, Fn, T>( instance: &Instance<'a, F, C, M>, - disk_cache_path: &Utf8Path, bind: Fn, ) -> Result where @@ -75,7 +57,7 @@ where { let default = |instance: &Instance<'a, F, C, M>| nova::public_params(instance.rc, instance.lang()); - let disk_cache = DiskCache::::new(disk_cache_path).unwrap(); + let disk_cache = DiskCache::::new(public_params_dir()).unwrap(); let mut bytes = vec![]; let pp = disk_cache.read_bytes(instance, &mut bytes).and_then(|()| { @@ -106,7 +88,6 @@ pub fn supernova_circuit_params< M: MultiFrameTrait<'a, F, C>, >( instance: &Instance<'a, F, C, M>, - disk_cache_path: &Utf8Path, ) -> Result, Error> where F::CK1: Sync + Send, @@ -114,7 +95,7 @@ where < as Group>::Scalar as ff::PrimeField>::Repr: Abomonation, < as Group>::Scalar as ff::PrimeField>::Repr: Abomonation, { - let disk_cache = DiskCache::::new(disk_cache_path).unwrap(); + let disk_cache = DiskCache::::new(public_params_dir()).unwrap(); let mut bytes = vec![]; disk_cache.read_bytes(instance, &mut bytes).and_then(|()| { @@ -135,7 +116,6 @@ pub fn supernova_aux_params< M: MultiFrameTrait<'a, F, C>, >( instance: &Instance<'a, F, C, M>, - disk_cache_path: &Utf8Path, ) -> Result, Error> where F::CK1: Sync + Send, @@ -143,7 +123,7 @@ where < as Group>::Scalar as ff::PrimeField>::Repr: Abomonation, < as Group>::Scalar as ff::PrimeField>::Repr: Abomonation, { - let disk_cache = DiskCache::::new(disk_cache_path).unwrap(); + let disk_cache = DiskCache::::new(public_params_dir()).unwrap(); let mut bytes = vec![]; disk_cache.read_bytes(instance, &mut bytes).and_then(|()| { @@ -169,7 +149,6 @@ pub fn 
supernova_public_params< M: MultiFrameTrait<'a, F, C> + SuperStepCircuit + NonUniformCircuit, G2, M, C2>, >( instance_primary: &Instance<'a, F, C, M>, - disk_cache_path: &Utf8Path, ) -> Result, Error> where F::CK1: Sync + Send, @@ -180,15 +159,15 @@ where let default = |instance: &Instance<'a, F, C, M>| { supernova::public_params::<'a, F, C, M>(instance.rc, instance.lang()) }; - let disk_cache = DiskCache::::new(disk_cache_path).unwrap(); + let disk_cache = DiskCache::::new(public_params_dir()).unwrap(); let maybe_circuit_params_vec = instance_primary .circuit_param_instances() .iter() - .map(|instance| supernova_circuit_params::(instance, disk_cache_path)) + .map(|instance| supernova_circuit_params::(instance)) .collect::>, _>>(); - let maybe_aux_params = supernova_aux_params::(instance_primary, disk_cache_path); + let maybe_aux_params = supernova_aux_params::(instance_primary); let pp = match (maybe_circuit_params_vec, maybe_aux_params) { (Ok(circuit_params_vec), Ok(aux_params)) => { @@ -235,18 +214,14 @@ mod tests { // Note: No Eq instance for PublicParams currently, just testing disk read/write fn serde_public_params_roundtrip() { let tmp_dir = Builder::new().prefix("tmp").tempdir().unwrap(); - let public_params_dir = Utf8Path::from_path(tmp_dir.path()) - .unwrap() - .join("public_params"); + std::env::set_var("LURK_PUBLIC_PARAMS", tmp_dir.path()); let lang: Arc>> = Arc::new(Lang::new()); type OG = crate::proof::nova::C1Lurk<'static, S1, Coproc>; let instance = Instance::new(10, lang, true, Kind::NovaPublicParams); // Without disk cache, writes to tmpfile - let _public_params = - public_params::, OG>(&instance, &public_params_dir).unwrap(); + let _public_params = public_params::, OG>(&instance).unwrap(); // With disk cache, reads from tmpfile - let _public_params = - public_params::, OG>(&instance, &public_params_dir).unwrap(); + let _public_params = public_params::, OG>(&instance).unwrap(); } } diff --git a/tests/lurk-cli-tests.rs b/tests/lurk-cli-tests.rs 
index 66c1acb047..1ff08829ca 100644 --- a/tests/lurk-cli-tests.rs +++ b/tests/lurk-cli-tests.rs @@ -4,8 +4,6 @@ use std::fs::File; use std::io::prelude::*; use std::process::Command; use tempfile::Builder; -use tracing_subscriber::{fmt, prelude::*, EnvFilter, Registry}; -use tracing_texray::TeXRayLayer; fn lurk_cmd() -> Command { Command::cargo_bin("lurk").unwrap() @@ -45,47 +43,6 @@ fn test_bad_command() { cmd.assert().failure(); } -#[test] -fn test_config_file() { - let subscriber = Registry::default() - .with(fmt::layer().pretty().with_test_writer()) - .with(EnvFilter::from_default_env()) - // note: we don't `tracing_texray::examine` anything below, so no spans are printed - // but we add the layer to allow the option in the future, maybe with a feature? - .with(TeXRayLayer::new()); - - tracing::subscriber::set_global_default(subscriber).unwrap(); - - let tmp_dir = Builder::new().prefix("tmp").tempdir().unwrap(); - let tmp_dir = Utf8Path::from_path(tmp_dir.path()).unwrap(); - let config_dir = tmp_dir.join("lurk.toml"); - let public_params_dir = tmp_dir.join("public_params").into_string(); - let proofs_dir = tmp_dir.join("proofs").into_string(); - let commits_dir = tmp_dir.join("commits").into_string(); - - let mut config_file = File::create(&config_dir).unwrap(); - config_file - .write_all(format!("public_params = \"{public_params_dir}\"\n").as_bytes()) - .unwrap(); - config_file - .write_all(format!("proofs = \"{proofs_dir}\"\n").as_bytes()) - .unwrap(); - config_file - .write_all(format!("commits = \"{commits_dir}\"\n").as_bytes()) - .unwrap(); - - // Overwrite proof dir with env var - let proofs_dir_env = tmp_dir.join("proofs_env").into_string(); - - std::env::set_var("LURK_PROOFS", proofs_dir_env.clone()); - - let config = lurk::cli::get_config(&Some(config_dir)).unwrap(); - - assert_eq!(config.get("public_params").unwrap(), &public_params_dir); - assert_eq!(config.get("proofs").unwrap(), &proofs_dir_env); - assert_eq!(config.get("commits").unwrap(), 
&commits_dir); -} - // TODO: Use a snapshot test for the proof ID and/or test the REPL process #[test] fn test_prove_and_verify() {