Adding criterion benchmarks #2

Open · wants to merge 7 commits into base: main
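The benchmark files themselves are not visible in this portion of the diff, so the following is only a rough illustration of what a criterion harness for this crate might look like. The bench name, buffer size, and workload are assumptions, not code from the PR; blake3 hashing stands in for the column-hashing work the actual benchmarks presumably measure.

// benches/commit.rs -- hypothetical sketch, not the PR's benchmark code.
use std::hint::black_box;

use criterion::{criterion_group, criterion_main, Criterion};
use rand::RngCore;

fn bench_column_hashing(c: &mut Criterion) {
    // Random 64 KiB buffer as a stand-in for a column of serialized field elements.
    let mut buf = vec![0u8; 1 << 16];
    rand::thread_rng().fill_bytes(&mut buf);

    c.bench_function("blake3 hash 64 KiB", |b| b.iter(|| blake3::hash(black_box(&buf))));
}

criterion_group!(benches, bench_column_hashing);
criterion_main!(benches);

Wiring a bench like this up normally means adding criterion as a dev-dependency and declaring the bench target with harness = false in the crate's Cargo.toml; cargo bench then runs it.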
38 changes: 38 additions & 0 deletions Cargo.toml
@@ -1,2 +1,40 @@
[workspace]
members = [ "lcpc-2d", "lcpc-ligero-pc", "lcpc-brakedown-pc", "lcpc-test-fields" ]

[workspace.dependencies]
bincode = "1.3"
bitvec = "1"
blake3 = { version = "1.5", features = ["traits-preview"] }
byteorder = "1"

digest = "0.10"

err-derive = "0.2"

ff = "0.12"
ff-derive-num = "0.2"
fffft = "0.4"

itertools = "0.10"

lcpc-2d = { path = "./lcpc-2d" }
lcpc-test-fields = { path = "./lcpc-test-fields" }

merlin = "2.0"

ndarray = ">=0.11.0,<0.15"
num-traits = "0.2"

paste = "1.0"

rand = "0.8"
rand_chacha = "0.3"
rand_core = "0.6"
rayon = "1.5"

serde = { version = "1.0", features = ["derive"] }
serde_bytes = "0.11"
sprs = "0.10"
subtle = "2.2"

typenum = "1.13"
30 changes: 15 additions & 15 deletions lcpc-2d/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "lcpc-2d"
version = "0.1.1"
version = "0.2.0"
authors = ["kwantam <[email protected]>"]
edition = "2018"
description = "polynomial commitment scheme from linear codes"
@@ -10,19 +10,19 @@ repository = "https://github.com/conroi/lcpc"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
digest = "0.9"
err-derive = "0.2"
ff = "0.12"
merlin = "2.0"
rand = "0.8"
rand_chacha = "0.3"
rayon = "1.5"
serde = { version = "1.0", features = ["derive"] }
serde_bytes = "0.11"
digest.workspace = true
err-derive.workspace = true
ff.workspace = true
merlin.workspace = true
rand.workspace = true
rand_chacha.workspace = true
rayon.workspace = true
serde.workspace = true
serde_bytes.workspace = true

[dev-dependencies]
bincode = "1.3"
blake3 = { version = "1", features = ["traits-preview"] }
fffft = "0.4"
itertools = "0.10"
lcpc-test-fields = { path = "../lcpc-test-fields" }
bincode.workspace = true
blake3.workspace = true
fffft.workspace = true
itertools.workspace = true
lcpc-test-fields.workspace = true
90 changes: 56 additions & 34 deletions lcpc-2d/src/lib.rs
@@ -12,7 +12,7 @@
lcpc2d is a polynomial commitment scheme based on linear codes
*/

use digest::{Digest, Output};
use digest::{Digest, FixedOutputReset, Output};
use err_derive::Error;
use ff::{Field, PrimeField};
use merlin::Transcript;
@@ -175,12 +175,18 @@ where
D: Digest,
E: LcEncoding,
{
comm: Vec<FldT<E>>,
coeffs: Vec<FldT<E>>,
n_rows: usize,
n_cols: usize,
n_per_row: usize,
hashes: Vec<Output<D>>,
/// The encoded values
pub comm: Vec<FldT<E>>,
/// The coefficients pre-encoding
pub coeffs: Vec<FldT<E>>,
/// Number of rows in the commitment
pub n_rows: usize,
/// Number of columns in the commitment
pub n_cols: usize,
/// Number of pre-encoded values per row
pub n_per_row: usize,
/// Hashed values for Merkle commit
pub hashes: Vec<Output<D>>,
}

#[derive(Debug, Serialize, Deserialize)]
@@ -206,7 +212,11 @@ where
D: Digest,
E: LcEncoding<F = F>,
{
let hashes = self.hashes.into_iter().map(|c| c.unwrap::<D, E>().root).collect();
let hashes = self
.hashes
.into_iter()
.map(|c| c.unwrap::<D, E>().root)
.collect();

LcCommit {
comm: self.comm,
@@ -226,7 +236,11 @@
E::F: Serialize,
{
fn wrapped(&self) -> WrappedLcCommit<FldT<E>> {
let hashes_wrapped = self.hashes.iter().map(|h| WrappedOutput { bytes: h.to_vec() }).collect();
let hashes_wrapped = self
.hashes
.iter()
.map(|h| WrappedOutput { bytes: h.to_vec() })
.collect();

WrappedLcCommit {
comm: self.comm.clone(),
@@ -269,7 +283,7 @@

impl<D, E> LcCommit<D, E>
where
D: Digest,
D: Digest + FixedOutputReset,
E: LcEncoding,
{
/// returns the Merkle root of this polynomial commitment (which is the commitment itself)
@@ -318,7 +332,8 @@
D: Digest,
E: LcEncoding,
{
root: Output<D>,
/// The Merkle root
pub root: Output<D>,
_p: std::marker::PhantomData<E>,
}

@@ -403,8 +418,10 @@
D: Digest,
E: LcEncoding,
{
col: Vec<FldT<E>>,
path: Vec<Output<D>>,
/// The values in the column
pub col: Vec<FldT<E>>,
/// The Merkle path
pub path: Vec<Output<D>>,
}

impl<D, E> LcColumn<D, E>
@@ -493,15 +510,19 @@
D: Digest,
E: LcEncoding,
{
n_cols: usize,
p_eval: Vec<FldT<E>>,
p_random_vec: Vec<Vec<FldT<E>>>,
columns: Vec<LcColumn<D, E>>,
/// Number of columns in this proof
pub n_cols: usize,
/// Evaluation row
pub p_eval: Vec<FldT<E>>,
/// Random combinations of rows
pub p_random_vec: Vec<Vec<FldT<E>>>,
/// Opened columns
pub columns: Vec<LcColumn<D, E>>,
}

impl<D, E> LcEvalProof<D, E>
where
D: Digest,
D: Digest + FixedOutputReset,
E: LcEncoding,
{
/// Get the number of elements in an encoded vector
@@ -549,7 +570,7 @@

/// An evaluation and proof of its correctness and of the low-degreeness of the commitment.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct WrappedLcEvalProof<F>
struct WrappedLcEvalProof<F>
where
F: Serialize,
{
@@ -612,7 +633,7 @@
/// for a code with `len`-length codewords over `flog2`-bit field
pub fn n_degree_tests(lambda: usize, len: usize, flog2: usize) -> usize {
let den = flog2 - log2(len);
(lambda + den - 1) / den
lambda.div_ceil(den)
}

// parallelization limit when working on columns
@@ -621,7 +642,7 @@ const LOG_MIN_NCOLS: usize = 5;
/// Commit to a univariate polynomial whose coefficients are `coeffs` using encoding `enc`
fn commit<D, E>(coeffs_in: &[FldT<E>], enc: &E) -> ProverResult<LcCommit<D, E>, ErrT<E>>
where
D: Digest,
D: Digest + FixedOutputReset,
E: LcEncoding,
{
let (n_rows, n_per_row, n_cols) = enc.get_dims(coeffs_in.len());
@@ -689,7 +710,7 @@ where

fn merkleize<D, E>(comm: &mut LcCommit<D, E>)
where
D: Digest,
D: Digest + FixedOutputReset,
E: LcEncoding,
{
// step 1: hash each column of the commitment (we always reveal a full column)
@@ -746,7 +767,7 @@ fn hash_columns<D, E>(

fn merkle_tree<D>(ins: &[Output<D>], outs: &mut [Output<D>])
where
D: Digest,
D: Digest + FixedOutputReset,
{
// array should always be of length 2^k - 1
assert_eq!(ins.len(), outs.len() + 1);
@@ -761,16 +782,16 @@ where

fn merkle_layer<D>(ins: &[Output<D>], outs: &mut [Output<D>])
where
D: Digest,
D: Digest + FixedOutputReset,
{
assert_eq!(ins.len(), 2 * outs.len());

if ins.len() <= (1 << LOG_MIN_NCOLS) {
// base case: just compute all of the hashes
let mut digest = D::new();
for idx in 0..outs.len() {
digest.update(ins[2 * idx].as_ref());
digest.update(ins[2 * idx + 1].as_ref());
Digest::update(&mut digest, ins[2 * idx].as_ref());
Digest::update(&mut digest, ins[2 * idx + 1].as_ref());
outs[idx] = digest.finalize_reset();
}
} else {
@@ -838,7 +859,7 @@ fn verify<D, E>(
tr: &mut Transcript,
) -> VerifierResult<FldT<E>, ErrT<E>>
where
D: Digest,
D: Digest + FixedOutputReset,
E: LcEncoding,
{
// make sure arguments are well formed
@@ -954,11 +975,11 @@ where
// Check a column opening
fn verify_column_path<D, E>(column: &LcColumn<D, E>, col_num: usize, root: &Output<D>) -> bool
where
D: Digest,
D: Digest + FixedOutputReset,
E: LcEncoding,
{
let mut digest = D::new();
digest.update(<Output<D> as Default>::default());
Digest::update(&mut digest, <Output<D> as Default>::default());
for e in &column.col[..] {
e.digest_update(&mut digest);
}
@@ -968,11 +989,11 @@ where
let mut col = col_num;
for p in &column.path[..] {
if col % 2 == 0 {
digest.update(&hash);
digest.update(p);
Digest::update(&mut digest, &hash);
Digest::update(&mut digest, p);
} else {
digest.update(p);
digest.update(&hash);
Digest::update(&mut digest, p);
Digest::update(&mut digest, &hash);
}
hash = digest.finalize_reset();
col >>= 1;
@@ -1092,6 +1113,7 @@ where
})
}

#[allow(clippy::only_used_in_recursion)]
fn collapse_columns<E>(
coeffs: &[FldT<E>],
tensor: &[FldT<E>],
@@ -1167,7 +1189,7 @@ fn verify_column<D, E>(
poly_eval: &FldT<E>,
) -> bool
where
D: Digest,
D: Digest + FixedOutputReset,
E: LcEncoding,
{
verify_column_path(column, col_num, root) && verify_column_value(column, tensor, poly_eval)
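The lib.rs changes above track the digest 0.9 to 0.10 bump from the Cargo.toml diff: in digest 0.10, finalize_reset is gated on the hasher also implementing FixedOutputReset, and update is called through Digest::update to avoid ambiguity with the Update::update method that the new bound pulls in via its supertraits. The (x + d - 1) / d idioms are likewise replaced with usize::div_ceil, the same ceiling division (stabilized in Rust 1.73). A minimal sketch of the digest pattern, not taken from the PR and assuming blake3 is built with its traits-preview feature as the workspace dependency specifies:

// Hypothetical illustration of the `Digest + FixedOutputReset` pattern used above.
use digest::{Digest, FixedOutputReset, Output};

fn hash_pair<D: Digest + FixedOutputReset>(left: &[u8], right: &[u8]) -> Output<D> {
    let mut digest = D::new();
    // Fully qualified: `digest.update(..)` would be ambiguous here between
    // `Digest::update` and `Update::update` under these bounds.
    Digest::update(&mut digest, left);
    Digest::update(&mut digest, right);
    // `finalize_reset` requires `Self: FixedOutputReset` in digest 0.10.
    digest.finalize_reset()
}

fn main() {
    let out = hash_pair::<blake3::Hasher>(b"left", b"right");
    println!("{:x?}", out.as_slice());
}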
9 changes: 4 additions & 5 deletions lcpc-2d/src/tests.rs
@@ -14,11 +14,11 @@ use digest::Output;
use ff::Field;
use fffft::{FFTError, FFTPrecomp, FieldFFT};
use itertools::iterate;
use lcpc_test_fields::ft63::*;
use merlin::Transcript;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use std::iter::repeat_with;
use lcpc_test_fields::ft63::*;

#[derive(Clone, Debug)]
struct LigeroEncoding<Ft> {
@@ -49,7 +49,7 @@

// minimize nr subject to #cols and rho
let np = ((nc as f64) * rho).floor() as usize;
let nr = (len + np - 1) / np;
let nr = len.div_ceil(np);
assert!(np * nr >= len);
assert!(np * (nr - 1) < len);

@@ -99,7 +99,7 @@
}

fn get_dims(&self, len: usize) -> (usize, usize, usize) {
let n_rows = (len + self.n_per_row - 1) / self.n_per_row;
let n_rows = len.div_ceil(self.n_per_row);
(n_rows, self.n_per_row, self.n_cols)
}

@@ -298,8 +298,7 @@ fn end_to_end() {
)
.unwrap();

let root2 =
bincode::deserialize::<LcRoot<Blake3, LigeroEncoding<Ft63>>>(&encroot[..]).unwrap();
let root2 = bincode::deserialize::<LcRoot<Blake3, LigeroEncoding<Ft63>>>(&encroot[..]).unwrap();
let pf2: LigeroEvalProof<Blake3, Ft63> = bincode::deserialize(&encoded[..]).unwrap();
let mut tr3 = Transcript::new(b"test transcript");
tr3.append_message(b"polycommit", root.as_ref());