Skip to content
Snippets Groups Projects
Commit 6f6647cd authored by STEVAN Antoine's avatar STEVAN Antoine :crab:
Browse files

update the API (dragoon/komodo!71)

## changelog
- rename the `encode` function to `prove` and have it take _shards_ instead of an _encoding matrix_: this is to isolate the "encoding" process inside the `fec` module and have the main `komodo::prove` compute only the "proof", i.e. the commits of the data

from
```rust
fn encode<F, G, P>(
    bytes: &[u8],
    encoding_mat: &Matrix<F>,
    powers: &Powers<F, G>,
) -> Result<Vec<Block<F, G>>, KomodoError>
```
to
```rust
fn prove<F, G, P>(
    bytes: &[u8],
    powers: &Powers<F, G>,
    k: usize,
) -> Result<Vec<Commitment<F, G>>, KomodoError>
```
- rename `fec::Shard.combine` to `fec::Shard.recode_with` to get rid of "combine"
- rename `fec::recode` to `fec::recode_with_coeffs` to show that this version takes a list of coefficients
- rename `Block.commit` to `Block.proof`: "commit" should be "commits" and it's usually referred to as "proof"
- split `prove` further into `prove` and `build`: `prove` now outputs a `Vec<Commitment<F>>`, `build` simply takes a `Vec<Shard<F>>` and a `Vec<Commitment<F>>` and outputs a `Vec<Block<F>>`
- add `fec::recode_random` that does the "shard" part of `recode` to wrap around `fec::recode_with_coeffs`
- remove `R: RngCore` from the signature of `zk::setup`, to avoid having to pass a generic `_` annotation everywhere `zk::setup` is used, same change has been applied to `recode` and the `generate_random_powers` in `main.rs`

from
```rust
fn setup<R: RngCore, F: PrimeField, G: CurveGroup<ScalarField = F>>(
    max_degree: usize,
    rng: &mut R,
) -> Result<Powers<F, G>, KomodoError> {
```
to
```rust
fn setup<F: PrimeField, G: CurveGroup<ScalarField = F>>(
    max_degree: usize,
    rng: &mut impl RngCore,
) -> Result<Powers<F, G>, KomodoError> {
```

### some extra minor changes
- remove some useless generic type annotations, e.g. `prove::<F, G, P>` can become a simpler `prove` most of the time, i.e. when there is at least one generic annotation somewhere in the scope
parent 4e78d50c
No related branches found
Tags 0.2.0
No related merge requests found
[package]
name = "komodo"
version = "0.1.0"
version = "0.2.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
......
......@@ -19,7 +19,7 @@ where
{
let rng = &mut rand::thread_rng();
let setup = zk::setup::<_, F, G>(degree, rng).unwrap();
let setup = zk::setup::<F, G>(degree, rng).unwrap();
let polynomial = P::rand(degree, rng);
c.bench_function(&format!("commit (komodo) {} on {}", degree, curve), |b| {
......
......@@ -6,7 +6,7 @@ use ark_std::rand::Rng;
use criterion::{criterion_group, criterion_main, Criterion};
use komodo::{
fec::{combine, Shard},
fec::{recode_with_coeffs, Shard},
field,
};
......@@ -50,7 +50,7 @@ fn bench_template<F: PrimeField>(
"recoding {} bytes and {} shards with k = {} on {}",
nb_bytes, nb_shards, k, curve
),
|b| b.iter(|| combine(&shards, &coeffs)),
|b| b.iter(|| recode_with_coeffs(&shards, &coeffs)),
);
}
......
......@@ -21,7 +21,7 @@ where
let rng = &mut rand::thread_rng();
c.bench_function(&format!("setup (komodo) {} on {}", degree, curve), |b| {
b.iter(|| zk::setup::<_, F, G>(degree, rng).unwrap())
b.iter(|| zk::setup::<F, G>(degree, rng).unwrap())
});
}
......@@ -60,7 +60,7 @@ where
let rng = &mut rand::thread_rng();
let setup = zk::setup::<_, F, G>(degree, rng).unwrap();
let setup = zk::setup::<F, G>(degree, rng).unwrap();
group.bench_function(
&format!("serializing with compression {} on {}", degree, curve),
......
......@@ -20,7 +20,7 @@ where
let max_degree = *degrees.iter().max().unwrap_or(&0);
eprint!("building trusted setup for degree {}... ", max_degree);
let setup = zk::setup::<_, F, G>(max_degree, rng).unwrap();
let setup = zk::setup::<F, G>(max_degree, rng).unwrap();
eprintln!("done");
for (i, degree) in degrees.iter().enumerate() {
......
......@@ -17,7 +17,7 @@ where
eprintln!("degree: {}", degree);
let setup = zk::setup::<_, F, G>(degree, rng).unwrap();
let setup = zk::setup::<F, G>(degree, rng).unwrap();
for compress in [Compress::Yes, Compress::No] {
println!(
......
......@@ -15,6 +15,8 @@ pub enum KomodoError {
IncompatibleMatrixShapes(usize, usize, usize, usize),
#[error("Expected at least {1} shards, got {0}")]
TooFewShards(usize, usize),
#[error("Shards are incompatible: {0}")]
IncompatibleShards(String),
#[error("Blocks are incompatible: {0}")]
IncompatibleBlocks(String),
#[error("Degree is zero")]
......
......@@ -2,6 +2,7 @@
use ark_ff::PrimeField;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::rand::RngCore;
use rs_merkle::{algorithms::Sha256, Hasher};
......@@ -25,7 +26,7 @@ pub struct Shard<F: PrimeField> {
impl<F: PrimeField> Shard<F> {
/// compute the linear combination between two [`Shard`]s
pub fn combine(&self, alpha: F, other: &Self, beta: F) -> Self {
pub fn recode_with(&self, alpha: F, other: &Self, beta: F) -> Self {
if alpha.is_zero() {
return other.clone();
} else if beta.is_zero() {
......@@ -55,11 +56,11 @@ impl<F: PrimeField> Shard<F> {
/// compute the linear combination between an arbitrary number of [`Shard`]s
///
/// > **Note**
/// > this is basically a multi-[`Shard`] wrapper around [`Shard::combine`]
/// > this is basically a multi-[`Shard`] wrapper around [`Shard::recode_with`]
/// >
/// > returns [`None`] if number of shards is not the same as the number of
/// > coefficients or if no shards are provided.
pub fn combine<F: PrimeField>(shards: &[Shard<F>], coeffs: &[F]) -> Option<Shard<F>> {
pub fn recode_with_coeffs<F: PrimeField>(shards: &[Shard<F>], coeffs: &[F]) -> Option<Shard<F>> {
if shards.len() != coeffs.len() {
return None;
}
......@@ -72,11 +73,49 @@ pub fn combine<F: PrimeField>(shards: &[Shard<F>], coeffs: &[F]) -> Option<Shard
.zip(coeffs)
.skip(1)
.fold((shards[0].clone(), coeffs[0]), |(acc_s, acc_c), (s, c)| {
(acc_s.combine(acc_c, s, *c), F::one())
(acc_s.recode_with(acc_c, s, *c), F::one())
});
Some(s)
}
/// compute a recoded shard from an arbitrary set of shards
///
/// coefficients will be drawn at random, one for each shard.
///
/// if the shards appear to come from different data, e.g. if `k` is not the
/// same or the hash of the data is different, an error will be returned.
///
/// > **Note**
/// > this is a wrapper around [`recode_with_coeffs`].
pub fn recode_random<F: PrimeField>(
    shards: &[Shard<F>],
    rng: &mut impl RngCore,
) -> Result<Option<Shard<F>>, KomodoError> {
    // every shard must agree with its neighbour on `k`, the data hash and the
    // data size, otherwise the shards cannot come from the same source data
    for (i, pair) in shards.windows(2).enumerate() {
        let (s1, s2) = (&pair[0], &pair[1]);
        if s1.k != s2.k {
            return Err(KomodoError::IncompatibleShards(format!(
                "k is not the same at {}: {} vs {}",
                i, s1.k, s2.k
            )));
        }
        if s1.hash != s2.hash {
            return Err(KomodoError::IncompatibleShards(format!(
                "hash is not the same at {}: {:?} vs {:?}",
                i, s1.hash, s2.hash
            )));
        }
        if s1.size != s2.size {
            return Err(KomodoError::IncompatibleShards(format!(
                "size is not the same at {}: {} vs {}",
                i, s1.size, s2.size
            )));
        }
    }

    // one random coefficient per shard; the actual linear combination is
    // delegated to [`recode_with_coeffs`]
    let coeffs: Vec<F> = shards.iter().map(|_| F::rand(rng)).collect();
    Ok(recode_with_coeffs(shards, &coeffs))
}
/// applies a given encoding matrix to some data to generate encoded shards
///
/// > **Note**
......@@ -91,7 +130,7 @@ pub fn encode<F: PrimeField>(
let k = encoding_mat.height;
let source_shards = Matrix::from_vec_vec(
field::split_data_into_field_elements::<F>(data, k)
field::split_data_into_field_elements(data, k)
.chunks(k)
.map(|c| c.to_vec())
.collect(),
......@@ -146,7 +185,7 @@ pub fn decode<F: PrimeField>(shards: Vec<Shard<F>>) -> Result<Vec<u8>, KomodoErr
let source_shards = encoding_mat.invert()?.mul(&shard_mat)?.transpose().elements;
let mut bytes = field::merge_elements_into_bytes::<F>(&source_shards);
let mut bytes = field::merge_elements_into_bytes(&source_shards);
bytes.resize(shards[0].size, 0);
Ok(bytes)
}
......@@ -162,7 +201,7 @@ mod tests {
linalg::Matrix,
};
use super::combine;
use super::recode_with_coeffs;
fn bytes() -> Vec<u8> {
include_bytes!("../tests/dragoon_32x32.png").to_vec()
......@@ -188,8 +227,8 @@ mod tests {
let mut rng = ark_std::test_rng();
let mut shards = encode(data, &Matrix::random(k, n, &mut rng)).unwrap();
shards[1] = shards[2].combine(to_curve::<F>(7), &shards[4], to_curve::<F>(6));
shards[2] = shards[1].combine(to_curve::<F>(5), &shards[3], to_curve::<F>(4));
shards[1] = shards[2].recode_with(to_curve(7), &shards[4], to_curve(6));
shards[2] = shards[1].recode_with(to_curve(5), &shards[3], to_curve(4));
assert_eq!(
data,
decode::<F>(shards).unwrap(),
......@@ -232,7 +271,7 @@ mod tests {
k: 2,
linear_combination: linear_combination.to_vec(),
hash: vec![],
data: field::split_data_into_field_elements::<F>(bytes, 1),
data: field::split_data_into_field_elements(bytes, 1),
size: 0,
}
}
......@@ -241,16 +280,16 @@ mod tests {
let a: Shard<F> = create_fake_shard(&[F::one(), F::zero()], &[1, 2, 3]);
let b: Shard<F> = create_fake_shard(&[F::zero(), F::one()], &[4, 5, 6]);
let c = a.combine(to_curve::<F>(3), &b, to_curve::<F>(5));
let c = a.recode_with(to_curve(3), &b, to_curve(5));
assert_eq!(
c,
create_fake_shard(&[to_curve::<F>(3), to_curve::<F>(5),], &[23, 31, 39])
create_fake_shard(&[to_curve(3), to_curve(5),], &[23, 31, 39])
);
assert_eq!(
c.combine(to_curve::<F>(2), &a, to_curve::<F>(4),),
create_fake_shard(&[to_curve::<F>(10), to_curve::<F>(10),], &[50, 70, 90],)
c.recode_with(to_curve(2), &a, to_curve(4),),
create_fake_shard(&[to_curve(10), to_curve(10),], &[50, 70, 90],)
);
}
......@@ -260,23 +299,20 @@ mod tests {
}
fn combine_shards_template<F: PrimeField>() {
let a = create_fake_shard::<F>(&[to_curve::<F>(1), to_curve::<F>(0)], &[1, 4, 7]);
let b = create_fake_shard::<F>(&[to_curve::<F>(0), to_curve::<F>(2)], &[2, 5, 8]);
let c = create_fake_shard::<F>(&[to_curve::<F>(3), to_curve::<F>(5)], &[3, 6, 9]);
let a = create_fake_shard::<F>(&[to_curve(1), to_curve(0)], &[1, 4, 7]);
let b = create_fake_shard::<F>(&[to_curve(0), to_curve(2)], &[2, 5, 8]);
let c = create_fake_shard::<F>(&[to_curve(3), to_curve(5)], &[3, 6, 9]);
assert!(combine::<F>(&[], &[]).is_none());
assert!(combine::<F>(
assert!(recode_with_coeffs::<F>(&[], &[]).is_none());
assert!(recode_with_coeffs(
&[a.clone(), b.clone(), c.clone()],
&[to_curve::<F>(1), to_curve::<F>(2)]
&[to_curve(1), to_curve(2)]
)
.is_none());
assert_eq!(
combine::<F>(
&[a, b, c],
&[to_curve::<F>(1), to_curve::<F>(2), to_curve::<F>(3)]
),
Some(create_fake_shard::<F>(
&[to_curve::<F>(10), to_curve::<F>(19)],
recode_with_coeffs(&[a, b, c], &[to_curve(1), to_curve(2), to_curve(3)]),
Some(create_fake_shard(
&[to_curve(10), to_curve(19)],
&[14, 32, 50]
))
);
......
......@@ -95,8 +95,8 @@ mod tests {
}
fn split_and_merge_template<F: PrimeField>(bytes: &[u8], modulus: usize) {
let elements = field::split_data_into_field_elements::<F>(bytes, modulus);
let mut actual = merge_elements_into_bytes::<F>(&elements);
let elements: Vec<F> = field::split_data_into_field_elements(bytes, modulus);
let mut actual = merge_elements_into_bytes(&elements);
actual.resize(bytes.len(), 0);
assert_eq!(bytes, actual, "TEST | modulus: {modulus}");
}
......
......@@ -90,7 +90,7 @@ pub fn read_blocks<F: PrimeField, G: CurveGroup<ScalarField = F>>(
let s = std::fs::read(filename)?;
Ok((
f.clone(),
Block::<F, G>::deserialize_with_mode(&s[..], compress, validate)?,
Block::deserialize_with_mode(&s[..], compress, validate)?,
))
})
.collect()
......
......@@ -3,7 +3,8 @@ use ark_ec::CurveGroup;
use ark_ff::PrimeField;
use ark_poly::DenseUVPolynomial;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::{ops::Div, rand::RngCore};
use ark_std::ops::Div;
use ark_std::rand::RngCore;
use tracing::{debug, info};
......@@ -16,8 +17,7 @@ pub mod zk;
use crate::{
error::KomodoError,
fec::combine,
linalg::Matrix,
fec::Shard,
zk::{Commitment, Powers},
};
......@@ -28,7 +28,7 @@ use crate::{
#[derive(Debug, Default, Clone, PartialEq, CanonicalSerialize, CanonicalDeserialize)]
pub struct Block<F: PrimeField, G: CurveGroup<ScalarField = F>> {
pub shard: fec::Shard<F>,
commit: Vec<Commitment<F, G>>,
proof: Vec<Commitment<F, G>>,
}
impl<F: PrimeField, G: CurveGroup<ScalarField = F>> std::fmt::Display for Block<F, G> {
......@@ -70,7 +70,7 @@ impl<F: PrimeField, G: CurveGroup<ScalarField = F>> std::fmt::Display for Block<
write!(f, "}}")?;
write!(f, ",")?;
write!(f, "commits: [")?;
for commit in &self.commit {
for commit in &self.proof {
write!(f, r#""{}","#, commit.0)?;
}
write!(f, "]")?;
......@@ -80,16 +80,47 @@ impl<F: PrimeField, G: CurveGroup<ScalarField = F>> std::fmt::Display for Block<
}
}
/// compute encoded and proven blocks of data from some data and an encoding
/// method
/// compute a recoded block from an arbitrary set of blocks
///
/// coefficients will be drawn at random, one for each block.
///
/// if the blocks appear to come from different data, e.g. if the commits are
/// different, an error will be returned.
///
/// > **Note**
/// > this is a wrapper around [`fec::encode`].
pub fn encode<F, G, P>(
/// > this is a wrapper around [`fec::recode_random`].
/// compute a recoded block from an arbitrary set of blocks
///
/// coefficients will be drawn at random, one for each block.
///
/// if the blocks appear to come from different data, e.g. if the commits are
/// different, an error will be returned.
///
/// > **Note**
/// > this is a wrapper around [`fec::recode_random`].
pub fn recode<F: PrimeField, G: CurveGroup<ScalarField = F>>(
    blocks: &[Block<F, G>],
    rng: &mut impl RngCore,
) -> Result<Option<Block<F, G>>, KomodoError> {
    // all blocks must carry the same commits, otherwise they prove different
    // source data and must not be combined
    for (i, pair) in blocks.windows(2).enumerate() {
        if pair[0].proof != pair[1].proof {
            return Err(KomodoError::IncompatibleBlocks(format!(
                "proofs are not the same at {}: {:?} vs {:?}",
                i, pair[0].proof, pair[1].proof
            )));
        }
    }

    // recode the underlying shards with random coefficients; `?` bubbles up
    // shard incompatibilities and `map` propagates the "no shards" case
    let shards: Vec<_> = blocks.iter().map(|b| b.shard.clone()).collect();
    Ok(fec::recode_random(&shards, rng)?.map(|shard| Block {
        shard,
        proof: blocks[0].proof.clone(),
    }))
}
/// compute the Semi-AVID proof for some data
pub fn prove<F, G, P>(
bytes: &[u8],
encoding_mat: &Matrix<F>,
powers: &Powers<F, G>,
) -> Result<Vec<Block<F, G>>, KomodoError>
k: usize,
) -> Result<Vec<Commitment<F, G>>, KomodoError>
where
F: PrimeField,
G: CurveGroup<ScalarField = F>,
......@@ -98,10 +129,8 @@ where
{
info!("encoding and proving {} bytes", bytes.len());
let k = encoding_mat.height;
debug!("splitting bytes into polynomials");
let elements = field::split_data_into_field_elements::<F>(bytes, k);
let elements = field::split_data_into_field_elements(bytes, k);
let polynomials = elements
.chunks(k)
.map(|c| P::from_coefficients_vec(c.to_vec()))
......@@ -120,68 +149,25 @@ where
debug!("committing the polynomials");
let commits = zk::batch_commit(powers, &polynomials_to_commit)?;
Ok(fec::encode(bytes, encoding_mat)?
Ok(commits)
}
/// attach a Semi-AVID proof to a collection of encoded shards
#[inline(always)]
pub fn build<F, G, P>(shards: &[Shard<F>], proof: &[Commitment<F, G>]) -> Vec<Block<F, G>>
where
F: PrimeField,
G: CurveGroup<ScalarField = F>,
P: DenseUVPolynomial<F>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>,
{
shards
.iter()
.map(|s| Block {
shard: s.clone(),
commit: commits.clone(),
proof: proof.to_vec(),
})
.collect::<Vec<_>>())
}
/// compute a recoded block from an arbitrary set of blocks
///
/// coefficients will be drawn at random, one for each block.
///
/// if the blocks appear to come from different data, e.g. if `k` is not the
/// same or the hash of the data is different, an error will be returned.
///
/// > **Note**
/// > this is a wrapper around [`fec::combine`].
pub fn recode<F: PrimeField, G: CurveGroup<ScalarField = F>, R: RngCore>(
blocks: &[Block<F, G>],
rng: &mut R,
) -> Result<Option<Block<F, G>>, KomodoError> {
let coeffs = blocks.iter().map(|_| F::rand(rng)).collect::<Vec<_>>();
for (i, (b1, b2)) in blocks.iter().zip(blocks.iter().skip(1)).enumerate() {
if b1.shard.k != b2.shard.k {
return Err(KomodoError::IncompatibleBlocks(format!(
"k is not the same at {}: {} vs {}",
i, b1.shard.k, b2.shard.k
)));
}
if b1.shard.hash != b2.shard.hash {
return Err(KomodoError::IncompatibleBlocks(format!(
"hash is not the same at {}: {:?} vs {:?}",
i, b1.shard.hash, b2.shard.hash
)));
}
if b1.shard.size != b2.shard.size {
return Err(KomodoError::IncompatibleBlocks(format!(
"size is not the same at {}: {} vs {}",
i, b1.shard.size, b2.shard.size
)));
}
if b1.commit != b2.commit {
return Err(KomodoError::IncompatibleBlocks(format!(
"commits are not the same at {}: {:?} vs {:?}",
i, b1.commit, b2.commit
)));
}
}
let shard = match combine(
&blocks.iter().map(|b| b.shard.clone()).collect::<Vec<_>>(),
&coeffs,
) {
Some(s) => s,
None => return Ok(None),
};
Ok(Some(Block {
shard,
commit: blocks[0].commit.clone(),
}))
.collect::<Vec<_>>()
}
/// verify that a single block of encoded and proven data is valid
......@@ -204,9 +190,9 @@ where
.linear_combination
.iter()
.enumerate()
.map(|(i, w)| Into::<G>::into(block.commit[i].0) * w)
.map(|(i, w)| block.proof[i].0.into() * w)
.sum();
Ok(Into::<G>::into(commit.0) == rhs)
Ok(commit.0.into() == rhs)
}
#[cfg(test)]
......@@ -218,11 +204,11 @@ mod tests {
use ark_std::{ops::Div, test_rng};
use crate::{
encode,
build,
error::KomodoError,
fec::{decode, Shard},
fec::{decode, encode, Shard},
linalg::Matrix,
recode, verify,
prove, recode, verify,
zk::{setup, Commitment},
};
......@@ -230,6 +216,12 @@ mod tests {
include_bytes!("../tests/dragoon_133x133.png").to_vec()
}
// test helper: run the whole pipeline on bytes `$b` with powers `$p` and
// encoding matrix `$m` — encode the data into shards, compute the Semi-AVID
// proof, and attach the proof to each shard to build the blocks.
// expands inside a function returning `Result<_, KomodoError>` (uses `?`).
macro_rules! full {
($b:ident, $p:ident, $m:ident) => {
build::<F, G, P>(&encode($b, $m)?, &prove($b, &$p, $m.height)?)
};
}
fn verify_template<F, G, P>(bytes: &[u8], encoding_mat: &Matrix<F>) -> Result<(), KomodoError>
where
F: PrimeField,
......@@ -239,11 +231,12 @@ mod tests {
{
let rng = &mut test_rng();
let powers = setup(bytes.len(), rng)?;
let blocks = encode::<F, G, P>(bytes, encoding_mat, &powers)?;
let powers = setup::<F, G>(bytes.len(), rng)?;
let blocks = full!(bytes, powers, encoding_mat);
for block in &blocks {
assert!(verify::<F, G, P>(block, &powers)?);
assert!(verify(block, &powers)?);
}
Ok(())
......@@ -262,20 +255,21 @@ mod tests {
let rng = &mut test_rng();
let powers = setup(bytes.len(), rng)?;
let blocks = encode::<F, G, P>(bytes, encoding_mat, &powers)?;
let blocks = full!(bytes, powers, encoding_mat);
for block in &blocks {
assert!(verify::<F, G, P>(block, &powers)?);
assert!(verify(block, &powers)?);
}
let mut corrupted_block = blocks[0].clone();
// modify a field in the struct b to corrupt the block proof without corrupting the data serialization
let a = F::from_le_bytes_mod_order(&123u128.to_le_bytes());
let mut commits: Vec<G> = corrupted_block.commit.iter().map(|c| c.0.into()).collect();
let mut commits: Vec<G> = corrupted_block.proof.iter().map(|c| c.0.into()).collect();
commits[0] = commits[0].mul(a.pow([4321_u64]));
corrupted_block.commit = commits.iter().map(|&c| Commitment(c.into())).collect();
corrupted_block.proof = commits.iter().map(|&c| Commitment(c.into())).collect();
assert!(!verify::<F, G, P>(&corrupted_block, &powers)?);
assert!(!verify(&corrupted_block, &powers)?);
Ok(())
}
......@@ -292,14 +286,15 @@ mod tests {
{
let rng = &mut test_rng();
let powers = setup(bytes.len(), rng)?;
let blocks = encode::<F, G, P>(bytes, encoding_mat, &powers)?;
let powers = setup::<F, G>(bytes.len(), rng)?;
let blocks = full!(bytes, powers, encoding_mat);
assert!(verify::<F, G, P>(
assert!(verify(
&recode(&blocks[2..=3], rng).unwrap().unwrap(),
&powers
)?);
assert!(verify::<F, G, P>(
assert!(verify(
&recode(&[blocks[3].clone(), blocks[5].clone()], rng)
.unwrap()
.unwrap(),
......@@ -321,13 +316,13 @@ mod tests {
{
let rng = &mut test_rng();
let powers = setup(bytes.len(), rng)?;
let blocks: Vec<Shard<F>> = encode::<F, G, P>(bytes, encoding_mat, &powers)?
.iter()
.map(|b| b.shard.clone())
.collect();
let powers = setup::<F, G>(bytes.len(), rng)?;
assert_eq!(bytes, decode::<F>(blocks).unwrap());
let blocks = full!(bytes, powers, encoding_mat);
let shards: Vec<Shard<F>> = blocks.iter().map(|b| b.shard.clone()).collect();
assert_eq!(bytes, decode(shards).unwrap());
Ok(())
}
......@@ -344,8 +339,9 @@ mod tests {
{
let rng = &mut test_rng();
let powers = setup(bytes.len(), rng)?;
let blocks = encode::<F, G, P>(bytes, encoding_mat, &powers)?;
let powers = setup::<F, G>(bytes.len(), rng)?;
let blocks = full!(bytes, powers, encoding_mat);
let b_0_1 = recode(&blocks[0..=1], rng).unwrap().unwrap();
let shards = vec![
......@@ -353,7 +349,7 @@ mod tests {
blocks[2].shard.clone(),
blocks[3].shard.clone(),
];
assert_eq!(bytes, decode::<F>(shards).unwrap());
assert_eq!(bytes, decode(shards).unwrap());
let b_0_1 = recode(&[blocks[0].clone(), blocks[1].clone()], rng)
.unwrap()
......@@ -363,7 +359,7 @@ mod tests {
blocks[1].shard.clone(),
b_0_1.shard,
];
assert!(decode::<F>(shards).is_err());
assert!(decode(shards).is_err());
let b_0_1 = recode(&blocks[0..=1], rng).unwrap().unwrap();
let b_2_3 = recode(&blocks[2..=3], rng).unwrap().unwrap();
......@@ -371,12 +367,12 @@ mod tests {
.unwrap()
.unwrap();
let shards = vec![b_0_1.shard, b_2_3.shard, b_1_4.shard];
assert_eq!(bytes, decode::<F>(shards).unwrap());
assert_eq!(bytes, decode(shards).unwrap());
let fully_recoded_shards = (0..3)
.map(|_| recode(&blocks[0..=2], rng).unwrap().unwrap().shard)
.collect();
assert_eq!(bytes, decode::<F>(fully_recoded_shards).unwrap());
assert_eq!(bytes, decode(fully_recoded_shards).unwrap());
Ok(())
}
......
......@@ -13,12 +13,12 @@ use ark_std::rand::RngCore;
use tracing::{info, warn};
use komodo::{
encode,
build,
error::KomodoError,
fec::{decode, Shard},
fec::{self, decode, Shard},
fs,
linalg::Matrix,
recode, verify,
prove, recode, verify,
zk::{self, Powers},
Block,
};
......@@ -118,21 +118,20 @@ fn throw_error(code: i32, message: &str) {
exit(code);
}
fn generate_random_powers<F, G, P, R>(
fn generate_random_powers<F, G, P>(
n: usize,
powers_dir: &Path,
powers_filename: Option<&str>,
rng: &mut R,
rng: &mut impl RngCore,
) -> Result<()>
where
F: PrimeField,
G: CurveGroup<ScalarField = F>,
P: DenseUVPolynomial<F>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>,
R: RngCore,
{
info!("generating new powers");
let powers = zk::setup::<_, F, G>(zk::nb_elements_in_setup::<F>(n), rng)?;
let powers = zk::setup::<F, G>(zk::nb_elements_in_setup::<F>(n), rng)?;
fs::dump(&powers, powers_dir, powers_filename, COMPRESS)?;
......@@ -189,7 +188,7 @@ fn main() {
let powers_file = powers_dir.join(powers_filename);
if do_generate_powers {
generate_random_powers::<Fr, G1Projective, DensePolynomial<Fr>, _>(
generate_random_powers::<Fr, G1Projective, DensePolynomial<Fr>>(
nb_bytes,
&powers_dir,
Some(powers_filename),
......@@ -286,7 +285,7 @@ fn main() {
} else {
warn!("could not read powers from `{:?}`", powers_file);
info!("regenerating temporary powers");
zk::setup::<_, Fr, G1Projective>(zk::nb_elements_in_setup::<Fr>(nb_bytes), &mut rng)
zk::setup::<Fr, G1Projective>(zk::nb_elements_in_setup::<Fr>(nb_bytes), &mut rng)
.unwrap_or_else(|e| {
throw_error(1, &format!("could not generate powers: {}", e));
unreachable!()
......@@ -324,16 +323,18 @@ fn main() {
}
};
let formatted_output = fs::dump_blocks(
&encode::<Fr, G1Projective, DensePolynomial<Fr>>(&bytes, &encoding_mat, &powers)
.unwrap_or_else(|e| {
throw_error(1, &format!("could not encode: {}", e));
unreachable!()
}),
&block_dir,
COMPRESS,
)
.unwrap_or_else(|e| {
let shards = fec::encode::<Fr>(&bytes, &encoding_mat).unwrap_or_else(|e| {
throw_error(1, &format!("could not encode: {}", e));
unreachable!()
});
let proof =
prove::<Fr, G1Projective, DensePolynomial<Fr>>(&bytes, &powers, k).unwrap_or_else(|e| {
throw_error(1, &format!("could not prove: {}", e));
unreachable!()
});
let blocks = build::<Fr, G1Projective, DensePolynomial<Fr>>(&shards, &proof);
let formatted_output = fs::dump_blocks(&blocks, &block_dir, COMPRESS).unwrap_or_else(|e| {
throw_error(1, &format!("could not dump blocks: {}", e));
unreachable!()
});
......
......@@ -35,9 +35,9 @@ impl<F: PrimeField, G: CurveGroup<ScalarField = F>> IntoIterator for Powers<F, G
pub struct Commitment<F: PrimeField, G: CurveGroup<ScalarField = F>>(pub G::Affine);
/// create a trusted setup of a given size, the expected maximum degree of the data
pub fn setup<R: RngCore, F: PrimeField, G: CurveGroup<ScalarField = F>>(
pub fn setup<F: PrimeField, G: CurveGroup<ScalarField = F>>(
max_degree: usize,
rng: &mut R,
rng: &mut impl RngCore,
) -> Result<Powers<F, G>, KomodoError> {
if max_degree < 1 {
return Err(KomodoError::DegreeIsZero);
......@@ -182,7 +182,7 @@ mod tests {
let rng = &mut test_rng();
let powers = setup::<_, F, G>(degree, rng).unwrap();
let powers = setup::<F, G>(degree, rng).unwrap();
assert_eq!(
powers.len(),
......@@ -201,7 +201,7 @@ mod tests {
fn generate_invalid_setup_template<F: PrimeField, G: CurveGroup<ScalarField = F>>() {
let rng = &mut test_rng();
let powers = setup::<_, F, G>(0, rng);
let powers = setup::<F, G>(0, rng);
assert!(
powers.is_err(),
"creating a trusted setup for a degree 0 polynomial should NOT work"
......@@ -212,7 +212,7 @@ mod tests {
"message should say the degree is zero"
);
assert!(
setup::<_, F, G>(1, rng).is_ok(),
setup::<F, G>(1, rng).is_ok(),
"creating a trusted setup for any polynomial with degree at least 1 should work"
);
}
......@@ -232,7 +232,7 @@ mod tests {
let rng = &mut test_rng();
let powers = setup::<_, F, G>(degree, rng).unwrap();
let powers = setup::<F, G>(degree, rng).unwrap();
assert!(
commit_to_test(&powers, &P::rand(degree - 1, rng)).is_ok(),
......
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment