Commit 15184312 authored by STEVAN Antoine :crab:

fix feature dependencies and imports (dragoon/komodo!164)

In dragoon/komodo!162 and dragoon/komodo!163, I only ran `cargo ... --all-features` without checking the individual features...

This MR adds `cargo check --features ...` invocations to the `Makefile` and makes sure the imports still make sense when compiling with a single feature enabled.

> :exclamation: **Important**
>
> The other notable change here is that `kzg::commit` has been moved to `zk::ark_commit` and is re-exported from `kzg` as `kzg::commit`, so existing call sites keep compiling (see the sketch below).
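
For downstream code the move should be transparent; here is a minimal sketch (not part of this MR) of how the two paths line up after the change. The crate name `komodo`, the enabled `kzg` feature, and the `commit_both_ways` wrapper are assumptions made for illustration only:

```rust
use ark_ec::pairing::Pairing;
use ark_poly::DenseUVPolynomial;
use ark_poly_commit::kzg10;
use std::ops::Div;

// assumption: the crate is imported as `komodo` and built with the `kzg` feature,
// so that both the `kzg` and `zk` modules are compiled.
use komodo::{kzg, zk};

// hypothetical helper, only here to show the equivalence of the two paths
fn commit_both_ways<E, P>(
    powers: &kzg10::Powers<E>,
    polynomials: &[P],
) -> Result<(), ark_poly_commit::Error>
where
    E: Pairing,
    P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
    for<'a, 'b> &'a P: Div<&'b P, Output = P>,
{
    // new canonical location of the helper
    let (c1, _) = zk::ark_commit(powers, polynomials)?;
    // old path, still available through `pub use crate::zk::ark_commit as commit`
    let (c2, _) = kzg::commit(powers, polynomials)?;
    // commitments are deterministic here (no hiding bound, no rng), so both agree
    assert_eq!(c1, c2);
    Ok(())
}
```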
parent 5782d4dc
@@ -32,4 +32,4 @@ rand = "0.8.5"
[features]
kzg = ["dep:ark-poly-commit"]
aplonk = []
aplonk = ["dep:ark-poly-commit"]
@@ -9,6 +9,9 @@ fmt:
cargo fmt --all
check:
cargo check --workspace --all-targets
cargo check --workspace --all-targets --features kzg
cargo check --workspace --all-targets --features aplonk
cargo check --workspace --all-targets --all-features
clippy:
use ark_ec::pairing::{Pairing, PairingOutput};
use ark_ec::pairing::Pairing;
#[cfg(feature = "aplonk")]
use ark_ec::pairing::PairingOutput;
use ark_poly::DenseUVPolynomial;
use ark_std::One;
use std::ops::{Div, Mul};
#[cfg(feature = "kzg")]
pub(crate) fn scalar_product_polynomial<E, P>(lhs: &[E::ScalarField], rhs: &[P]) -> P
where
E: Pairing,
@@ -108,9 +109,13 @@ mod tests {
use ark_ec::pairing::Pairing;
use ark_ff::PrimeField;
use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};
#[cfg(feature = "aplonk")]
use ark_std::test_rng;
#[cfg(feature = "aplonk")]
use ark_std::UniformRand;
use std::ops::{Add, Div};
#[cfg(feature = "aplonk")]
use std::ops::Add;
use std::ops::Div;
type UniPoly381 = DensePolynomial<<Bls12_381 as Pairing>::ScalarField>;
@@ -146,6 +151,7 @@ mod tests {
polynomial_template::<Bls12_381, UniPoly381>();
}
#[cfg(feature = "aplonk")]
fn scalar_template<E: Pairing>(lhs: Vec<u8>, rhs: Vec<u8>, result: u8) {
let lhs = lhs
.iter()
@@ -160,20 +166,24 @@ mod tests {
assert_eq!(super::super::scalar_product::<E>(&lhs, &rhs), result);
}
#[cfg(feature = "aplonk")]
#[test]
fn scalar() {
scalar_template::<Bls12_381>(vec![1, 2], vec![3, 4], 11);
scalar_template::<Bls12_381>(vec![5, 6], vec![7, 8], 83);
}
#[cfg(feature = "aplonk")]
#[ignore = "scalar_product_g1 is a clone of scalar_product"]
#[test]
fn g_1() {}
#[cfg(feature = "aplonk")]
#[ignore = "scalar_product_g2 is a clone of scalar_product"]
#[test]
fn g_2() {}
#[cfg(feature = "aplonk")]
fn pairing_template<E: Pairing>() {
let rng = &mut test_rng();
@@ -189,6 +199,7 @@ mod tests {
);
}
#[cfg(feature = "aplonk")]
#[test]
fn pairing() {
pairing_template::<Bls12_381>();
@@ -19,7 +19,12 @@ use rs_merkle::Hasher;
use std::marker::PhantomData;
use std::ops::{Div, Mul};
use crate::{algebra, error::KomodoError, fec::Shard, kzg, zk::trim};
use crate::{
algebra,
error::KomodoError,
fec::Shard,
zk::{ark_commit, trim},
};
mod ipa;
mod polynomial;
@@ -122,7 +127,7 @@ where
}
// commit.1.
let mu = match kzg::commit(&powers, &polynomials) {
let mu = match ark_commit(&powers, &polynomials) {
Ok((mu, _)) => mu,
Err(error) => return Err(KomodoError::Other(error.to_string())),
};
@@ -6,10 +6,7 @@
use ark_ec::{pairing::Pairing, AffineRepr};
use ark_ff::PrimeField;
use ark_poly::DenseUVPolynomial;
use ark_poly_commit::{
kzg10::{Commitment, Powers, Proof, Randomness, VerifierKey, KZG10},
PCRandomness,
};
use ark_poly_commit::{kzg10, PCRandomness};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, SerializationError};
use ark_std::{ops::Div, Zero};
use rs_merkle::{algorithms::Sha256, Hasher};
@@ -19,32 +16,13 @@ use crate::algebra;
use crate::error::KomodoError;
use crate::fec::Shard;
#[allow(clippy::type_complexity)]
pub fn commit<E, P>(
powers: &Powers<E>,
polynomials: &[P],
) -> Result<(Vec<Commitment<E>>, Vec<Randomness<E::ScalarField, P>>), ark_poly_commit::Error>
where
E: Pairing,
P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>,
{
let mut commits = Vec::new();
let mut randomnesses = Vec::new();
for polynomial in polynomials {
let (commit, randomness) = KZG10::<E, P>::commit(powers, polynomial, None, None)?;
commits.push(commit);
randomnesses.push(randomness);
}
Ok((commits, randomnesses))
}
pub use crate::zk::ark_commit as commit;
#[derive(Debug, Clone, Default, PartialEq, CanonicalDeserialize, CanonicalSerialize)]
pub struct Block<E: Pairing> {
pub shard: Shard<E::ScalarField>,
commit: Vec<Commitment<E>>,
proof: Proof<E>,
commit: Vec<kzg10::Commitment<E>>,
proof: kzg10::Proof<E>,
}
/// this function splits the data (bytes) into k shards and generates n shards with a proof for each.
@@ -56,11 +34,11 @@ pub struct Block<E: Pairing> {
/// prove this polynomial with KZG10
/// store in the n Block the proof, the m commits and the m P_i evaluations
pub fn prove<E, P>(
commits: Vec<Commitment<E>>,
commits: Vec<kzg10::Commitment<E>>,
polynomials: Vec<P>,
shards: Vec<Shard<E::ScalarField>>,
points: Vec<E::ScalarField>,
powers: Powers<E>,
powers: kzg10::Powers<E>,
) -> Result<Vec<Block<E>>, KomodoError>
where
E: Pairing,
@@ -99,11 +77,11 @@ where
let r_vec = algebra::powers_of::<E>(r, polynomials.len());
let poly_q = algebra::scalar_product_polynomial::<E, P>(&r_vec, &polynomials);
match KZG10::<E, P>::open(
match kzg10::KZG10::<E, P>::open(
&powers,
&poly_q,
*pt,
&Randomness::<E::ScalarField, P>::empty(),
&kzg10::Randomness::<E::ScalarField, P>::empty(),
) {
Ok(proof) => proofs.push(Block {
shard: s.clone(),
@@ -151,7 +129,11 @@ where
/// compute y as a combination of the shards: y = sum(r^i * Shard_i) for i=[0..m[
/// compute c as a combination of the commitments: c = sum(r^i * Commit_i) for i=[0..m[
/// Check if e(c - yG1,G2) == e(proof,(T-alpha)G2)
pub fn verify<E, P>(block: &Block<E>, pt: E::ScalarField, verifier_key: &VerifierKey<E>) -> bool
pub fn verify<E, P>(
block: &Block<E>,
pt: E::ScalarField,
verifier_key: &kzg10::VerifierKey<E>,
) -> bool
where
E: Pairing,
P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
@@ -188,7 +170,7 @@ where
pub fn batch_verify<E, P>(
blocks: &[Block<E>],
pts: &[E::ScalarField],
verifier_key: &VerifierKey<E>,
verifier_key: &kzg10::VerifierKey<E>,
) -> Result<bool, SerializationError>
where
E: Pairing,
@@ -5,9 +5,9 @@ use ark_poly::DenseUVPolynomial;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::{end_timer, ops::Div, rand::RngCore, start_timer};
#[cfg(feature = "kzg")]
#[cfg(any(feature = "kzg", feature = "aplonk"))]
use ark_ec::pairing::Pairing;
#[cfg(feature = "kzg")]
#[cfg(any(feature = "kzg", feature = "aplonk"))]
use ark_poly_commit::kzg10;
use crate::error::KomodoError;
@@ -174,7 +174,7 @@ pub fn nb_elements_in_setup<F: PrimeField>(nb_bytes: usize) -> usize {
/// `d` should be less that `pp.max_degree()`.
///
/// > see [`ark-poly-commit::kzg10::tests::KZG10`](https://gitlab.isae-supaero.fr/a.stevan/poly-commit/-/blob/19fc0d4ad2bcff7df030c952d09649918dba7ddb/src/kzg10/mod.rs#L513-L538)
#[cfg(feature = "kzg")]
#[cfg(any(feature = "kzg", feature = "aplonk"))]
pub fn trim<E: Pairing>(
pp: kzg10::UniversalParams<E>,
supported_degree: usize,
@@ -200,6 +200,34 @@ pub fn trim<E: Pairing>(
(powers, vk)
}
#[cfg(any(feature = "kzg", feature = "aplonk"))]
#[allow(clippy::type_complexity)]
pub fn ark_commit<E, P>(
powers: &kzg10::Powers<E>,
polynomials: &[P],
) -> Result<
(
Vec<kzg10::Commitment<E>>,
Vec<kzg10::Randomness<E::ScalarField, P>>,
),
ark_poly_commit::Error,
>
where
E: Pairing,
P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>,
{
let mut commits = Vec::new();
let mut randomnesses = Vec::new();
for polynomial in polynomials {
let (commit, randomness) = kzg10::KZG10::<E, P>::commit(powers, polynomial, None, None)?;
commits.push(commit);
randomnesses.push(randomness);
}
Ok((commits, randomnesses))
}
#[cfg(test)]
mod tests {
use ark_bls12_381::{Fr, G1Projective};