From 9a8acbf721912c7db981385b7d1c6bb426745714 Mon Sep 17 00:00:00 2001
From: "a.stevan" <antoine.stevan@isae-supaero.fr>
Date: Wed, 11 Sep 2024 16:07:10 +0200
Subject: [PATCH] fri: split encoding out of prove and track shard positions

---
 src/fri.rs | 84 +++++++++++++++++++++++++++---------------------------
 1 file changed, 42 insertions(+), 42 deletions(-)

diff --git a/src/fri.rs b/src/fri.rs
index 95ec7fd2..bbc55a88 100644
--- a/src/fri.rs
+++ b/src/fri.rs
@@ -1,6 +1,7 @@
 use ark_ff::PrimeField;
 use ark_poly::DenseUVPolynomial;
 use ark_std::ops::Div;
+use rs_merkle::algorithms::Sha256;
 use rs_merkle::Hasher;
 use tracing::{debug, info};
 
@@ -20,13 +21,37 @@ pub struct Block<F: PrimeField, H: Hasher> {
     pub shard: fec::Shard<F>,
     proof: MerkleProof<H>,
     commit: FridaCommitment<F, H>,
+    position: usize,
+}
+
+pub fn encode<F: PrimeField>(bytes: &[u8], k: usize, n: usize) -> Vec<fec::Shard<F>> {
+    debug!("splitting bytes into rows");
+    let elements: Vec<F> = algebra::split_data_into_field_elements(bytes, k);
+    let rows = elements.chunks(k).map(|c| c.to_vec()).collect::<Vec<_>>();
+    info!(
+        "data is composed of {} rows and {} elements",
+        rows.len(),
+        elements.len()
+    );
+
+    let hash = Sha256::hash(bytes).to_vec();
+
+    let w = F::get_root_of_unity(n as u64).unwrap();
+
+    rows.into_iter()
+        .enumerate()
+        .map(|(i, r)| fec::Shard {
+            k: k as u32,
+            linear_combination: (0..k).map(|j| w.pow([(i + j) as u64])).collect(),
+            hash: hash.clone(),
+            data: to_evaluations(r, n),
+            size: bytes.len(),
+        })
+        .collect::<Vec<_>>()
 }
 
 pub fn prove<const N: usize, F: PrimeField, H: Hasher, P>(
-    bytes: &[u8],
     shards: Vec<fec::Shard<F>>,
-    k: usize,
-    domain_size: usize,
     blowup_factor: usize,
     remainder_plus_one: usize,
     nb_queries: usize,
@@ -36,24 +61,8 @@ where
     for<'a, 'b> &'a P: Div<&'b P, Output = P>,
     <H as rs_merkle::Hasher>::Hash: AsRef<[u8]>,
 {
-    info!("encoding and proving {} bytes", bytes.len());
-
-    debug!("splitting bytes into rows");
-    let elements: Vec<F> = algebra::split_data_into_field_elements(bytes, k);
-    let rows = elements.chunks(k).map(|c| c.to_vec()).collect::<Vec<_>>();
-    info!(
-        "data is composed of {} rows and {} elements",
-        rows.len(),
-        elements.len()
-    );
-
-    let evaluations = rows
-        .into_iter()
-        .map(|r| to_evaluations(r, domain_size))
-        .collect::<Vec<_>>();
-
     let builder = FridaBuilder::<F, H>::new::<N, _>(
-        &evaluations,
+        &shards.iter().map(|s| s.data.clone()).collect::<Vec<_>>(),
         FriChallenger::<H>::default(),
         blowup_factor,
         remainder_plus_one,
@@ -64,29 +73,18 @@ where
     Ok(shards
         .iter()
-        .map(|s| {
-            // TODO: compute true position
-            let position = 0;
-            let _pos = s
-                .linear_combination
-                .iter()
-                .filter(|x| !x.is_zero())
-                .cloned()
-                .collect::<Vec<F>>()
-                .first()
-                .unwrap();
-            Block {
-                shard: s.clone(),
-                proof: builder.prove_shards(&[position]),
-                commit: commit.clone(),
-            }
+        .enumerate()
+        .map(|(i, s)| Block {
+            shard: s.clone(),
+            proof: builder.prove_shards(&[i]),
+            commit: commit.clone(),
+            position: i,
         })
         .collect())
 }
 
 pub fn verify<const N: usize, F: PrimeField, H: Hasher, P>(
     block: Block<F, H>,
-    k: usize,
     domain_size: usize,
     nb_queries: usize,
 ) -> Result<(), KomodoError>
 where
@@ -97,15 +95,17 @@ where
 {
     block
         .commit
-        .verify::<N, _>(FriChallenger::<H>::default(), nb_queries, k, domain_size)
+        .verify::<N, _>(
+            FriChallenger::<H>::default(),
+            nb_queries,
+            block.shard.k as usize,
+            domain_size,
+        )
         .unwrap();
 
-    // TODO: compute true position
-    let position = 0;
-
     assert!(block.proof.verify(
         block.commit.tree_root(),
-        &[position],
+        &[block.position],
         &[H::hash_item(&block.shard)],
         domain_size,
     ));
-- 
GitLab
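
Usage sketch: a minimal end-to-end flow for the refactored API, now that encoding is a
separate step from proving. Only the encode/prove/verify signatures and the position/k
handling come from the diff above; the concrete field, hasher and polynomial types
(Fr, Sha256, DensePolynomial), the parameter values, and the assumption that prove
returns Result<Vec<Block<F, H>>, KomodoError> (as its body suggests) are illustrative,
not taken from the patch.

    use ark_bls12_381::Fr;
    use ark_poly::univariate::DensePolynomial;
    use rs_merkle::algorithms::Sha256;

    fn roundtrip(bytes: &[u8]) -> Result<(), KomodoError> {
        // k source elements per row, n evaluations per row; n doubles as the
        // evaluation domain size passed to verify() (assumed equal here)
        let (k, n) = (4usize, 16usize);

        // 1. encoding is now a standalone step producing plain FEC shards
        let shards = encode::<Fr>(bytes, k, n);

        // 2. prove() consumes the pre-encoded shards; each returned Block
        //    records its shard index so the Merkle proof targets that leaf
        let blocks = prove::<2, Fr, Sha256, DensePolynomial<Fr>>(
            shards,
            2,  // blowup_factor (illustrative)
            1,  // remainder_plus_one (illustrative)
            32, // nb_queries (illustrative)
        )?;

        // 3. verify() no longer takes k: it reads block.shard.k and the
        //    stored position instead
        for block in blocks {
            verify::<2, Fr, Sha256, DensePolynomial<Fr>>(block, n, 32)?;
        }

        Ok(())
    }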