Commit 202d8bcc authored by STEVAN Antoine

add FRI (dragoon/komodo!175)

FRI protocol from [`dragoon/fri`](https://gitlab.isae-supaero.fr/dragoon/fri)

## changelog
- add binary assets to be used as inputs
- add `fri` and `fri_test_utils` as local dependencies until [`dragoon/fri`](https://gitlab.isae-supaero.fr/dragoon/fri) becomes public
- add `fri` feature and module (see section below for the public definitions)
- fix bug in 32bd6566
- bump Rust in e7a2c244
- add a versatile example
- add Nushell pipeline to run benchmarks and plot results
- add some tests

## `fri` module API
```rust
struct Block<F: PrimeField, H: Hasher>
```

```rust
fn evaluate<F: PrimeField>(bytes: &[u8], k: usize, n: usize) -> Vec<Vec<F>>
```

```rust
fn encode<F: PrimeField>(
    bytes: &[u8],
    evaluations: Vec<Vec<F>>,
    k: usize,
) -> Vec<fec::Shard<F>>
```

```rust
fn prove<const N: usize, F: PrimeField, H: Hasher, P>(
    evaluations: Vec<Vec<F>>,
    shards: Vec<fec::Shard<F>>,
    blowup_factor: usize,
    remainder_plus_one: usize,
    nb_queries: usize,
) -> Result<Vec<Block<F, H>>, KomodoError>
where
    P: DenseUVPolynomial<F>,
    for<'a, 'b> &'a P: Div<&'b P, Output = P>,
    <H as rs_merkle::Hasher>::Hash: AsRef<[u8]>,
```
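here, the const generic `N` is the folding factor, `blowup_factor` is the ratio `n / k` between the size of the evaluation domain and the number of source shards, `remainder_plus_one` is the degree of the final FRI remainder polynomial plus one (the `--remainder-degree-plus-one` option of the example), and `nb_queries` is the number of FRI queries.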

```rust
fn verify<const N: usize, F: PrimeField, H: Hasher, P>(
    block: Block<F, H>,
    domain_size: usize,
    nb_queries: usize,
) -> Result<(), KomodoError>
where
    P: DenseUVPolynomial<F>,
    for<'a, 'b> &'a P: Div<&'b P, Output = P>,
    <H as rs_merkle::Hasher>::Hash: AsRef<[u8]>,
```

```rust
fn decode<F: PrimeField, H: Hasher>(blocks: Vec<Block<F, H>>, n: usize) -> Vec<u8>
```
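below is a minimal end-to-end sketch of this API (not part of the commit), mirroring the `end_to_end` test from `src/fri.rs`; the dummy data and the parameters `(k, n, bf, rpo, q) = (4, 8, 2, 1, 50)` are assumptions for illustration and require the `fri` feature:

```rust
use ark_poly::univariate::DensePolynomial;
use dragoonfri::algorithms::Blake3;
use dragoonfri_test_utils::Fq;
use komodo::fri::{decode, encode, evaluate, prove, verify};

fn main() {
    // deterministic dummy data, in lieu of a real file
    let bytes: Vec<u8> = (0..4096u32).flat_map(|i| i.to_le_bytes()).collect();

    let (k, n) = (4, 8); // n = k * blowup_factor

    // Reed-Solomon encode the data and turn the columns into shards
    let evaluations = evaluate::<Fq>(&bytes, k, n);
    let shards = encode::<Fq>(&bytes, evaluations.clone(), k);

    // folding factor N = 2, blowup factor = 2, remainder degree plus one = 1, 50 queries
    let blocks =
        prove::<2, Fq, Blake3, DensePolynomial<Fq>>(evaluations, shards, 2, 1, 50).unwrap();

    for block in &blocks {
        verify::<2, Fq, Blake3, DensePolynomial<Fq>>(block.clone(), n, 50).unwrap();
    }

    // k blocks are enough to recover the data, here the first k
    assert_eq!(decode::<Fq, Blake3>(blocks[0..k].to_vec(), n), bytes);
}
```

the example file further down wraps this same flow with CLI options and timing.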

## results

### times

![evaluating](/uploads/69607a2f987e26c23dd172d469c682c5/evaluating.png)
![encoding](/uploads/540ac15c21ba7500ad34b068c5d9d7dc/encoding.png)
![proving](/uploads/a694525c7d1277fe0b53dd87b6443900/proving.png)
![verifying_single](/uploads/8f03a3a0abca329eea396f3ba8b76512/verifying_single.png)
![decoding](/uploads/ba2cb0aa54f2ecff16340333121f16ca/decoding.png)

### sizes

![commits_single](/uploads/59a96661482fb1d918efc098e060cd45/commits_single.png)
![commits_single_normalized](/uploads/11398ed3f37ab4917b717cb717c9070d/commits_single_normalized.png)
![proofs](/uploads/17da07f4ef4ee637236deba7835cc022/proofs.png)
![proofs_normalized](/uploads/b2aae9491c56767ad1bf5674cf980361/proofs_normalized.png)
parent de4266c0
1 merge request: !175 add FRI
Pipeline #6776 passed with stages in 3 minutes and 54 seconds
.gitignore
@@ -3,6 +3,7 @@ target/
 Cargo.lock
 *.ndjson
+*.png

 # IDEs
 .idea
Cargo.toml
@@ -25,6 +25,7 @@ thiserror = "1.0.50"
 tracing = "0.1.40"
 tracing-subscriber = "0.3.17"
 ark-poly-commit = { git = "https://gitlab.isae-supaero.fr/a.stevan/poly-commit", version = "0.4.0", rev = "19fc0d4", optional = true }
+dragoonfri = { version = "0.1.0", optional = true }

 [workspace]
 members = [
@@ -35,12 +36,16 @@ members = [
 [dev-dependencies]
 ark-bls12-381 = "0.4.0"
+clap = { version = "4.5.17", features = ["derive"] }
 itertools = "0.13.0"
 rand = "0.8.5"
+dragoonfri-test-utils = "0.1.0"
+hex = "0.4.3"

 [features]
 kzg = ["dep:ark-poly-commit"]
 aplonk = ["dep:ark-poly-commit"]
+fri = ["dep:dragoonfri"]
 fs = []

 [package.metadata.docs.rs]
@@ -53,3 +58,7 @@ required-features = ["kzg"]
 [[example]]
 name = "aplonk"
 required-features = ["aplonk"]
+
+[[example]]
+name = "fri"
+required-features = ["fri"]
File added (binary asset)
File added (binary asset)
benchmarks/README.md
@@ -79,3 +79,53 @@ benchmarks fec plot e2e $out_fec
benchmarks fec plot combined $out_fec --recoding $out_recoding
benchmarks fec plot ratio $out_fec --recoding $out_recoding
```
## FRI
> :bulb: **Note**
>
> the FRI benchmarks don't use a module from [src/bin/](src/bin/) with PLNK, but
> rather an [example](../examples/fri.rs)
- modify [benchmarks/params/fri.nu](benchmarks/params/fri.nu)
- source it
```nushell
source benchmarks/params/fri.nu
```
- run the benchmarks
```nushell
(benchmarks fri run
--data-sizes $DATA_SIZES
--ks $KS
--blowup-factors $BFS
--nb-queries $QS
--hashes $HS
--finite-fields $FFS
--remainders $RPOS
--folding-factors $NS
) | to ndjson out> $DATA
```
> the following `watch` call can be used to see the results as they are dumped to `$DATA`
> ```nushell
> watch . {
> open --raw $DATA
> | lines
> | last
> | from ndjson
> | into int evaluating encoding proving verifying decoding
> | into duration evaluating encoding proving verifying decoding
> | into filesize proofs commits d
> | into record
> }
> ```
- plot the results
```nushell
benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA evaluating encoding proving decoding --y-type "duration"
benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA verifying --y-type "duration" --single
benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA proofs --y-type "filesize" --identity --normalize
benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA commits --y-type "filesize" --single --identity --normalize
benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA proofs --y-type "filesize" --identity
benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA commits --y-type "filesize" --single --identity
```
benchmarks/mod.nu
@@ -3,3 +3,4 @@ export module nu-lib/commit.nu
 export module nu-lib/fec/
 export module nu-lib/recoding.nu
 export module nu-lib/linalg.nu
+export module nu-lib/fri/
benchmarks/nu-lib/fri/mod.nu
export module run.nu
export module plot.nu
benchmarks/nu-lib/fri/plot.nu
use std formats [ "from ndjson" ]
use ../utils plot [ into-axis-options, COMMON_OPTIONS ]
const NB_MS_IN_NS = 1_000_000
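# plot one metric of the FRI benchmark results, with one curve per
# (k, finite field, blowup factor) triple found in the data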
def plot [
name: string,
--save,
--y-type: string,
--single,
--identity,
--normalize,
--dump-dir: path,
] {
let ds = $in | get d | uniq
let graphs = $in
| select $name d k bf ff
| group-by { |it| $"($it.k):($it.ff):($it.bf)" }
| transpose name points
| update name {
let res = $in | parse "{k}:{ff}:{bf}" | into record
$"$k = ($res.k)$, $\\mathbb{F} = $ ($res.ff), $BF = ($res.bf)$"
}
| update points {
rename --column { $name: "y", d: "x" }
| update y { if $y_type == "duration" { $in / $NB_MS_IN_NS } else { $in } }
| if $single { update y { |it| $it.y / ($it.k * $it.bf) } } else { $in }
| if $normalize { update y { |it| $it.y / $it.x } } else { $in }
| sort-by x
}
| insert style { |it|
let type = match $it.points.ff.0 {
"fp128" => "solid",
"bls12-381" => "dashed",
_ => "",
}
let color = match $it.points.k.0 {
8 => "tab:blue",
128 => "tab:green",
1024 => "tab:orange",
4096 => "tab:red",
_ => "grey",
}
let marker = match $it.points.bf.0 {
2 => "o",
4 => "s",
_ => "*",
}
{ color: $color, line: { type: $type, marker: { shape: $marker } } }
}
| if $identity { append {
name: "$x \\mapsto x$",
points: ($ds | wrap x | merge ($ds | wrap y) | if $normalize { update y { |it| $it.y / $it.x } } else { $in }),
style: { color: "black", line: { type: "dotted" } },
} } else { $in }
let title = [
$name,
(if $single { "single" }),
(if $normalize { "normalized" }),
] | compact | str join '_'
let y_type = if $normalize { "plain" } else { $y_type }
let options = [
...($graphs.points | flatten | into-axis-options -x "filesize" -y $y_type)
--use-tex
--y-scale log
--x-scale log
--x-scale-base 2
--y-scale-base 2
--title $title
...(if $save { [ --save ($dump_dir | path join $"($title).png") ] } else {[]})
--fullscreen
]
$graphs | to json | gplt plot $in ...($options | compact)
}
export def main [
...x,
--file: path,
--y-type: string = "plain",
--single,
--identity,
--normalize,
--dump-dir: path = "./",
] {
if ($x | is-empty) {
error make --unspanned { msg: "nothing to do" }
}
if $file == null {
error make --unspanned { msg: "missing --file" }
}
if not ($dump_dir | path exists) {
mkdir $dump_dir
}
let data = open $file | where h == "sha3-512" and q == 50
for i in $x {
$data | plot --save $i --y-type=$y_type --single=$single --identity=$identity --normalize=$normalize --dump-dir=$dump_dir
}
}
benchmarks/nu-lib/fri/run.nu
use std iter
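# compute the cartesian product of a list of lists
#
# e.g. `cartesian product [[1, 2], [3, 4]]` gives `[[1, 3], [1, 4], [2, 3], [2, 4]]`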
def "cartesian product" [
iters: list # the iterables you want the cartesian product of
]: nothing -> list {
def aux [a: list]: nothing -> list {
if ($a | is-empty) {
return []
}
let head = $a | first
let tail = aux ($a | skip 1)
if ($head | is-empty) {
return $tail
} else if ($tail | is-empty) {
return $head
}
$head | each {|h| $tail | each {|t| [$h, $t]}} | flatten | each { flatten }
}
aux $iters
}
# returns a record with all numeric results, merged with the parameters
def run [
params: record<
d: filesize, k: int, bf: int, q: int, h: string, ff: string, n: int, rpo: int
>
] {
cargo run --quiet --release --example fri --features fri -- ...[
--data-size ($params.d | into int)
-k $params.k
--blowup-factor $params.bf
--remainder-degree-plus-one $params.rpo
--folding-factor $params.n
--nb-queries $params.q
--hash $params.h
--finite-field $params.ff
]
| lines
| parse "{k}: {v}"
| into int v
| transpose --header-row
| into record
| merge $params
}
export def main [
--data-sizes: list<filesize>,
--ks: list<int>,
--blowup-factors: list<int>,
--nb-queries: list<int>,
--hashes: list<string>,
--finite-fields: list<string>,
--folding-factors: list<int>,
--remainders: list<int>,
] {
let inputs = [
$data_sizes, $ks, $blowup_factors, $nb_queries, $hashes, $finite_fields,
$folding_factors, $remainders
]
if ($inputs | any { is-empty }) {
error make --unspanned { msg: "one of the inputs is empty" }
}
let params = cartesian product $inputs | each { |params|
[d, k, bf, q, h, ff, n, rpo]
| iter zip-into-record $params
| into record
}
$params | each { |p|
print $p
run $p
}
}
benchmarks/nu-lib/utils/plot.nu
@@ -54,6 +54,7 @@ export def into-axis-options [-x: string, -y: string]: table<x: float, y: float>
     let y_tick_labels = match $y {
         "duration" => ($y_ticks | into-duration-tick-labels),
+        "filesize" => ($y_ticks | into-filesize-tick-labels),
         "plain" => $y_ticks,
         _ => {
             print $"warning: ($y) option is unknown for -y"
benchmarks/params/fri.nu
let DATA_SIZES = seq 0 15 | each { 2 ** $in * 4096b }
const KS = [8, 128, 1024, 4096]
const BFS = [2, 4]
const NS = [2]
const RPOS = [1]
const QS = [50]
const HS = ["sha3-512"]
const FFS = ["fp128", "bls12-381"]
const DATA = "benchmarks/results/fri.ndjson"
const OUTPUT_DIR = "benchmarks/results/figures/"
if not ($DATA | path dirname | path exists) {
print $"creating directory for (ansi purple)($DATA)(ansi reset)"
$DATA | path dirname | mkdir $in
}
examples/fri.rs
use ark_ff::PrimeField;
use ark_poly::univariate::DensePolynomial;
use ark_poly::DenseUVPolynomial;
use ark_serialize::CanonicalSerialize;
use ark_std::ops::Div;
use clap::{Parser, ValueEnum};
use rs_merkle::Hasher;
use std::time::Instant;
use ark_bls12_381::Fr as F_BLS12_381;
use dragoonfri_test_utils::Fq as F_128;
use dragoonfri::{
algorithms::{Blake3, Sha3_256, Sha3_512},
dynamic_folding_factor,
};
use komodo::error::KomodoError;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
/// measures the time it takes to apply a function to a set of arguments and
/// returns both the result of the call and the elapsed time
///
/// ```rust
/// fn add(a: i32, b: i32) -> i32 { a + b }
/// let (res, time) = timeit!(add, 1, 2);
/// ```
/// will be the same as
/// ```rust
/// fn add(a: i32, b: i32) -> i32 { a + b }
/// let (res, time) = {
/// let start = Instant::now();
/// let res = add(1, 2);
/// let time = start.elapsed();
/// (res, time)
/// };
/// ```
macro_rules! timeit {
($func:expr, $( $args:expr ),*) => {{
let start = Instant::now();
let res = $func( $( $args ),* );
let time = start.elapsed();
(res, time)
}};
}
/// same as [`timeit`] but prints a name and the time at the end directly
///
/// ```rust
/// fn add(a: i32, b: i32) -> i32 { a + b }
/// let res = timeit_and_print!("addition", add, 1, 2);
/// ```
/// will be the same as
/// ```rust
/// fn add(a: i32, b: i32) -> i32 { a + b }
/// let res = {
/// print!("addition: ");
/// let start = Instant::now();
/// let res = add(1, 2);
/// let time = start.elapsed();
/// println!("{}", time.as_nanos());
/// res
/// };
/// ```
macro_rules! timeit_and_print {
($name: expr, $func:expr, $( $args:expr ),*) => {{
print!("{}: ", $name);
let (res, time) = timeit!($func, $($args),*);
println!("{}", time.as_nanos());
res
}};
}
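/// runs the full FRI pipeline on `bytes` and prints the proof and commit sizes
/// as well as the time taken by each step, in nanoseconds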
fn run<const N: usize, F: PrimeField, H: Hasher, P>(
bytes: &[u8],
k: usize,
n: usize,
bf: usize,
rpo: usize,
q: usize,
) -> Result<(), KomodoError>
where
P: DenseUVPolynomial<F>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>,
<H as rs_merkle::Hasher>::Hash: AsRef<[u8]> + CanonicalSerialize,
{
let evaluations = timeit_and_print!("evaluating", komodo::fri::evaluate::<F>, &bytes, k, n);
let evals = evaluations.clone();
let shards = timeit_and_print!("encoding", komodo::fri::encode::<F>, &bytes, evals, k);
let blocks = timeit_and_print!(
"proving",
komodo::fri::prove::<N, F, H, P>,
evaluations,
shards,
bf,
rpo,
q
);
let blocks = blocks.unwrap();
let proofs: usize = blocks.iter().map(|b| b.proof.compressed_size()).sum();
let commits: usize = blocks.iter().map(|b| b.commit.compressed_size()).sum();
println!("proofs: {}", proofs);
println!("commits: {}", commits);
print!("verifying: ");
let time: std::time::Duration = blocks
.iter()
.cloned()
.map(|b| {
let (res, time) = timeit!(komodo::fri::verify::<N, F, H, P>, b, n, q);
res.unwrap();
time
})
.sum();
println!("{}", time.as_nanos());
let decoded = timeit_and_print!(
"decoding",
komodo::fri::decode::<F, H>,
blocks[0..k].to_vec(),
n
);
assert_eq!(hex::encode(bytes), hex::encode(decoded));
Ok(())
}
#[derive(ValueEnum, Debug, Clone)]
enum Hash {
BLAKE3,
SHA3_256,
SHA3_512,
}
#[derive(ValueEnum, Debug, Clone)]
enum FiniteField {
FP128,
BLS12_381,
}
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
#[arg(short, long)]
data_size: usize,
#[arg(long, default_value = "1234")]
seed: u64,
#[arg(short)]
k: usize,
#[arg(short, long)]
blowup_factor: usize,
#[arg(short, long)]
remainder_degree_plus_one: usize,
#[arg(short, long)]
folding_factor: usize,
#[arg(short, long)]
nb_queries: usize,
#[arg(long)]
hash: Hash,
#[arg(long)]
finite_field: FiniteField,
}
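// maps the runtime folding factor onto the const generic `N` expected by `run`,
// via `dynamic_folding_factor` from `dragoonfri`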
macro_rules! foo {
($n:ident, $f:ident, $h:ident) => {
dynamic_folding_factor!(
let N = $n => run::<N, $f, $h, DensePolynomial<$f>>
)
}
}
fn generate_data(size: usize, seed: u64) -> Vec<u8> {
let mut rnd = StdRng::seed_from_u64(seed);
(0..size).map(|_| rnd.gen()).collect()
}
fn main() {
let args = Args::parse();
let bytes = generate_data(args.data_size, args.seed);
println!("loaded {} bytes of data", bytes.len());
let ff = args.folding_factor;
let f = match args.finite_field {
FiniteField::FP128 => match args.hash {
Hash::BLAKE3 => foo!(ff, F_128, Blake3),
Hash::SHA3_256 => foo!(ff, F_128, Sha3_256),
Hash::SHA3_512 => foo!(ff, F_128, Sha3_512),
},
FiniteField::BLS12_381 => match args.hash {
Hash::BLAKE3 => foo!(ff, F_BLS12_381, Blake3),
Hash::SHA3_256 => foo!(ff, F_BLS12_381, Sha3_256),
Hash::SHA3_512 => foo!(ff, F_BLS12_381, Sha3_512),
},
};
f(
&bytes,
args.k,
args.k * args.blowup_factor,
args.blowup_factor,
args.remainder_degree_plus_one,
args.nb_queries,
)
.unwrap()
}
rust-toolchain.toml
 [toolchain]
 profile = "minimal"
-channel = "1.75"
+channel = "1.78"
 components = ["rustfmt", "clippy"]
src/algebra/mod.rs
@@ -67,7 +67,7 @@ pub mod linalg;
 /// # }
 /// ```
 pub fn split_data_into_field_elements<F: PrimeField>(bytes: &[u8], modulus: usize) -> Vec<F> {
-    let bytes_per_element = (F::MODULUS_BIT_SIZE as usize) / 8;
+    let bytes_per_element = (F::MODULUS_BIT_SIZE as usize - 1) / 8;
     let mut elements = Vec::new();
     for chunk in bytes.chunks(bytes_per_element) {
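note: a chunk of `b` bytes is an integer strictly below `2^(8 * b)`, while a modulus of `MODULUS_BIT_SIZE` bits is at least `2^(MODULUS_BIT_SIZE - 1)`, so packing `(MODULUS_BIT_SIZE - 1) / 8` bytes per element guarantees every chunk fits in the field; the previous formula could exceed the modulus on fields whose bit size is a multiple of 8, e.g. a 128-bit field.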
src/fri.rs
use ark_ff::PrimeField;
use ark_poly::DenseUVPolynomial;
use ark_std::ops::Div;
use rs_merkle::algorithms::Sha256;
use rs_merkle::Hasher;
use std::rc::Rc;
use tracing::{debug, info};
use crate::{algebra, error::KomodoError, fec};
use dragoonfri::{
frida::{FridaBuilder, FridaCommitment},
interpolation::interpolate_polynomials,
rng::FriChallenger,
utils::{to_evaluations, HasherExt, MerkleProof},
};
/// representation of a block of proven data.
///
/// this is a wrapper around a [`fec::Shard`] with some additional cryptographic
/// information that makes it possible to prove the integrity of said shard.
#[derive(Clone, PartialEq)]
pub struct Block<F: PrimeField, H: Hasher> {
pub shard: fec::Shard<F>,
pub proof: MerkleProof<H>,
pub commit: Rc<FridaCommitment<F, H>>,
position: usize,
}
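/// evaluates the data on a domain of size `n`: the bytes are split into field
/// elements, arranged into rows of `k` elements, and each row is turned into
/// `n` evaluations, i.e. Reed-Solomon encoded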
pub fn evaluate<F: PrimeField>(bytes: &[u8], k: usize, n: usize) -> Vec<Vec<F>> {
debug!("splitting bytes into rows");
let elements: Vec<F> = algebra::split_data_into_field_elements(bytes, k);
let rows = elements.chunks(k).map(|c| c.to_vec()).collect::<Vec<_>>();
info!(
"data is composed of {} rows and {} elements",
rows.len(),
elements.len()
);
rows.into_iter()
.map(|r| to_evaluations(r, n))
.collect::<Vec<_>>()
}
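/// transposes a matrix of field elements, seen as a list of rows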
#[inline]
fn transpose<F: Copy>(v: Vec<Vec<F>>) -> Vec<Vec<F>> {
let mut cols: Vec<_> = Vec::<Vec<F>>::with_capacity(v[0].len());
for i in 0..v[0].len() {
cols.push((0..v.len()).map(|j| v[j][i]).collect());
}
cols
}
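/// builds `n` [`fec::Shard`]s from the evaluations, one per column, i.e. the
/// i-th shard holds the i-th evaluation of every row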
pub fn encode<F: PrimeField>(
bytes: &[u8],
evaluations: Vec<Vec<F>>,
k: usize,
) -> Vec<fec::Shard<F>> {
let hash = Sha256::hash(bytes).to_vec();
let n = evaluations[0].len();
let t = transpose(evaluations);
(0..n)
.map(|i| fec::Shard {
k: k as u32,
linear_combination: vec![],
hash: hash.clone(),
data: t[i].clone(),
size: bytes.len(),
})
.collect::<Vec<_>>()
}
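/// attaches a FRI proof to each shard, all blocks sharing the same FRIDA
/// commitment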
pub fn prove<const N: usize, F: PrimeField, H: Hasher, P>(
evaluations: Vec<Vec<F>>,
shards: Vec<fec::Shard<F>>,
blowup_factor: usize,
remainder_plus_one: usize,
nb_queries: usize,
) -> Result<Vec<Block<F, H>>, KomodoError>
where
P: DenseUVPolynomial<F>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>,
<H as rs_merkle::Hasher>::Hash: AsRef<[u8]>,
{
let builder = FridaBuilder::<F, H>::new::<N, _>(
&evaluations,
FriChallenger::<H>::default(),
blowup_factor,
remainder_plus_one,
nb_queries,
);
let commit = Rc::new(FridaCommitment::from(builder.clone()));
Ok(shards
.iter()
.enumerate()
.map(|(i, s)| Block {
shard: s.clone(),
proof: builder.prove_shards(&[i]),
commit: commit.clone(),
position: i,
})
.collect())
}
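/// verifies a single [`Block`]: both the shared FRIDA commitment and the
/// Merkle proof binding the shard data to the commitment tree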
pub fn verify<const N: usize, F: PrimeField, H: Hasher, P>(
block: Block<F, H>,
domain_size: usize,
nb_queries: usize,
) -> Result<(), KomodoError>
where
P: DenseUVPolynomial<F>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>,
<H as rs_merkle::Hasher>::Hash: AsRef<[u8]>,
{
block
.commit
.verify::<N, _>(
FriChallenger::<H>::default(),
nb_queries,
block.shard.k as usize,
domain_size,
)
.unwrap();
assert!(block.proof.verify(
block.commit.tree_root(),
&[block.position],
&[H::hash_item(&block.shard.data)],
domain_size,
));
Ok(())
}
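/// recovers the original data from `k` valid blocks by interpolating the
/// columns on the roots of unity given by the block positions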
pub fn decode<F: PrimeField, H: Hasher>(blocks: Vec<Block<F, H>>, n: usize) -> Vec<u8> {
let w = F::get_root_of_unity(n as u64).unwrap();
let t_shards = transpose(blocks.iter().map(|b| b.shard.data.clone()).collect());
let positions = blocks
.iter()
.map(|b| w.pow([b.position as u64]))
.collect::<Vec<_>>();
let source_shards = interpolate_polynomials(&t_shards, &positions)
.into_iter()
.flatten()
.collect::<Vec<_>>();
let mut bytes = algebra::merge_elements_into_bytes(&source_shards);
bytes.resize(blocks[0].shard.size, 0);
bytes
}
#[cfg(test)]
mod tests {
use ark_ff::PrimeField;
use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};
use ark_serialize::CanonicalSerialize;
use ark_std::ops::Div;
use rs_merkle::Hasher;
use ark_bls12_381::Fr as F_BLS12_381;
use dragoonfri::{
algorithms::{Blake3, Sha3_256, Sha3_512},
dynamic_folding_factor,
};
use dragoonfri_test_utils::Fq as F_128;
use crate::error::KomodoError;
use super::{decode, encode, evaluate, prove, verify};
fn bytes() -> Vec<u8> {
include_bytes!("../assets/dragoon_133x133.png").to_vec()
}
fn run<const N: usize, F: PrimeField, H: Hasher, P>(
bytes: &[u8],
k: usize,
n: usize,
bf: usize,
rpo: usize,
q: usize,
) -> Result<(), KomodoError>
where
P: DenseUVPolynomial<F>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>,
<H as rs_merkle::Hasher>::Hash: AsRef<[u8]> + CanonicalSerialize,
{
let evaluations = evaluate::<F>(bytes, k, n);
let evals = evaluations.clone();
let shards = encode::<F>(bytes, evals, k);
let blocks = prove::<N, F, H, P>(evaluations, shards, bf, rpo, q).unwrap();
for b in blocks.clone() {
verify::<N, F, H, P>(b, n, q).unwrap();
}
assert_eq!(decode::<F, H>(blocks[0..k].to_vec(), n), bytes);
Ok(())
}
macro_rules! run {
($n:tt, $f:ident, $h:ident) => {
dynamic_folding_factor!(
let N = $n => run::<N, $f, $h, DensePolynomial<$f>>
)
}
}
#[test]
fn end_to_end() {
for (ff, k, n, bf, rpo, q) in [(2, 4, 8, 2, 1, 50), (2, 4, 8, 2, 2, 50)] {
let _ = run!(ff, F_128, Blake3)(&bytes(), k, n, bf, rpo, q);
let _ = run!(ff, F_128, Sha3_256)(&bytes(), k, n, bf, rpo, q);
let _ = run!(ff, F_128, Sha3_512)(&bytes(), k, n, bf, rpo, q);
let _ = run!(ff, F_BLS12_381, Blake3)(&bytes(), k, n, bf, rpo, q);
let _ = run!(ff, F_BLS12_381, Sha3_256)(&bytes(), k, n, bf, rpo, q);
let _ = run!(ff, F_BLS12_381, Sha3_512)(&bytes(), k, n, bf, rpo, q);
}
}
}
src/lib.rs
@@ -51,6 +51,8 @@ pub mod aplonk;
 mod conversions;
 pub mod error;
 pub mod fec;
+#[cfg(feature = "fri")]
+pub mod fri;
 #[cfg(feature = "fs")]
 pub mod fs;
 #[cfg(feature = "kzg")]