Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • dragoon/komodo
  • a.stevan/komodo
  • c.heme/komodo
3 results
Show changes
Showing
with 496 additions and 1045 deletions
......@@ -2,6 +2,7 @@ use utils log
use utils math *
use utils fs check-file
use utils plot [ into-axis-options, COMMON_OPTIONS, gplt ]
use utils args check-list-arg
use std formats *
......@@ -11,22 +12,18 @@ use std formats *
# - output: the output path, as NDJSON
export def run [
--output: path, # the output path (defaults to a random file in $nu.temp-path)
--force, # does not ask for confirmation if the output file already exists, it will be overwritten
--no-confirm (-y), # does not ask for confirmation if the output file already exists, it will be overwritten
--nb-measurements: int = 10, # the number of measurements per benchmark run
--append, # append to the output path instead of overwritting
]: list<int> -> path {
let input = $in
if ($input | is-empty) {
print "nothing to do"
return
}
$in | check-list-arg --cmd "linalg run" --arg "pipeline input"
let new_file = $output == null
let output = $output | default (mktemp --tmpdir komodo_linalg.XXXXXX)
let pretty_output = $"(ansi purple)($output)(ansi reset)"
if ($output | path exists) and not $new_file {
log warning $"($pretty_output) already exists"
if not $force {
if not $no_confirm {
let res = ["no", "yes"] | input list $"Do you want to overwrite ($pretty_output)?"
if $res == null or $res == "no" {
log info "aborting"
......@@ -36,10 +33,19 @@ export def run [
}
}
cargo run --release --package benchmarks --bin linalg -- ...[
let options = [
--release
--package benchmarks
--bin linalg
--
--nb-measurements $nb_measurements
...$input
] out> $output
...$in
]
if $append {
cargo run ...$options out>> $output
} else {
cargo run ...$options out> $output
}
log info $"results saved to ($pretty_output)"
$output
......@@ -89,7 +95,8 @@ export def plot [
| where op == $op
| rename --column { n: "x", mean: "y", stddev: "e" }
| group-by name --to-table
| rename --column { group: "name", items: "points" }
| reject items.name items.op items.times
| rename --column { name: "name", items: "points" }
| insert style.color {|it|
match $it.name {
"BLS12-381" => "tab:blue"
......
......@@ -3,6 +3,7 @@ use utils formats *
use utils math *
use utils plot [ into-axis-options, COMMON_OPTIONS, gplt ]
use utils fs check-file
use utils args check-list-arg
use std formats *
......@@ -14,22 +15,20 @@ export def run [
--output: path, # the output path (defaults to a random file in $nu.temp-path)
--ks: list<int>, # the values of $k$ to benchmark
--curves: list<string>, # the curves to benchmark
--force, # does not ask for confirmation if the output file already exists, it will be overwritten
--no-confirm (-y), # does not ask for confirmation if the output file already exists, it will be overwritten
--nb-measurements: int = 10, # the number of measurements per benchmark run
--append, # append to the output path instead of overwritting
]: list<int> -> path {
let input = $in
if ($ks | is-empty) or ($input | is-empty) or ($curves | is-empty) {
print "nothing to do"
return
}
$ks | check-list-arg --cmd "recoding run" --arg "--ks" --span (metadata $ks).span
$curves | check-list-arg --cmd "recoding run" --arg "--curves" --span (metadata $curves).span
$in | check-list-arg --cmd "recoding run" --arg "pipeline input"
let new_file = $output == null
let output = $output | default (mktemp --tmpdir komodo_recoding.XXXXXX)
let pretty_output = $"(ansi purple)($output)(ansi reset)"
if ($output | path exists) and not $new_file {
log warning $"($pretty_output) already exists"
if not $force {
if not $no_confirm {
let res = ["no", "yes"] | input list $"Do you want to overwrite ($pretty_output)?"
if $res == null or $res == "no" {
log info "aborting"
......@@ -39,16 +38,28 @@ export def run [
}
}
"" out> $output
if not $append {
"" out> $output
}
let input = $in
for k in $ks {
cargo run --release --package benchmarks --bin recoding -- ...[
let options = [
--release
--package benchmarks
--bin recoding
--
--nb-measurements $nb_measurements
...$input
--shards $k
--ks $k
--curves ...$curves
] | from ndnuon | to ndjson out>> $output
]
if $append {
cargo run ...$options | from ndnuon | to ndjson out>> $output
} else {
cargo run ...$options | from ndnuon | to ndjson out> $output
}
}
log info $"results saved to ($pretty_output)"
......@@ -73,7 +84,7 @@ export def plot [
| select shards x y e
| group-by shards --to-table
| reject items.shards
| rename --column { group: "name", items: "points" }
| rename --column { shards: "name", items: "points" }
| update name { $"$k = ($in)$"}
let options = [
......
......@@ -2,6 +2,7 @@ use utils log
use utils math *
use utils fs check-file
use utils plot [ into-axis-options, COMMON_OPTIONS, gplt ]
use utils args check-list-arg
use std formats *
......@@ -12,22 +13,19 @@ use std formats *
export def run [
--output: path, # the output path (defaults to a random file in $nu.temp-path)
--curves: list<string>, # the curves to benchmark
--force, # does not ask for confirmation if the output file already exists, it will be overwritten
--no-confirm (-y), # does not ask for confirmation if the output file already exists, it will be overwritten
--nb-measurements: int = 10, # the number of measurements per benchmark run
--append, # append to the output path instead of overwritting
]: list<int> -> path {
let input = $in
if ($input | is-empty) or ($curves | is-empty) {
print "nothing to do"
return
}
$curves | check-list-arg --cmd "setup run" --arg "--curves" --span (metadata $curves).span
$in | check-list-arg --cmd "setup run" --arg "pipeline input"
let new_file = $output == null
let output = $output | default (mktemp --tmpdir komodo_setup.XXXXXX)
let pretty_output = $"(ansi purple)($output)(ansi reset)"
if ($output | path exists) and not $new_file {
log warning $"($pretty_output) already exists"
if not $force {
if not $no_confirm {
let res = ["no", "yes"] | input list $"Do you want to overwrite ($pretty_output)?"
if $res == null or $res == "no" {
log info "aborting"
......@@ -37,11 +35,20 @@ export def run [
}
}
cargo run --release --package benchmarks --bin setup -- ...[
let options = [
--release
--package benchmarks
--bin setup
--
--nb-measurements $nb_measurements
...$input
...$in
--curves ...$curves
] out> $output
]
if $append {
cargo run ...$options out>> $output
} else {
cargo run ...$options out> $output
}
log info $"results saved to ($pretty_output)"
$output
......@@ -70,7 +77,7 @@ export def plot [
| select name x y e
| group-by name --to-table
| reject items.name
| rename --column { group: "name", items: "points" }
| rename --column { name: "name", items: "points" }
| insert style.color {|it|
match $it.name {
"BLS12-381" => "tab:blue"
......
# throws an error if the input is an empty list
export def check-list-arg [
    --cmd: string, # the name of the command
    --arg: string, # the name of the argument
    --span: record<start: int, end: int>, # the span of the arg (no span means an unspanned error)
]: [ list -> nothing ] {
    # nothing to complain about when the list has at least one element
    if not ($in | is-empty) {
        return
    }

    # both error flavours share the same headline message
    let msg = $"(ansi red_bold)invalid_arguments(ansi reset)"
    if $span == null {
        error make --unspanned {
            msg: $msg,
            help: $"provide a non empty list as ($arg)",
        }
    } else {
        error make {
            msg: $msg,
            label: {
                text: $"(ansi purple)($cmd)(ansi reset) needs (ansi purple)($arg)(ansi reset)",
                span: $span
            },
            help: $"provide a non empty list as (ansi purple)($arg)(ansi reset)"
        }
    }
}
......@@ -4,3 +4,4 @@ export module log.nu
export module math.nu
export module parse.nu
export module plot.nu
export module args.nu
......@@ -29,9 +29,7 @@ export def into-filesize-tick-labels []: list<int> -> list<string> {
}
export def into-axis-options [-x: string, -y: string]: table<x: float, y: float> -> list<string> {
let input = $in
let xs = $input | flatten | get x | uniq
let xs = $in | flatten | get x | uniq
let x_tick_labels = match $x {
"filesize" => ($xs | into-filesize-tick-labels),
......@@ -47,13 +45,14 @@ export def into-axis-options [-x: string, -y: string]: table<x: float, y: float>
--x-tick-labels ...$x_tick_labels
]
let ys = $input | flatten | get y
let ys = $in | flatten | get y
let y_ticks = seq ($ys | math min | math log 10 | math ceil | $in - 1) ($ys | math max | math log 10 | math floor)
| into float
| each { 10 ** $in }
let y_tick_labels = match $y {
"duration" => ($y_ticks | into-duration-tick-labels),
"filesize" => ($y_ticks | into-filesize-tick-labels),
"plain" => $y_ticks,
_ => {
print $"warning: ($y) option is unknown for -y"
......
// see `examples/benches/README.md`
use ark_ff::PrimeField;
use ark_poly::univariate::DensePolynomial;
use benchmarks::fields::Fq128;
use clap::{arg, command, Parser, ValueEnum};
use komodo::{algebra::linalg::Matrix, fec};
use dragoonfri::algorithms::Sha3_512;
use komodo::{algebra::linalg::Matrix, fec, fri};
use plnk::Bencher;
use rand::{rngs::ThreadRng, thread_rng, Rng, RngCore};
......@@ -10,6 +12,14 @@ fn random_bytes(n: usize, rng: &mut ThreadRng) -> Vec<u8> {
(0..n).map(|_| rng.gen::<u8>()).collect()
}
/// Randomly remove shards in place until exactly `k` of them are left.
///
/// Does nothing when `shards` already holds `k` elements or fewer; the
/// relative order of the surviving shards is preserved.
fn random_loss<T>(shards: &mut Vec<T>, k: usize, rng: &mut impl Rng) {
    // number of shards that must be dropped (0 when len <= k)
    let excess = shards.len().saturating_sub(k);
    for _ in 0..excess {
        let victim = rng.gen_range(0..shards.len());
        shards.remove(victim);
    }
}
fn build_encoding_mat<F: PrimeField>(
k: usize,
n: usize,
......@@ -24,48 +34,93 @@ fn build_encoding_mat<F: PrimeField>(
.collect();
Matrix::vandermonde_unchecked(&points, k)
}
_ => panic!("FFT encoding is not supported for matrix encoding"),
}
}
fn template<F: PrimeField>(b: &Bencher, nb_bytes: usize, k: usize, n: usize, encoding: &Encoding) {
let mut rng = thread_rng();
let encoding_mat = build_encoding_mat(k, n, encoding, &mut rng);
plnk::bench(
b,
&format!(
r#"{{"bytes": {}, "step": "encode", "k": {}, "n": {}}}"#,
nb_bytes, k, n
),
|| {
match encoding {
Encoding::Fft => {
assert_eq!(n.count_ones(), 1, "n must be a power of 2");
assert_eq!(k.count_ones(), 1, "k must be a power of 2");
let bytes = random_bytes(nb_bytes, &mut rng);
let mut shards: Vec<fec::Shard<F>> = vec![];
plnk::bench(
b,
&format!(
r#"{{"bytes": {}, "step": "encode", "method": "fft", "k": {}, "n": {}}}"#,
nb_bytes, k, n
),
|| {
plnk::timeit(|| {
let evaluations = fri::evaluate::<F>(&bytes, k, n);
shards = fri::encode::<F>(&bytes, evaluations, k)
})
},
);
let evaluations = fri::evaluate::<F>(&bytes, k, n);
let mut blocks =
fri::prove::<2, F, Sha3_512, DensePolynomial<F>>(evaluations, shards, 2, 2, 1)
.unwrap();
random_loss(&mut blocks, k, &mut rng);
plnk::bench(
b,
&format!(
r#"{{"bytes": {}, "step": "decode", "method":"fft" "k": {}, "n": {}}}"#,
nb_bytes, k, n
),
|| {
plnk::timeit(|| {
fri::decode::<F, Sha3_512>(blocks.clone(), n);
})
},
);
}
_ => {
let encoding_mat = build_encoding_mat(k, n, encoding, &mut rng);
plnk::bench(
b,
&format!(
r#"{{"bytes": {}, "step": "encode", "method": "matrix", "k": {}, "n": {}}}"#,
nb_bytes, k, n
),
|| {
let bytes = random_bytes(nb_bytes, &mut rng);
plnk::timeit(|| fec::encode::<F>(&bytes, &encoding_mat).unwrap())
},
);
plnk::timeit(|| fec::encode::<F>(&bytes, &encoding_mat).unwrap())
},
);
let encoding_mat = build_encoding_mat(k, k, encoding, &mut rng);
let encoding_mat = build_encoding_mat(k, k, encoding, &mut rng);
plnk::bench(
b,
&format!(
r#"{{"bytes": {}, "step": "decode", "k": {}, "n": {}}}"#,
nb_bytes, k, n
),
|| {
let bytes = random_bytes(nb_bytes, &mut rng);
let shards = fec::encode::<F>(&bytes, &encoding_mat).unwrap();
plnk::bench(
b,
&format!(
r#"{{"bytes": {}, "step": "decode", "method":"matrix", "k": {}, "n": {}}}"#,
nb_bytes, k, n
),
|| {
let bytes = random_bytes(nb_bytes, &mut rng);
let shards = fec::encode::<F>(&bytes, &encoding_mat).unwrap();
plnk::timeit(|| fec::decode::<F>(shards.clone()).unwrap())
},
);
plnk::timeit(|| fec::decode::<F>(shards.clone()).unwrap())
},
);
}
}
}
/// How the benchmark turns input bytes into shards.
#[derive(ValueEnum, Clone)]
enum Encoding {
    /// Vandermonde-matrix based encoding
    Vandermonde,
    /// random-matrix based encoding
    Random,
    /// FFT-based encoding (the FRI code path)
    Fft,
}
#[derive(ValueEnum, Clone, Hash, PartialEq, Eq)]
......@@ -73,6 +128,7 @@ enum Curve {
BLS12381,
BN254,
Pallas,
FP128,
}
#[derive(Parser)]
......@@ -124,6 +180,9 @@ fn main() {
cli.n,
&cli.encoding,
),
Curve::FP128 => {
template::<Fq128>(&b.with_name("FP128"), n, cli.k, cli.n, &cli.encoding)
}
}
}
}
......
[package]
name = "rank"
version = "1.0.0"
version = "1.0.1"
edition = "2021"
description = "Compute the rank of a matrix."
......
[package]
name = "saclin"
version = "1.0.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = "1.0.81"
ark-bls12-381 = "0.4.0"
ark-ec = "0.4.2"
ark-ff = "0.4.2"
ark-poly = "0.4.2"
ark-serialize = "0.4.2"
ark-std = "0.4.0"
komodo = { path = "../../", features = ["fs"] }
rand = "0.8.5"
tracing = "0.1.40"
tracing-subscriber = "0.3.17"
## CLI app using Semi-AVID
# convert an int, or a list of ints, into a single binary blob by
# concatenating the compact binary form of each int (`into binary --compact`)
export def "bytes from_int" []: [int -> binary, list<int> -> binary] {
    each { into binary --compact } | bytes collect
}
# convert a binary blob back into the list of its individual bytes as ints
export def "bytes to_int" []: binary -> list<int> {
    let bytes = $in
    # `seq` is 1-based while `bytes at` is 0-based, hence the ($i - 1) offset;
    # each one-byte slice is turned into its integer value with `get 0`
    seq 1 ($bytes | bytes length) | each {|i|
        $bytes | bytes at ($i - 1)..($i) | get 0
    }
}
#!/usr/bin/env nu
use .. [
"saclin build",
"saclin setup",
"saclin prove",
"saclin verify",
"saclin reconstruct",
"saclin ls",
]
use ../binary.nu [ "bytes from_int" ]
use std assert
const BYTES = "assets/dragoon_32x32.png"
const FEC_PARAMS = { k: 3, n: 5 }
const BLOCKS_TO_VERIFY = [0, 1]
const BLOCKS_TO_RECONSTRUCT = [0, 2, 3]
# end-to-end test: build, setup, prove, verify and reconstruct a known file
def main [] {
    saclin build
    # the trusted setup must be big enough for the whole input file
    saclin setup (open $BYTES | into binary | bytes length)
    saclin prove $BYTES --fec-params $FEC_PARAMS

    let blocks = saclin ls
    saclin verify ...($BLOCKS_TO_VERIFY | each {|i| $blocks | get $i })

    # reconstruct from a k-subset of blocks and compare with the original bytes
    let actual = saclin reconstruct ...($BLOCKS_TO_RECONSTRUCT | each {|i| $blocks | get $i })
    let expected = open $BYTES | bytes from_int
    assert equal $actual $expected

    print "reconstruction was successful"
}
# Welcome to SACLIN (**S**emi-**A**VID **CLI** in **N**ushell), a tool to encode and prove data.
#
# please run `saclin --help` or `saclin <tab>` to have a look at more information
module binary.nu
use binary ["bytes from_int"]
const BIN = "./target/release/saclin"
const DEFAULT_LOG_LEVEL = "INFO"
# resolve the SACLIN home directory
#
# `$env.SACLIN_HOME` wins when set, otherwise fall back to
# `$env.XDG_DATA_HOME/saclin` and finally to `~/.local/share/saclin`
def home-dir []: nothing -> path {
    let data_home = $env.XDG_DATA_HOME? | default "~/.local/share"
    $env.SACLIN_HOME?
        | default ($data_home | path join "saclin")
        | path expand
}
# the directory where SACLIN stores the encoded block files, under the home dir
def block-dir []: nothing -> path {
    home-dir | path join "blocks"
}
# completion values for `--log-level` flags
def "nu-complete log-levels" []: nothing -> list<string> {
    [
        "TRACE"
        "DEBUG",
        "INFO",
        "WARN",
        "ERROR",
    ]
}
# completion values for `--encoding-method` flags
def "nu-complete encoding-methods" []: nothing -> list<string> {
    [
        "vandermonde"
        "random",
    ]
}
# low-level wrapper around the SACLIN binary (`$BIN`)
#
# the flags of this command are flattened into a fixed list of positional
# arguments for the binary; the order must match `parse_args` on the Rust side
# exactly: input, k, n, generate-powers, home dir, reconstruct, verify,
# combine, inspect, nb-bytes, encoding method, then the block hashes
#
# stdout of the binary is forwarded as-is, while its stderr is parsed as JSON
# and returned
def run-saclin [
    --input: path = "",
    --nb-bytes: int = 0,
    -k: int = 0,
    -n: int = 0,
    --generate-powers,
    --reconstruct,
    --verify,
    --combine,
    --inspect,
    --encoding-method: string = "",
    --log-level: string,
    ...block_hashes: string,
]: nothing -> any {
    # make sure the home and block directories exist before running the binary
    let home_dir = home-dir
    if not ($home_dir | is-empty) {
        mkdir $home_dir
    }
    let block_dir = block-dir
    if not ($block_dir | is-empty) {
        mkdir $block_dir
    }

    # NOTE(review): RUST_LOG presumably drives the binary's `tracing`
    # verbosity — confirm against the subscriber setup in `main.rs`
    with-env {RUST_LOG: $log_level} {
        let res = do {
            # booleans are passed as the strings "true"/"false"
            ^$BIN ...([
                $input
                $k
                $n
                ($generate_powers | into string)
                $home_dir
                ($reconstruct | into string)
                ($verify | into string)
                ($combine | into string)
                ($inspect | into string)
                $nb_bytes
                $encoding_method
            ] | append $block_hashes)
        } | complete

        print --no-newline $res.stdout
        # a non-zero exit code aborts with the binary's stderr as the message
        if $res.exit_code != 0 {
            error make --unspanned { msg: $"($res.stderr) \(($res.exit_code)\)" }
        }
        # the binary reports its result on stderr as JSON
        $res.stderr | from json
    }
}
# list the hashes (file stems) of all blocks currently in the block store
#
# returns an empty list when the block directory does not exist yet, i.e.
# when the `ls` fails
def list-blocks []: nothing -> list<string> {
    try {
        ls (block-dir) | get name | path parse | get stem
    } catch {
        []
    }
}
# build SACLIN from source, updating the application
export def "saclin build" []: nothing -> nothing {
    # the release binary is the one invoked by `run-saclin` (see `$BIN`)
    ^cargo build --release --manifest-path bins/saclin/Cargo.toml
}
# create a random trusted setup for a given amount of data
#
# # Examples
# ```nushell
# # create a trusted setup well suited for a file called `my_target_file.txt`
# saclin setup (open my_target_file.txt | into binary | bytes length)
# ```
# ---
# ```nushell
# # create a trusted setup for 50k bytes and make sure the setup has been created
# saclin setup 50_000
# use std assert; assert ("~/.local/share/saclin/powers" | path exists)
# ```
export def "saclin setup" [
    nb_bytes: int, # the size of the biggest expected data during the lifetime of the application
    --log-level: string@"nu-complete log-levels" = $DEFAULT_LOG_LEVEL # change the log level
]: nothing -> nothing {
    # only the "generate powers" code path of the binary is exercised here
    (
        run-saclin
            --log-level $log_level
            --nb-bytes $nb_bytes
            --generate-powers
    )
}
# encode and _prove_ a bunch of input bytes
#
# # Examples
# ```nushell
# # encode and prove `assets/dragoon_32x32.png` with a _3 x 5_ Vandermonde encoding
# saclin prove assets/dragoon_32x32.png --fec-params {k: 3, n: 5} --encoding-method vandermonde
# ```
# ```
# ─┬────────────────────────────────────────────────────────────────
# 0│44614daf1f5ebb86f1c69293b82c7795a5a35b4d12718b551648223441028e3
# 1│8be575889246fbc49f4c748ac2dc1cd8a4ef71d16e91c9343660a5f79f086
# 2│6de9fd5fdfe8c08b3132e0d527b14a2a4e4be9a543af1f13d2c397bd113846e4
# 3│f1c34065cbfc3267f9d41558a465ba6335fd45229ff2eae5b34a8f30467562
# 4│7aa698f338605462205c5ff46b5463720d073de92a19f897cc4ae6c286ab87
# ─┴────────────────────────────────────────────────────────────────
# ```
export def "saclin prove" [
    input: path, # the path to the input file to encode and prove
    --fec-params: record<k: int, n: int>, # the parameters of the encoding
    --encoding-method: string@"nu-complete encoding-methods" = "random", # the encoding method, e.g. _random_ or _vandermonde_
    --log-level: string@"nu-complete log-levels" = $DEFAULT_LOG_LEVEL # change the log level
]: nothing -> list<string> {
    # NOTE: the next two runtime checks on the type of `--fec-params` might be
    # a bug on the Nushell side
    if $fec_params == null {
        error make --unspanned {
            msg: "`saclin prove` requires `--fec-params` to be given"
        }
    }

    # require `--fec-params` to be a record with exactly the keys `k: int`
    # and `n: int`; the columns are sorted so key order does not matter
    let type = $fec_params | describe --detailed | update columns { sort }
    let expected = { type: record, columns: { k: int, n: int } }
    if $type != $expected {
        error make {
            msg: $"(ansi red_bold)invalid `--fec-params`(ansi reset)",
            label: {
                text: $"expected ($expected) got ($type)",
                span: (metadata $fec_params).span,
            }
        }
    }

    (
        run-saclin
            --log-level $log_level
            --input $input
            -k $fec_params.k
            -n $fec_params.n
            --encoding-method $encoding_method
    )
}
# verify the integrity of any number of blocks
#
# # Examples
# ```nushell
# # verify the integrity of two blocks (note the use of the spread operator introduced in Nushell 0.89.0)
# # > **Note**
# # > file: `assets/dragoon_32x32.png`
# # > parameters: k = 3 and n = 5
# # > method: vandermonde
# saclin verify ...[
# 44614daf1f5ebb86f1c69293b82c7795a5a35b4d12718b551648223441028e3,
# 7aa698f338605462205c5ff46b5463720d073de92a19f897cc4ae6c286ab87,
# ]
# ```
# ```
# #┬─────────────────────────────block─────────────────────────────┬status
# 0│44614daf1f5ebb86f1c69293b82c7795a5a35b4d12718b551648223441028e3│true
# 1│7aa698f338605462205c5ff46b5463720d073de92a19f897cc4ae6c286ab87 │true
# ─┴───────────────────────────────────────────────────────────────┴──────
# ```
# NOTE: the binary reports each status as a boolean (see `verify_blocks` on
# the Rust side and the example above), so the declared output type is
# `status: bool`, not `status: int`
export def "saclin verify" [
    ...blocks: string@"list-blocks", # the list of blocks to verify
    --log-level: string@"nu-complete log-levels" = $DEFAULT_LOG_LEVEL # change the log level
]: nothing -> table<block: string, status: bool> {
    run-saclin --log-level $log_level --verify ...$blocks
}
# reconstruct the original data from a subset of blocks
#
# `saclin reconstruct` might throw an error in some cases
# - when there are too few blocks
# - when the blocks are linearly dependant, and thus the decoding cannot be applied
# - when the blocks belong to different data
#
# # Examples
# ```nushell
# # applying a valid reconstruction
# # > **Note**
# # > file: `assets/dragoon_32x32.png`
# # > parameters: k = 3 and n = 5
# # > method: vandermonde
# let bytes = saclin reconstruct ...[
# 44614daf1f5ebb86f1c69293b82c7795a5a35b4d12718b551648223441028e3,
# 7aa698f338605462205c5ff46b5463720d073de92a19f897cc4ae6c286ab87,
# 8be575889246fbc49f4c748ac2dc1cd8a4ef71d16e91c9343660a5f79f086,
# ]
# $bytes | bytes at 0..10
# ```
# ```
# Length: 10 (0xa) bytes | printable whitespace ascii_other non_ascii
# 00000000: 89 50 4e 47 0d 0a 1a 0a 00 00 ×PNG__•_00
# ```
# ---
# ```nushell
# # giving too few blocks
# # > **Note**
# # > file: `assets/dragoon_32x32.png`
# # > parameters: k = 3 and n = 5
# # > method: vandermonde
# saclin reconstruct ...[
# 44614daf1f5ebb86f1c69293b82c7795a5a35b4d12718b551648223441028e3,
# 7aa698f338605462205c5ff46b5463720d073de92a19f897cc4ae6c286ab87,
# ]
# ```
# ```
# Error: × could not decode: Expected at least 3, got 2 (1)
# ```
# ---
# ```nushell
# # after combining _44614d_ and _6de9fd_ (see [`saclin combine`]), try to decode with linear dependencies
# # > **Note**
# # > file: `assets/dragoon_32x32.png`
# # > parameters: k = 3 and n = 5
# # > method: vandermonde
# # > recoding: _44614d_ <+> _6de9fd_ => _86cdd1_
# saclin reconstruct ...[
# 44614daf1f5ebb86f1c69293b82c7795a5a35b4d12718b551648223441028e3,
# 6de9fd5fdfe8c08b3132e0d527b14a2a4e4be9a543af1f13d2c397bd113846e4,
# 86cdd1b7ed79618696ab82d848833cbe448719a513b850207936e4dce6294,
# ]
# ```
# ```
# Error: × could not decode: Matrix is not invertible at row 2 (1)
# ```
export def "saclin reconstruct" [
    ...blocks: string@"list-blocks", # the blocks that should be used to reconstruct the original data
    --log-level: string@"nu-complete log-levels" = $DEFAULT_LOG_LEVEL # change the log level
]: nothing -> binary {
    # the binary emits the decoded data as a list of ints, convert it back
    # into raw bytes before returning
    run-saclin --log-level $log_level --reconstruct ...$blocks | bytes from_int
}
# combine two blocks by computing a random linear combination
#
# # Examples
# # > **Note**
# # > file: `assets/dragoon_133x133.png`
# # > parameters: k = 7 and n = 23
# # > method: random
# ```nushell
# saclin combine ...[
# 1b112a11cd89dad619aadc18cb2c15c315453e177f1117c79d4ae4e219922,
# 31c9bfe2845cc430d666413d8b8b51aee0d010aa89275a8c7d9d9ca1c9e05c,
# ]
# ```
# ```
# b785bf5b93d7811792db7234d1b1ee7347398cee617243612d3225fa245545
# ```
# ---
# ```nushell
# # not giving exactly 2 blocks
# # > **Note**
# # > file: `assets/dragoon_133x133.png`
# # > parameters: k = 7 and n = 23
# # > method: random
# saclin combine ...[
# c22fe3c72cbc52fc55b46a3f9783f5c9a1e5fb59875f736332cf1b970b8,
# 1b112a11cd89dad619aadc18cb2c15c315453e177f1117c79d4ae4e219922,
# f3f423df47cd7538accd38abe9ad6670b894243647af98fbfa9776e9cf7ff8e,
# ]
# ```
# ```
# Error: × expected exactly 2 blocks, found 3 (1)
# ```
export def "saclin combine" [
    ...blocks: string@"list-blocks", # the blocks to combine, should contain two hashes
    --log-level: string@"nu-complete log-levels" = $DEFAULT_LOG_LEVEL # change the log level
]: nothing -> string {
    # the binary returns a one-element list holding the new block's hash
    run-saclin --log-level $log_level --combine ...$blocks | get 0
}
# open one or more blocks and inspect their content
#
# # Examples
# ```nushell
# # inspect a single block
# # # > **Note**
# # # > file: `assets/dragoon_133x133.png`
# # # > parameters: k = 7 and n = 23
# # # > method: random
# # # >
# # # > `$.commits` and `$shard.bytes` have been truncated for readability
# saclin inspect 374f23fd1f25ae4050c414bc169550bdd10f49f775e2af71d2aee8a87dc
# | into record
# | update commits { parse --regex '\((?<f>\d{7})\d+, (?<s>\d{7})\d+\)' }
# | update shard.bytes { length }
# ```
# ```
# ───────┬──────────────────────────────────────────────────────────────────────
# │─────┬────────────────────────────────────────────────────────────────
# shard │k │7
# │ │─┬───────────────────────────────────────
# │comb │0│79293070128283035155183921571762200246
# │ │1│251822311562506197186167674369775071480
# │ │2│271375591445361086306725794586025747695
# │ │3│170387538935153872006296956270886059735
# │ │4│67758248758369211569040944574905941217
# │ │5│245908698054074962369032280439731463970
# │ │6│323120636634748190275497410128071523309
# │ │─┴───────────────────────────────────────
# │bytes│89
# │hash │d116d9e2bdb0e03fb2bdd6e716a929198f1012ae62a83e773eb2c21917f4b12c
# │size │19102
# │─────┴────────────────────────────────────────────────────────────────
# │#┬───f───┬───s───
# commits│0│3258872│1117914
# │1│1336159│2841207
# │2│3908964│4603563
# │3│3956175│1154567
# │4│2056568│3904956
# │5│2957425│2772456
# │6│2395336│2282274
# │─┴───────┴───────
# m │89
# ───────┴──────────────────────────────────────────────────────────────────────
# ```
export def "saclin inspect" [
    ...blocks: string@"list-blocks", # the blocks to inspect
    --log-level: string@"nu-complete log-levels" = $DEFAULT_LOG_LEVEL # change the log level
]: nothing -> table<shard: record<k: int, comb: list<any>, bytes: list<string>, hash: string, size: int>, commits: list<string>, m: int> {
    # the binary dumps the content of each block, parsed from its stderr
    run-saclin --log-level $log_level --inspect ...$blocks
}
# list all the blocks that are currently in the store
#
# thin public wrapper around the internal `list-blocks` helper
export def "saclin ls" []: nothing -> list<string> {
    list-blocks
}
# clean the SACLIN home from all blocks and trusted setup
export def "saclin clean" []: nothing -> nothing {
    # removes the whole home directory, blocks and "powers" file included
    rm --force --recursive (home-dir)
}
# wrap the input string in backticks, dimmed, for display in help messages
def pretty-code []: string -> string {
    let code = $in
    $"`(ansi default_dimmed)($code)(ansi reset)`"
}
# the main entry point of SACLIN, will only print some help
export def main []: nothing -> nothing {
    # explain where SACLIN writes its files, in order of precedence
    let help = [
        $"the location of the files generated by SACLIN can be configured via ("$env.SACLIN_HOME" | pretty-code) which will default to",
        $"- ("$env.XDG_DATA_HOME/saclin" | pretty-code) if ("$env.XDG_DATA_HOME" | pretty-code) is set",
        $"- ("~/.local/share/saclin/" | pretty-code) otherwise"
    ]

    print ($help | str join "\n")
}
use std::path::{Path, PathBuf};
use std::process::exit;
use ark_bls12_381::{Fr, G1Projective};
use ark_ec::CurveGroup;
use ark_ff::PrimeField;
use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};
use ark_serialize::{CanonicalDeserialize, Compress, Validate};
use ark_std::ops::Div;
use anyhow::Result;
use ark_std::rand::RngCore;
use tracing::{info, warn};
use komodo::{
algebra::linalg::Matrix,
error::KomodoError,
fec::{self, decode, Shard},
fs,
semi_avid::{build, prove, recode, verify, Block},
zk::{self, Powers},
};
const COMPRESS: Compress = Compress::Yes;
const VALIDATE: Validate = Validate::Yes;
/// Parse the SACLIN binary's positional command-line arguments.
///
/// Expected layout (1-based positions):
/// 1. path to the input file (an empty path means "no input bytes")
/// 2. `k`  3. `n`  4. generate-powers flag  5. home directory
/// 6. reconstruct flag  7. verify flag  8. combine flag  9. inspect flag
/// 10. number of bytes  11. encoding method  12 and onwards: block hashes
///
/// Booleans arrive as the strings "true"/"false" (see the Nushell wrapper).
///
/// # Panics
/// Panics with a descriptive message when an argument is missing or cannot
/// be parsed, and when the input file cannot be read.
#[allow(clippy::type_complexity)]
fn parse_args() -> (
    Vec<u8>,
    usize,
    usize,
    bool,
    String,
    bool,
    bool,
    bool,
    bool,
    usize,
    String,
    Vec<String>,
) {
    // fetch the raw `nth` positional argument, panicking with `desc` when absent
    fn raw(nth: usize, desc: &str) -> String {
        std::env::args()
            .nth(nth)
            .unwrap_or_else(|| panic!("expected {}", desc))
    }

    // fetch and parse the `nth` positional argument into `T`, panicking on failure
    fn parsed<T>(nth: usize, desc: &str, name: &str, ty: &str) -> T
    where
        T: std::str::FromStr,
        T::Err: std::fmt::Debug,
    {
        raw(nth, desc)
            .parse()
            .unwrap_or_else(|e| panic!("could not parse {} as {}: {:?}", name, ty, e))
    }

    let bytes_path = raw(1, "path to bytes as first positional argument");
    // an empty path is the "no input" sentinel, not an error
    let bytes = if bytes_path.is_empty() {
        vec![]
    } else {
        std::fs::read(bytes_path).unwrap()
    };

    let k: usize = parsed(2, "k as second positional argument", "k", "an int");
    let n: usize = parsed(3, "n as third positional argument", "n", "an int");
    let do_generate_powers: bool = parsed(
        4,
        "do_generate_powers as fourth positional argument",
        "do_generate_powers",
        "a bool",
    );
    let home_dir = raw(5, "home_dir as fifth positional argument");
    let do_reconstruct_data: bool = parsed(
        6,
        "do_reconstruct_data as sixth positional argument",
        "do_reconstruct_data",
        "a bool",
    );
    let do_verify_blocks: bool = parsed(
        7,
        "do_verify_blocks as seventh positional argument",
        "do_verify_blocks",
        "a bool",
    );
    let do_combine_blocks: bool = parsed(
        8,
        "do_combine_blocks as eighth positional argument",
        "do_combine_blocks",
        "a bool",
    );
    let do_inspect_blocks: bool = parsed(
        9,
        "do_inspect_blocks as ninth positional argument",
        "do_inspect_blocks",
        "a bool",
    );
    let nb_bytes: usize = parsed(
        10,
        "nb_bytes as 10th positional argument",
        "nb_bytes",
        "a usize",
    );
    let encoding_method = raw(11, "encoding_method as 11th positional argument");

    // everything after the fixed arguments is a block hash
    let block_hashes = std::env::args().skip(12).collect::<Vec<_>>();

    (
        bytes,
        k,
        n,
        do_generate_powers,
        home_dir,
        do_reconstruct_data,
        do_verify_blocks,
        do_combine_blocks,
        do_inspect_blocks,
        nb_bytes,
        encoding_method,
        block_hashes,
    )
}
/// Write `message` to stderr (no trailing newline) and terminate the whole
/// process with exit code `code`.
///
/// Since `exit` never returns, callers follow this with `unreachable!()` to
/// satisfy the type checker.
fn throw_error(code: i32, message: &str) {
    eprint!("{message}");
    exit(code);
}
/// Generate a fresh random trusted setup able to support `n` bytes of data
/// and dump it into `powers_dir` under `powers_filename`.
///
/// # Errors
/// Propagates failures from [`zk::setup`] and from dumping the powers to disk.
fn generate_random_powers<F, G, P>(
    n: usize,
    powers_dir: &Path,
    powers_filename: Option<&str>,
    rng: &mut impl RngCore,
) -> Result<()>
where
    F: PrimeField,
    G: CurveGroup<ScalarField = F>,
    P: DenseUVPolynomial<F>,
    for<'a, 'b> &'a P: Div<&'b P, Output = P>,
{
    info!("generating new powers");
    // the size of the setup is derived from the number of bytes to support
    let powers = zk::setup::<F, G>(zk::nb_elements_in_setup::<F>(n), rng)?;
    fs::dump(&powers, powers_dir, powers_filename, COMPRESS)?;

    Ok(())
}
/// Verify each `(filename, block)` pair against the trusted setup `powers`
/// and print a `[{block: .., status: ..}, ..]` report on stderr.
///
/// # Errors
/// Returns the first [`KomodoError`] raised by `verify`, in which case
/// nothing is printed.
fn verify_blocks<F, G, P>(
    blocks: &[(String, Block<F, G>)],
    powers: Powers<F, G>,
) -> Result<(), KomodoError>
where
    F: PrimeField,
    G: CurveGroup<ScalarField = F>,
    P: DenseUVPolynomial<F>,
    for<'a, 'b> &'a P: Div<&'b P, Output = P>,
{
    // verify everything first so an error aborts before any output is written
    let mut statuses = Vec::with_capacity(blocks.len());
    for (file, block) in blocks {
        statuses.push((file, verify::<F, G, P>(block, &powers)?));
    }

    eprint!("[");
    for (file, ok) in statuses {
        eprint!("{{block: {:?}, status: {}}}", file, ok);
    }
    eprint!("]");

    Ok(())
}
fn main() {
    // logging goes through `tracing`; failing to initialize it is fatal
    tracing_subscriber::fmt::try_init().expect("cannot init logger");

    let mut rng = rand::thread_rng();

    let (
        bytes,
        k,
        n,
        do_generate_powers,
        home_dir,
        do_reconstruct_data,
        do_verify_blocks,
        do_combine_blocks,
        do_inspect_blocks,
        nb_bytes,
        encoding_method,
        block_hashes,
    ) = parse_args();

    // blocks live under `<home>/blocks/`, the powers directly under `<home>`
    let home_dir = PathBuf::from(&home_dir);
    let block_dir = home_dir.join("blocks/");
    let powers_dir = home_dir;
    let powers_filename = "powers";
    let powers_file = powers_dir.join(powers_filename);

    // mode 1: only generate a trusted setup, then quit
    if do_generate_powers {
        generate_random_powers::<Fr, G1Projective, DensePolynomial<Fr>>(
            nb_bytes,
            &powers_dir,
            Some(powers_filename),
            &mut rng,
        )
        .unwrap_or_else(|e| throw_error(1, &format!("could not generate powers: {}", e)));

        exit(0);
    }

    // mode 2: reconstruct the original data from the given blocks, then quit
    if do_reconstruct_data {
        let blocks: Vec<Shard<Fr>> =
            fs::read_blocks::<Fr, G1Projective>(&block_hashes, &block_dir, COMPRESS, VALIDATE)
                .unwrap_or_else(|e| {
                    throw_error(1, &format!("could not read blocks: {}", e));
                    unreachable!()
                })
                .iter()
                .cloned()
                .map(|b| b.1.shard)
                .collect();
        // the decoded bytes go to stderr, where the Nushell wrapper reads them
        eprintln!(
            "{:?}",
            decode::<Fr>(blocks).unwrap_or_else(|e| {
                throw_error(1, &format!("could not decode: {}", e));
                unreachable!()
            })
        );

        exit(0);
    }

    // mode 3: recode the given blocks into a new one and dump it, then quit
    if do_combine_blocks {
        let blocks =
            fs::read_blocks::<Fr, G1Projective>(&block_hashes, &block_dir, COMPRESS, VALIDATE)
                .unwrap_or_else(|e| {
                    throw_error(1, &format!("could not read blocks: {}", e));
                    unreachable!()
                });

        let formatted_output = fs::dump_blocks(
            &[recode(
                &blocks.iter().map(|(_, b)| b).cloned().collect::<Vec<_>>(),
                &mut rng,
            )
            // recode returns Result<Option<_>, _>: first unwrap the error...
            .unwrap_or_else(|e| {
                throw_error(1, &format!("could not encode block: {}", e));
                unreachable!()
            })
            // ...then the `None` case (empty input)
            .unwrap_or_else(|| {
                throw_error(1, "could not recode block (list of blocks is likely empty)");
                unreachable!()
            })],
            &block_dir,
            COMPRESS,
        )
        .unwrap_or_else(|e| {
            throw_error(1, &format!("could not dump block: {}", e));
            unreachable!()
        });

        eprint!("{}", formatted_output);

        exit(0);
    }

    // mode 4: pretty-print the given blocks on stderr, then quit
    if do_inspect_blocks {
        let blocks =
            fs::read_blocks::<Fr, G1Projective>(&block_hashes, &block_dir, COMPRESS, VALIDATE)
                .unwrap_or_else(|e| {
                    throw_error(1, &format!("could not read blocks: {}", e));
                    unreachable!()
                });
        eprint!("[");
        for (_, block) in &blocks {
            eprint!("{},", block);
        }
        eprintln!("]");

        exit(0);
    }

    // the remaining modes need the trusted setup: read it from disk, or fall
    // back to a freshly generated in-memory one when the file is unreadable
    info!("reading powers from file `{:?}`", powers_file);
    let powers = if let Ok(serialized) = std::fs::read(&powers_file) {
        info!("deserializing the powers from `{:?}`", powers_file);
        Powers::<Fr, G1Projective>::deserialize_with_mode(&serialized[..], COMPRESS, VALIDATE)
            .unwrap_or_else(|e| {
                throw_error(
                    1,
                    &format!("could not deserialize powers from {:?}: {}", powers_file, e),
                );
                unreachable!()
            })
    } else {
        warn!("could not read powers from `{:?}`", powers_file);
        info!("regenerating temporary powers");
        zk::setup::<Fr, G1Projective>(zk::nb_elements_in_setup::<Fr>(nb_bytes), &mut rng)
            .unwrap_or_else(|e| {
                throw_error(1, &format!("could not generate powers: {}", e));
                unreachable!()
            })
    };

    // mode 5: verify the given blocks against the setup, then quit
    if do_verify_blocks {
        verify_blocks::<Fr, G1Projective, DensePolynomial<Fr>>(
            &fs::read_blocks::<Fr, G1Projective>(&block_hashes, &block_dir, COMPRESS, VALIDATE)
                .unwrap_or_else(|e| {
                    throw_error(1, &format!("could not read blocks: {}", e));
                    unreachable!()
                }),
            powers,
        )
        .unwrap_or_else(|e| {
            throw_error(1, &format!("Failed to verify blocks: {}", e));
            unreachable!()
        });

        exit(0);
    }

    // default mode: encode + prove the input bytes and dump the blocks
    let encoding_mat = match encoding_method.as_str() {
        "vandermonde" => {
            let points: Vec<Fr> = (0..n)
                .map(|i| Fr::from_le_bytes_mod_order(&i.to_le_bytes()))
                .collect();
            Matrix::vandermonde_unchecked(&points, k)
        }
        "random" => Matrix::random(k, n, &mut rng),
        m => {
            throw_error(1, &format!("invalid encoding method: {}", m));
            unreachable!()
        }
    };

    let shards = fec::encode::<Fr>(&bytes, &encoding_mat).unwrap_or_else(|e| {
        throw_error(1, &format!("could not encode: {}", e));
        unreachable!()
    });
    let proof =
        prove::<Fr, G1Projective, DensePolynomial<Fr>>(&bytes, &powers, k).unwrap_or_else(|e| {
            throw_error(1, &format!("could not prove: {}", e));
            unreachable!()
        });
    let blocks = build::<Fr, G1Projective, DensePolynomial<Fr>>(&shards, &proof);

    // the hashes of the dumped blocks are reported on stderr for the wrapper
    let formatted_output = fs::dump_blocks(&blocks, &block_dir, COMPRESS).unwrap_or_else(|e| {
        throw_error(1, &format!("could not dump blocks: {}", e));
        unreachable!()
    });
    eprint!("{}", formatted_output);
}
use ../binary.nu [ "bytes from_int", "bytes to_int" ]
use std assert
# generate a list of `n` random integers, each in the range 0..255
#
# NOTE: the original used `0..$n`, which is inclusive on both ends in Nushell
# and therefore produced `n + 1` values; `1..$n` yields exactly `n`.
# `n <= 0` is handled explicitly because `1..0` would be a *descending* range.
def random-bytes [n: int]: nothing -> list<int> {
    if $n <= 0 {
        []
    } else {
        1..$n | each { random int 0..255 }
    }
}
# sanity checks for the `bytes from_int` and `bytes to_int` conversions
def main [] {
    const hello_world_int = [
        104, 101, 108, 108, 111, 32, 119, 111, 114, 108, 100, 33
    ]
    const expected = 0x[68 65 6C 6C 6F 20 77 6F 72 6C 64 21]

    # known-answer check on "hello world!"
    assert equal ($hello_world_int | bytes from_int) $expected

    # round-trip check on a random payload
    let payload = random-bytes 1_000
    assert equal ($payload | bytes from_int | bytes to_int) $payload
}
use .. [
"saclin build",
"saclin setup",
"saclin prove",
"saclin verify",
"saclin reconstruct",
"saclin ls",
"saclin clean",
]
use ../binary.nu [ "bytes from_int" ]
use std assert
# a simple module to extend the `math` command
module math {
    # `choose k n` is the list of all the possible $k$ indices chosen among $n$ indices
    #
    # combinations are built recursively: each $(k - 1)$-combination is extended
    # with every index strictly greater than its last element, so each resulting
    # combination is sorted in increasing order and appears exactly once.
    #
    # NOTE: `choose 0 n` returns `[]` by convention here (see [_test_choose]),
    # not the mathematical `[[]]`.
    #
    # see [_test_choose] below for some examples
    export def choose [k: int, n: int]: nothing -> list<list<int>> {
        if $k == 0 {
            return []
        } else if $k == 1 {
            # base case: all the singletons [0], ..., [n - 1]
            return (seq 0 ($n - 1) | each {[ $in ]})
        }

        choose ($k - 1) $n
            | each { |x|
                let l = $x | last
                # only extend with indices after the last one, to keep
                # combinations sorted and duplicate-free; combinations already
                # ending in (n - 1) cannot be extended and are dropped
                if $l != ($n - 1) {
                    seq ($l + 1) ($n - 1) | each {|it| $x | append $it}
                }
            }
            | flatten
    }

    def _test_choose [] {
        use std assert

        assert equal (choose 0 5) []
        assert equal (choose 1 5) [[0], [1], [2], [3], [4]]
        assert equal (choose 2 5) [
            [0, 1], [0, 2], [0, 3], [0, 4], [1, 2], [1, 3], [1, 4], [2, 3], [2, 4], [3, 4]
        ]
        assert equal (choose 3 5) [
            [0, 1, 2],
            [0, 1, 3],
            [0, 1, 4],
            [0, 2, 3],
            [0, 2, 4],
            [0, 3, 4],
            [1, 2, 3],
            [1, 2, 4],
            [1, 3, 4],
            [2, 3, 4],
        ]
        assert equal (choose 4 5) [
            [0, 1, 2, 3],
            [0, 1, 2, 4],
            [0, 1, 3, 4],
            [0, 2, 3, 4],
            [1, 2, 3, 4],
        ]
        assert equal (choose 5 5) [[0, 1, 2, 3, 4]]
    }

    # `perm n` is the list of all the possible permutations on $n$ elements
    #
    # permutations are built recursively: element $(n - 1)$ is inserted at every
    # possible position of every permutation of the first $(n - 1)$ elements.
    #
    # NOTE: `perm 0` returns `[]` by convention here (see [_test_perm]).
    #
    # see [_test_perm] below for some examples
    export def perm [n: int]: nothing -> list<list<int>> {
        if $n == 0 {
            return []
        } else if $n == 1 {
            return [[0]]
        }

        perm ($n - 1)
            | each {|x|
                # ($x | length) + 1 insertion positions: before each element
                # and one past the end
                seq 0 ($x | length) | each {|i| $x | insert $i ($n - 1)}
            }
            | flatten
    }

    def _test_perm [] {
        use std assert

        assert equal (perm 0 | sort) []
        assert equal (perm 1 | sort) [[0]]
        assert equal (perm 2 | sort) [[0, 1], [1, 0]]
        assert equal (perm 3 | sort) [
            [0, 1, 2], [0, 2, 1], [1, 0, 2], [1, 2, 0], [2, 0, 1], [2, 1, 0]
        ]
        assert equal (perm 4 | sort) [
            [0, 1, 2, 3],
            [0, 1, 3, 2],
            [0, 2, 1, 3],
            [0, 2, 3, 1],
            [0, 3, 1, 2],
            [0, 3, 2, 1],
            [1, 0, 2, 3],
            [1, 0, 3, 2],
            [1, 2, 0, 3],
            [1, 2, 3, 0],
            [1, 3, 0, 2],
            [1, 3, 2, 0],
            [2, 0, 1, 3],
            [2, 0, 3, 1],
            [2, 1, 0, 3],
            [2, 1, 3, 0],
            [2, 3, 0, 1],
            [2, 3, 1, 0],
            [3, 0, 1, 2],
            [3, 0, 2, 1],
            [3, 1, 0, 2],
            [3, 1, 2, 0],
            [3, 2, 0, 1],
            [3, 2, 1, 0],
        ]
    }
}
use math
# the asset whose bytes are encoded, proven and reconstructed during the test
const FILE = "assets/dragoon_32x32.png"
# $k$ blocks are required to reconstruct, out of $n$ encoded blocks
const FEC_PARAMS = {k: 3, n: 5}
# try to reconstruct the asset file from a subset of blocks and compare the
# result against the original bytes
#
# `blocks` lists the indices (into the output of `saclin ls`) of the blocks to
# use for the reconstruction. With `--fail`, the reconstruction is expected to
# error out (e.g. when fewer than $k$ blocks are given) and the call returns as
# soon as it does.
#
# NOTE(review): the original ignored `blocks` entirely and always reconstructed
# from every available block (`saclin reconstruct ...(saclin ls)`), so the
# `--fail` cases could never exercise the failure path.
def test [blocks: list<int>, --fail] {
    let hashes = saclin ls
    let actual = try {
        # only pass the block hashes selected by `blocks`
        saclin reconstruct ...($blocks | each {|i| $hashes | get $i })
    } catch {
        if not $fail {
            error make --unspanned { msg: "woopsie" }
        } else {
            return
        }
    }

    let expected = open $FILE | bytes from_int
    assert equal $actual $expected
}
# end-to-end test: set up, prove and verify the asset file, then try to
# reconstruct it from every permutation of every possible subset of blocks
#
# subsets with fewer than $k$ blocks are expected to fail to reconstruct.
def main [] {
    saclin build

    saclin clean
    saclin setup (open $FILE | into binary | bytes length)
    saclin prove $FILE --fec-params $FEC_PARAMS
    saclin verify ...(saclin ls)

    # for each subset size ki, build every ki-among-n combination and apply
    # every permutation of ki elements to it; reconstruction must fail exactly
    # when ki < k. The final `flatten`s turn the nested tables into one flat
    # list of { blocks, fail } records.
    let all_k_choose_n_permutations = seq 1 $FEC_PARAMS.n
        | each {|ki|
            let p = math perm $ki
            math choose $ki $FEC_PARAMS.n
                | each {|it|
                    {
                        blocks: ($p | each { each {|i| $it | get $i} }),
                        fail: ($ki < $FEC_PARAMS.k),
                    }
                }
                | flatten
        }
        | flatten
    let total = $all_k_choose_n_permutations | length

    # run every case, printing a coarse progress percentage as we go
    $all_k_choose_n_permutations | enumerate | each {|it|
        print $"[($it.index / $total * 100 | into int)%]: ($it.item.blocks | str join ', ') | ($it.item.fail)"
        test $it.item.blocks --fail=$it.item.fail
    }

    print "reconstruction was successful"
}
use ark_poly::univariate::DensePolynomial;
use clap::{arg, Parser, ValueEnum};
use dragoonfri::algorithms::Sha3_512;
use dragoonfri_test_utils::Fq;
use komodo::algebra::linalg;
use komodo::{fec, fri};
use rand::{Rng, RngCore, SeedableRng};
use ark_ff::PrimeField;
use komodo::fec::Shard;
use std::time::Instant;
#[path = "utils/time.rs"]
mod time;
/// Discard randomly chosen shards, in place, until at most `k` of them remain.
///
/// The relative order of the surviving shards is preserved. If `shards`
/// already holds `k` or fewer elements, nothing happens.
fn random_loss<T>(shards: &mut Vec<T>, k: usize, rng: &mut impl Rng) {
    let excess = shards.len().saturating_sub(k);
    for _ in 0..excess {
        // pick a victim uniformly among the shards still present
        let victim = rng.gen_range(0..shards.len());
        shards.remove(victim);
    }
}
// The available coding schemes, selected with `--coding`. Regular comments are
// used instead of doc comments on purpose: clap would surface `///` text in the
// generated help output.
#[derive(ValueEnum, Debug, Clone)]
enum Coding {
    // matrix-based FEC encoding (random k x n matrix)
    Matrix,
    // FFT-based FRI encoding (k and n must be powers of two, see `run`)
    Fft,
}
// The available finite fields, selected with `--finite-field`. Regular
// comments keep clap's generated help unchanged.
#[derive(ValueEnum, Debug, Clone)]
enum FiniteField {
    // maps to `dragoonfri_test_utils::Fq` (see `main`)
    FP128,
}
// CLI arguments for the FEC-vs-FFT benchmark. Regular comments are used
// instead of doc comments so that clap's generated help is left unchanged.
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
    // number of random bytes to generate and encode
    #[arg(short, long)]
    data_size: usize,

    // RNG seed, for reproducible runs
    #[arg(long, default_value = "1234")]
    seed: u64,

    // number of source shards
    #[arg(short)]
    k: usize,

    // total number of shards
    #[arg(short)]
    n: usize,

    // finite field to run the benchmark in
    #[arg(long, default_value = "fp128")]
    finite_field: FiniteField,

    // coding scheme to benchmark
    #[arg(long, default_value = "matrix")]
    coding: Coding,
}
/// Encode `bytes` into `n` shards with the FFT-based FRI pipeline: evaluate the
/// data first, then feed the evaluations to the encoder.
fn encode_fft<F: PrimeField>(bytes: &[u8], k: usize, n: usize) -> Vec<Shard<F>> {
    fri::encode::<F>(bytes, fri::evaluate::<F>(bytes, k, n), k)
}
/// Run one end-to-end encode / lose-shards / decode round-trip with the chosen
/// coding scheme and assert that the recovered data matches the input.
///
/// The "encoding" and "decoding" step durations are printed on stdout (in
/// nanoseconds) by the `timeit_and_print!` macro.
fn run<F: PrimeField>(bytes: &[u8], k: usize, n: usize, seed: u64, coding: Coding) {
    let mut rng = rand::rngs::StdRng::seed_from_u64(seed);

    match coding {
        Coding::Matrix => {
            // encode with a random k x n matrix, drop shards down to k, decode
            let matrix = linalg::Matrix::random(k, n, &mut rng);
            let mut shards = timeit_and_print!("encoding", fec::encode, bytes, &matrix).unwrap();

            random_loss(&mut shards, k, &mut rng);

            let recovered = timeit_and_print!("decoding", fec::decode::<F>, shards).unwrap();
            assert_eq!(bytes, recovered);
        }
        Coding::Fft => {
            // the FFT path only works on power-of-two parameters
            assert_eq!(n.count_ones(), 1, "n must be a power of 2");
            assert_eq!(k.count_ones(), 1, "k must be a power of 2");

            let shards = timeit_and_print!("encoding", encode_fft::<F>, bytes, k, n);

            // NOTE(review): the evaluations are computed a second time here
            // because `encode_fft` consumes its own copy; this doubles the
            // evaluation work but keeps the "encoding" timing self-contained
            let evaluations = fri::evaluate::<F>(bytes, k, n);
            let mut blocks =
                fri::prove::<2, F, Sha3_512, DensePolynomial<F>>(evaluations, shards, 2, 2, 1)
                    .unwrap();

            random_loss(&mut blocks, k, &mut rng);

            let recovered = timeit_and_print!("decoding", fri::decode::<F, Sha3_512>, blocks, n);
            assert_eq!(
                bytes, recovered,
                "decoded data does not match original data"
            );
        }
    }
}
/// Entry point: build a seeded random payload and dispatch the benchmark for
/// the requested finite field and coding scheme.
fn main() {
    let args = Args::parse();

    // deterministic payload so runs are reproducible across invocations
    let mut rng = rand::rngs::StdRng::seed_from_u64(args.seed);
    let mut payload = vec![0u8; args.data_size];
    rng.fill_bytes(&mut payload);

    match args.finite_field {
        FiniteField::FP128 => run::<Fq>(&payload, args.k, args.n, args.seed, args.coding),
    }
}
use ark_ff::PrimeField;
use ark_poly::univariate::DensePolynomial;
use ark_poly::DenseUVPolynomial;
use ark_serialize::CanonicalSerialize;
use ark_std::ops::Div;
use clap::{Parser, ValueEnum};
use rs_merkle::Hasher;
use std::time::Instant;
use ark_bls12_381::Fr as F_BLS12_381;
use dragoonfri_test_utils::Fq as F_128;
use dragoonfri::{
algorithms::{Blake3, Sha3_256, Sha3_512},
dynamic_folding_factor,
};
use komodo::error::KomodoError;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
/// measure the time it takes to apply a function on a set of arguments and returns the result of
/// the call
///
/// ```rust
/// fn add(a: i32, b: i32) -> i32 { a + b }
/// let (res, time) = timeit!(add, 1, 2);
/// ```
/// will be the same as
/// ```rust
/// fn add(a: i32, b: i32) -> i32 { a + b }
/// let (res, time) = {
///     let start = Instant::now();
///     let res = add(1, 2);
///     let time = start.elapsed();
///     (res, time)
/// };
/// ```
macro_rules! timeit {
    ($func:expr, $( $args:expr ),*) => {{
        let start = Instant::now();
        let res = $func( $( $args ),* );
        let time = start.elapsed();
        (res, time)
    }};
}
/// same as [`timeit`] but prints a name and the time at the end directly
///
/// ```rust
/// fn add(a: i32, b: i32) -> i32 { a + b }
/// let res = timeit_and_print!("addition", add, 1, 2);
/// ```
/// will be the same as
/// ```rust
/// fn add(a: i32, b: i32) -> i32 { a + b }
/// let res = {
///     print!("addition: ");
///     let start = Instant::now();
///     let res = add(1, 2);
///     let time = start.elapsed();
///     println!("{}", time.as_nanos());
///     res
/// };
/// ```
macro_rules! timeit_and_print {
    ($name: expr, $func:expr, $( $args:expr ),*) => {{
        print!("{}: ", $name);
        let (res, time) = timeit!($func, $($args),*);
        println!("{}", time.as_nanos());
        res
    }};
}
/// Benchmark the full FRI pipeline over `bytes`: evaluate, encode, prove,
/// verify and decode, printing each step's duration (in nanoseconds) on stdout
/// together with the total compressed size of the proofs and commitments.
///
/// Type parameters:
/// - `N`: the folding factor, as a compile-time constant
/// - `F`: the finite field
/// - `H`: the Merkle-tree hasher
/// - `P`: the dense univariate polynomial type over `F`
///
/// Arguments:
/// - `k`: number of source shards
/// - `n`: total number of shards
/// - `bf`: blowup factor
/// - `rpo`: degree of the remainder polynomial, plus one
/// - `q`: number of FRI queries
fn run<const N: usize, F: PrimeField, H: Hasher, P>(
    bytes: &[u8],
    k: usize,
    n: usize,
    bf: usize,
    rpo: usize,
    q: usize,
) -> Result<(), KomodoError>
where
    P: DenseUVPolynomial<F>,
    for<'a, 'b> &'a P: Div<&'b P, Output = P>,
    <H as rs_merkle::Hasher>::Hash: AsRef<[u8]> + CanonicalSerialize,
{
    let evaluations = timeit_and_print!("evaluating", komodo::fri::evaluate::<F>, &bytes, k, n);

    // keep a copy: `encode` consumes the evaluations and `prove` needs them too
    let evals = evaluations.clone();
    let shards = timeit_and_print!("encoding", komodo::fri::encode::<F>, &bytes, evals, k);

    let blocks = timeit_and_print!(
        "proving",
        komodo::fri::prove::<N, F, H, P>,
        evaluations,
        shards,
        bf,
        rpo,
        q
    );

    let blocks = blocks.unwrap();

    // total compressed size of all proofs and commitments, in bytes
    let proofs: usize = blocks.iter().map(|b| b.proof.compressed_size()).sum();
    let commits: usize = blocks.iter().map(|b| b.commit.compressed_size()).sum();

    println!("proofs: {}", proofs);
    println!("commits: {}", commits);

    // verify each block individually and report the cumulative time
    print!("verifying: ");
    let time: std::time::Duration = blocks
        .iter()
        .cloned()
        .map(|b| {
            let (res, time) = timeit!(komodo::fri::verify::<N, F, H, P>, b, n, q);
            res.unwrap();
            time
        })
        .sum();
    println!("{}", time.as_nanos());

    // any k of the n blocks suffice to decode; take the first k
    let decoded = timeit_and_print!(
        "decoding",
        komodo::fri::decode::<F, H>,
        blocks[0..k].to_vec(),
        n
    );

    assert_eq!(hex::encode(bytes), hex::encode(decoded));

    Ok(())
}
// The supported Merkle-tree hash functions, selected with `--hash`. Regular
// comments are used instead of doc comments on purpose: clap would surface
// `///` text in the generated help output.
#[derive(ValueEnum, Debug, Clone)]
enum Hash {
    BLAKE3,
    SHA3_256,
    SHA3_512,
}
// The supported finite fields, selected with `--finite-field`. Regular
// comments keep clap's generated help unchanged.
#[derive(ValueEnum, Debug, Clone)]
enum FiniteField {
    // maps to `dragoonfri_test_utils::Fq` (see `main`)
    FP128,
    // maps to `ark_bls12_381::Fr` (see `main`)
    BLS12_381,
}
// CLI arguments for the FRI benchmark. Regular comments are used instead of
// doc comments so that clap's generated help is left unchanged.
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
    // number of random bytes to generate and encode
    #[arg(short, long)]
    data_size: usize,

    // RNG seed, for reproducible data generation
    #[arg(long, default_value = "1234")]
    seed: u64,

    // number of source shards
    #[arg(short)]
    k: usize,

    // blowup factor; the total number of shards is k * blowup_factor (see `main`)
    #[arg(short, long)]
    blowup_factor: usize,

    // degree of the FRI remainder polynomial, plus one
    #[arg(short, long)]
    remainder_degree_plus_one: usize,

    // FRI folding factor, resolved to a const generic at dispatch time
    #[arg(short, long)]
    folding_factor: usize,

    // number of FRI queries
    #[arg(short, long)]
    nb_queries: usize,

    // Merkle-tree hash function
    #[arg(long)]
    hash: Hash,

    // finite field to run the benchmark in
    #[arg(long)]
    finite_field: FiniteField,
}
/// expand to a monomorphized [`run`] for field `$f` and hasher `$h`, turning
/// the *runtime* folding factor `$n` into the const generic `N` via
/// `dragoonfri`'s `dynamic_folding_factor!` dispatcher
macro_rules! foo {
    ($n:ident, $f:ident, $h:ident) => {
        dynamic_folding_factor!(
            let N = $n => run::<N, $f, $h, DensePolynomial<$f>>
        )
    }
}
/// Deterministically generate `size` pseudo-random bytes from `seed`.
///
/// The same (size, seed) pair always yields the same byte vector.
fn generate_data(size: usize, seed: u64) -> Vec<u8> {
    let mut rng = StdRng::seed_from_u64(seed);
    let mut data = Vec::with_capacity(size);
    for _ in 0..size {
        // one `gen()` call per byte — same call sequence as a map/collect
        data.push(rng.gen());
    }
    data
}
/// Entry point: parse the CLI arguments, generate the data and dispatch to
/// [`run`] with the requested finite field, hash function and folding factor.
fn main() {
    let args = Args::parse();

    let bytes = generate_data(args.data_size, args.seed);
    println!("loaded {} bytes of data", bytes.len());

    let ff = args.folding_factor;
    // pick the monomorphized `run` for the (field, hash) pair; the runtime
    // folding factor is turned into a const generic by the `foo!` macro
    let f = match args.finite_field {
        FiniteField::FP128 => match args.hash {
            Hash::BLAKE3 => foo!(ff, F_128, Blake3),
            Hash::SHA3_256 => foo!(ff, F_128, Sha3_256),
            Hash::SHA3_512 => foo!(ff, F_128, Sha3_512),
        },
        FiniteField::BLS12_381 => match args.hash {
            Hash::BLAKE3 => foo!(ff, F_BLS12_381, Blake3),
            Hash::SHA3_256 => foo!(ff, F_BLS12_381, Sha3_256),
            Hash::SHA3_512 => foo!(ff, F_BLS12_381, Sha3_512),
        },
    };

    // the total number of shards n is k * blowup_factor
    f(
        &bytes,
        args.k,
        args.k * args.blowup_factor,
        args.blowup_factor,
        args.remainder_degree_plus_one,
        args.nb_queries,
    )
    .unwrap()
}