Skip to content
Snippets Groups Projects
Commit b567b1bd authored by DISSOUBRAY Nathan's avatar DISSOUBRAY Nathan Committed by STEVAN Antoine
Browse files

Move functions defined in the main to the lib (dragoon/komodo!47)

## Description

Those functions in the main could be useful later as they are operations that an external user might actually want to do directly (instead of just rewriting the functions in the main).

## List of changes
- Created a `dump` function (used by both `dump_blocks` and `generate_powers`, as they both write to disk)
- Created an `fs` module for actions related to writing on disk
- Moved functions into their relevant modules (though some placements might be up for discussion, as noted in some commits)
- Used `anyhow` to be able to return `Result` from functions with multiple error types (previously they just threw errors in the main)

## Additional notes

Should I include the example I was working on (i.e. writing blocks of files recursively)? That example is what prompted me to move these functions from the main to the lib in the first place, as I noticed I was rewriting what was already in the main in order to implement it.
parent 5d1cb661
No related branches found
No related tags found
No related merge requests found
...@@ -6,6 +6,7 @@ edition = "2021" ...@@ -6,6 +6,7 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies] [dependencies]
anyhow = "1.0.81"
ark-bls12-381 = "0.4.0" ark-bls12-381 = "0.4.0"
ark-ec = "0.4.2" ark-ec = "0.4.2"
ark-ff = "0.4.2" ark-ff = "0.4.2"
......
use std::{
fs::File,
io::prelude::*,
path::{Path, PathBuf},
};
use anyhow::Result;
use ark_ec::pairing::Pairing;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use rs_merkle::{algorithms::Sha256, Hasher};
use tracing::info;
use crate::Block;
const COMPRESS: Compress = Compress::Yes;
const VALIDATE: Validate = Validate::Yes;
/// Serialize `dumpable` and write it into `dump_dir`.
///
/// If `filename` is `None`, the file is named after the SHA-256 hash of the
/// serialized bytes, rendered as lowercase hex.
///
/// Returns the full path of the file that was written.
///
/// # Errors
/// Fails if serialization fails or if the file cannot be created/written.
pub fn dump(
    dumpable: &impl CanonicalSerialize,
    dump_dir: &Path,
    filename: Option<&str>,
) -> Result<PathBuf> {
    info!("serializing the dumpable");
    let mut serialized = vec![0; dumpable.serialized_size(COMPRESS)];
    dumpable.serialize_with_mode(&mut serialized[..], COMPRESS)?;

    let filename = match filename {
        Some(filename) => filename.to_string(),
        // NOTE: `{:02x}` keeps every byte two hex digits wide; a bare `{:x}`
        // would drop leading zeros and make distinct digests collide
        // (e.g. [0x01, 0x23] and [0x12, 0x03] would both render as "123").
        None => Sha256::hash(&serialized)
            .iter()
            .map(|x| format!("{:02x}", x))
            .collect::<Vec<_>>()
            .join(""),
    };

    // ensure the target directory exists so every caller doesn't have to
    std::fs::create_dir_all(dump_dir)?;

    let dump_path = dump_dir.join(filename);
    info!("dumping dumpable into `{:?}`", dump_path);
    let mut file = File::create(&dump_path)?;
    file.write_all(&serialized)?;

    Ok(dump_path)
}
/// Dump all `blocks` into `block_dir` (created if missing), one file per
/// block, each named after the hash of its contents (see [`dump`]).
///
/// Returns a formatted list of the block *hashes* (file names only, not full
/// paths), e.g. `["abc…","def…",]`, suitable for feeding back to
/// [`read_blocks`], which joins each hash onto `block_dir` itself.
///
/// # Errors
/// Fails if the directory cannot be created or any block cannot be dumped.
pub fn dump_blocks<E: Pairing>(blocks: &[Block<E>], block_dir: &PathBuf) -> Result<String> {
    info!("dumping blocks to `{:?}`", block_dir);
    std::fs::create_dir_all(block_dir)?;

    let mut hashes = vec![];
    for block in blocks.iter() {
        let path = dump(block, block_dir, None)?;
        // `dump` returns the full path; keep only the file name (the hash),
        // otherwise the printed list would contain paths that `read_blocks`
        // would then wrongly re-join onto `block_dir`.
        let hash = path
            .file_name()
            .map(|f| f.to_string_lossy().into_owned())
            .unwrap_or_default();
        hashes.push(hash);
    }

    // build the output with in-place appends instead of re-allocating the
    // whole string on every iteration
    let mut formatted_output = String::from("[");
    for hash in &hashes {
        formatted_output.push_str(&format!("{:?},", hash));
    }
    formatted_output.push(']');

    Ok(formatted_output)
}
/// Read blocks back from `block_dir`, one file per hash in `block_hashes`.
///
/// Returns each hash paired with its deserialized [`Block`].
///
/// # Errors
/// Fails if any file cannot be read or deserialized; the first error aborts
/// the whole read.
pub fn read_blocks<E: Pairing>(
    block_hashes: &[String],
    block_dir: &Path,
) -> Result<Vec<(String, Block<E>)>> {
    let mut blocks = Vec::with_capacity(block_hashes.len());
    for hash in block_hashes {
        let bytes = std::fs::read(block_dir.join(hash))?;
        let block = Block::<E>::deserialize_with_mode(&bytes[..], COMPRESS, VALIDATE)?;
        blocks.push((hash.clone(), block));
    }
    Ok(blocks)
}
...@@ -4,14 +4,14 @@ use ark_ec::pairing::Pairing; ...@@ -4,14 +4,14 @@ use ark_ec::pairing::Pairing;
use ark_poly::DenseUVPolynomial; use ark_poly::DenseUVPolynomial;
use ark_poly_commit::kzg10::{Commitment, Powers, Randomness, KZG10}; use ark_poly_commit::kzg10::{Commitment, Powers, Randomness, KZG10};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::UniformRand; use ark_std::{UniformRand, Zero};
use ark_std::Zero;
use fec::combine; use fec::combine;
use tracing::{debug, info}; use tracing::{debug, info};
mod error; mod error;
pub mod fec; pub mod fec;
pub mod field; pub mod field;
pub mod fs;
pub mod linalg; pub mod linalg;
pub mod setup; pub mod setup;
......
use std::io::prelude::*;
use std::ops::Div; use std::ops::Div;
use std::path::Path; use std::path::{Path, PathBuf};
use std::process::exit; use std::process::exit;
use std::{fs::File, path::PathBuf};
use anyhow::Result;
use ark_bls12_381::Bls12_381; use ark_bls12_381::Bls12_381;
use ark_ec::pairing::Pairing; use ark_ec::pairing::Pairing;
...@@ -10,14 +10,13 @@ use ark_ff::PrimeField; ...@@ -10,14 +10,13 @@ use ark_ff::PrimeField;
use ark_poly::univariate::DensePolynomial; use ark_poly::univariate::DensePolynomial;
use ark_poly::DenseUVPolynomial; use ark_poly::DenseUVPolynomial;
use ark_poly_commit::kzg10::Powers; use ark_poly_commit::kzg10::Powers;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate}; use ark_serialize::{CanonicalDeserialize, Compress, Validate};
use rs_merkle::algorithms::Sha256; use tracing::{info, warn};
use rs_merkle::Hasher;
use tracing::{debug, info, warn};
use komodo::{ use komodo::{
encode, encode,
fec::{decode, Shard}, fec::{decode, Shard},
fs,
linalg::Matrix, linalg::Matrix,
recode, setup, verify, Block, recode, setup, verify, Block,
}; };
...@@ -119,108 +118,43 @@ fn throw_error(code: i32, message: &str) { ...@@ -119,108 +118,43 @@ fn throw_error(code: i32, message: &str) {
exit(code); exit(code);
} }
fn generate_powers(n: usize, powers_file: &PathBuf) -> Result<(), std::io::Error> { pub fn generate_random_powers<E, P>(
n: usize,
powers_dir: &Path,
powers_filename: Option<&str>,
) -> Result<()>
where
E: Pairing,
P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>,
{
info!("generating new powers"); info!("generating new powers");
let powers = setup::random::<Bls12_381, UniPoly12_381>(n).unwrap_or_else(|_| { let powers = setup::random::<E, P>(n)?;
throw_error(3, "could not generate random trusted setup");
unreachable!()
});
info!("serializing powers");
let mut serialized = vec![0; powers.serialized_size(COMPRESS)];
powers
.serialize_with_mode(&mut serialized[..], COMPRESS)
.unwrap_or_else(|_| throw_error(3, "could not serialize trusted setup"));
info!("dumping powers into `{:?}`", powers_file); fs::dump(&powers, powers_dir, powers_filename)?;
let mut file = File::create(powers_file)?;
file.write_all(&serialized)?;
Ok(()) Ok(())
} }
fn read_block<E: Pairing>(block_hashes: &[String], block_dir: &Path) -> Vec<(String, Block<E>)> { pub fn verify_blocks<E, P>(
block_hashes blocks: &[(String, Block<E>)],
.iter() powers: Powers<E>,
.map(|f| { ) -> Result<(), ark_poly_commit::Error>
let filename = block_dir.join(f);
let s = std::fs::read(filename).unwrap_or_else(|_| {
throw_error(2, &format!("could not read block {}", f));
unreachable!()
});
(
f.clone(),
Block::<E>::deserialize_with_mode(&s[..], COMPRESS, VALIDATE).unwrap_or_else(
|_| {
throw_error(2, &format!("could not deserialize block {}", f));
unreachable!()
},
),
)
})
.collect::<Vec<_>>()
}
fn verify_blocks<E, P>(blocks: &[(String, Block<E>)], powers: Powers<E>)
where where
E: Pairing, E: Pairing,
P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>, P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>, for<'a, 'b> &'a P: Div<&'b P, Output = P>,
{ {
let res: Vec<_> = blocks let res = blocks
.iter() .iter()
.map(|(f, b)| { .map(|(f, b)| Ok((f, verify::<E, P>(b, &powers)?)))
( .collect::<Result<Vec<(&String, bool)>, ark_poly_commit::Error>>()?;
f,
verify::<E, P>(b, &powers).unwrap_or_else(|_| {
throw_error(
4,
&format!("verification failed unexpectedly for block {}", f),
);
unreachable!()
}),
)
})
.collect();
eprint!("["); eprint!("[");
for (f, v) in res { for (f, v) in res {
eprint!("{{block: {:?}, status: {}}}", f, v); eprint!("{{block: {:?}, status: {}}}", f, v);
} }
eprint!("]"); eprint!("]");
}
fn dump_blocks<E: Pairing>(blocks: &[Block<E>], block_dir: &PathBuf) -> Result<(), std::io::Error> {
info!("dumping blocks to `{:?}`", block_dir);
let mut hashes = vec![];
for (i, block) in blocks.iter().enumerate() {
debug!("serializing block {}", i);
let mut serialized = vec![0; block.serialized_size(COMPRESS)];
block
.serialize_with_mode(&mut serialized[..], COMPRESS)
.unwrap_or_else(|_| throw_error(5, &format!("could not serialize block {}", i)));
let repr = Sha256::hash(&serialized)
.iter()
.map(|x| format!("{:x}", x))
.collect::<Vec<_>>()
.join("");
let filename = block_dir.join(&repr);
std::fs::create_dir_all(block_dir)?;
debug!("dumping serialized block to `{:?}`", filename);
let mut file = File::create(&filename)?;
file.write_all(&serialized)?;
hashes.push(repr);
}
eprint!("[");
for hash in &hashes {
eprint!("{:?},", hash);
}
eprint!("]");
Ok(()) Ok(())
} }
...@@ -244,17 +178,27 @@ fn main() { ...@@ -244,17 +178,27 @@ fn main() {
let home_dir = PathBuf::from(&home_dir); let home_dir = PathBuf::from(&home_dir);
let block_dir = home_dir.join("blocks/"); let block_dir = home_dir.join("blocks/");
let powers_file = home_dir.join("powers"); let powers_dir = home_dir;
let powers_filename = "powers";
let powers_file = powers_dir.join(powers_filename);
if do_generate_powers { if do_generate_powers {
generate_powers(nb_bytes, &powers_file) generate_random_powers::<Bls12_381, UniPoly12_381>(
.unwrap_or_else(|e| throw_error(1, &format!("could not generate powers: {}", e))); nb_bytes,
&powers_dir,
Some(powers_filename),
)
.unwrap_or_else(|e| throw_error(1, &format!("could not generate powers: {}", e)));
exit(0); exit(0);
} }
if do_reconstruct_data { if do_reconstruct_data {
let blocks: Vec<Shard<Bls12_381>> = read_block::<Bls12_381>(&block_hashes, &block_dir) let blocks: Vec<Shard<Bls12_381>> = fs::read_blocks::<Bls12_381>(&block_hashes, &block_dir)
.unwrap_or_else(|e| {
throw_error(1, &format!("could not read blocks: {}", e));
unreachable!()
})
.iter() .iter()
.cloned() .cloned()
.map(|b| b.1.shard) .map(|b| b.1.shard)
...@@ -271,9 +215,12 @@ fn main() { ...@@ -271,9 +215,12 @@ fn main() {
} }
if do_combine_blocks { if do_combine_blocks {
let blocks = read_block::<Bls12_381>(&block_hashes, &block_dir); let blocks = fs::read_blocks::<Bls12_381>(&block_hashes, &block_dir).unwrap_or_else(|e| {
throw_error(1, &format!("could not read blocks: {}", e));
unreachable!()
});
dump_blocks( let formatted_output = fs::dump_blocks(
&[ &[
recode(&blocks.iter().map(|(_, b)| b).cloned().collect::<Vec<_>>()) recode(&blocks.iter().map(|(_, b)| b).cloned().collect::<Vec<_>>())
.unwrap_or_else(|e| { .unwrap_or_else(|e| {
...@@ -287,13 +234,21 @@ fn main() { ...@@ -287,13 +234,21 @@ fn main() {
], ],
&block_dir, &block_dir,
) )
.unwrap_or_else(|e| throw_error(1, &format!("could not dump block: {}", e))); .unwrap_or_else(|e| {
throw_error(1, &format!("could not dump block: {}", e));
unreachable!()
});
eprint!("{}", formatted_output);
exit(0); exit(0);
} }
if do_inspect_blocks { if do_inspect_blocks {
let blocks = read_block::<Bls12_381>(&block_hashes, &block_dir); let blocks = fs::read_blocks::<Bls12_381>(&block_hashes, &block_dir).unwrap_or_else(|e| {
throw_error(1, &format!("could not read blocks: {}", e));
unreachable!()
});
eprint!("["); eprint!("[");
for (_, block) in &blocks { for (_, block) in &blocks {
eprint!("{},", block); eprint!("{},", block);
...@@ -325,9 +280,16 @@ fn main() { ...@@ -325,9 +280,16 @@ fn main() {
if do_verify_blocks { if do_verify_blocks {
verify_blocks::<Bls12_381, UniPoly12_381>( verify_blocks::<Bls12_381, UniPoly12_381>(
&read_block::<Bls12_381>(&block_hashes, &block_dir), &fs::read_blocks::<Bls12_381>(&block_hashes, &block_dir).unwrap_or_else(|e| {
throw_error(1, &format!("could not read blocks: {}", e));
unreachable!()
}),
powers, powers,
); )
.unwrap_or_else(|e| {
throw_error(1, &format!("Failed to verify blocks: {}", e));
unreachable!()
});
exit(0); exit(0);
} }
...@@ -348,12 +310,17 @@ fn main() { ...@@ -348,12 +310,17 @@ fn main() {
} }
}; };
dump_blocks( let formatted_output = fs::dump_blocks(
&encode::<Bls12_381, UniPoly12_381>(&bytes, &encoding_mat, &powers).unwrap_or_else(|e| { &encode::<Bls12_381, UniPoly12_381>(&bytes, &encoding_mat, &powers).unwrap_or_else(|e| {
throw_error(1, &format!("could not encode: {}", e)); throw_error(1, &format!("could not encode: {}", e));
unreachable!() unreachable!()
}), }),
&block_dir, &block_dir,
) )
.unwrap_or_else(|e| throw_error(1, &format!("could not dump blocks: {}", e))); .unwrap_or_else(|e| {
throw_error(1, &format!("could not dump blocks: {}", e));
unreachable!()
});
eprint!("{}", formatted_output);
} }
use std::ops::Div; use std::ops::Div;
use anyhow::Result;
use ark_ec::pairing::Pairing; use ark_ec::pairing::Pairing;
use ark_ff::PrimeField; use ark_ff::PrimeField;
use ark_poly::DenseUVPolynomial; use ark_poly::DenseUVPolynomial;
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment