Commit 4493022b authored by STEVAN Antoine :crab:

add support for decoding recoded shards (!13)

- should close #3
- based on top of !12

> **Note**  
> - commits containing "_DEBUG_" will be removed once this is done
> - this MR is based on !12 and will be rebased on top of `main` once !12 lands

I think this is best [reviewed commit by commit](!13 (58cec473)).
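For context, here is a sketch of the underlying math (my notation, not spelled out in this MR): every shard carries the coefficients of the linear combination of original encoded shards it was built from, so the decoder can rebuild the effective encoding matrix from those coefficients and invert it, as long as the received combinations are linearly independent.

```latex
% sketch, in the notation of a Vandermonde-based code
% u = (u_0, \dots, u_{k-1})        source shards
% V \in \mathbb{F}^{k \times n}    Vandermonde matrix, V_{m,i} = \alpha_i^m
% s_i = \sum_m u_m V_{m,i}         original encoded shards
% a received shard j carries coefficients c_j with
%     s'_j = \sum_{i < n} c_{j,i} \, s_i
% stacking the c_j as rows of C,
%     S' = u \, (V C^{\top})
% so decoding solves
%     u = S' \, (V C^{\top})^{-1}
```

This matches the new `decode` below: each `linear_combination` is padded to length `n`, the product of the Vandermonde matrix with the transposed combination matrix is truncated back to a square `k x k` matrix, and that matrix is inverted.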
parent ab7c61f2
1 merge request: !13 add support for decoding recoded shards
Pipeline #3945 passed
@@ -13,6 +13,7 @@ ark-poly = "0.4.2"
ark-poly-commit = "0.4.0"
ark-serialize = "0.4.2"
ark-std = "0.4.0"
rand = "0.8.5"
rs_merkle = "1.4.1"
thiserror = "1.0.50"
tracing = "0.1.40"
@@ -11,7 +11,7 @@ def "nu-complete log-levels" []: nothing -> list<string> {
}
def run-komodo [
-args: record<bytes: path, k: int, n: int, do_generate_powers: bool, powers_file: path, do_reconstruct_data: bool, do_verify_blocks: bool, block_files: list<string>>,
+args: record<bytes: path, k: int, n: int, do_generate_powers: bool, powers_file: path, do_reconstruct_data: bool, do_verify_blocks: bool, do_combine_blocks: bool, block_files: list<string>>,
--log-level: string,
]: nothing -> any {
with-env {RUST_LOG: $log_level} {
@@ -24,6 +24,7 @@ def run-komodo [
$args.powers_file
($args.do_reconstruct_data | into string)
($args.do_verify_blocks | into string)
($args.do_combine_blocks | into string)
] | append $args.block_files)
} | complete
@@ -49,6 +50,7 @@ export def "komodo setup" [
powers_file: $powers_file,
do_reconstruct_data: false,
do_verify_blocks: false,
do_combine_blocks: false,
block_files: [],
}
}
@@ -67,6 +69,7 @@ export def "komodo prove" [
powers_file: $powers_file,
do_reconstruct_data: false,
do_verify_blocks: false,
do_combine_blocks: false,
block_files: [],
}
}
@@ -84,6 +87,7 @@ export def "komodo verify" [
powers_file: $powers_file,
do_reconstruct_data: false,
do_verify_blocks: true,
do_combine_blocks: false,
block_files: $blocks,
}
}
@@ -100,6 +104,24 @@ export def "komodo reconstruct" [
powers_file: "",
do_reconstruct_data: true,
do_verify_blocks: false,
do_combine_blocks: false,
block_files: $blocks,
}
}
export def "komodo combine" [
...blocks: path,
--log-level: string@"nu-complete log-levels" = "INFO"
]: nothing -> list<int> {
run-komodo --log-level $log_level {
bytes: "",
k: 0,
n: 0,
do_generate_powers: false,
powers_file: "",
do_reconstruct_data: false,
do_verify_blocks: false,
do_combine_blocks: true,
block_files: $blocks,
}
}
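# usage sketch (hypothetical, not part of the diff): recode the first two
# blocks produced by `komodo prove`, assuming they were dumped to `blocks/`:
#
#     komodo combine (ls blocks).0.name (ls blocks).1.name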
@@ -73,25 +73,39 @@ impl<E: Pairing> Shard<E> {
pub fn decode<E: Pairing>(blocks: Vec<Shard<E>>, transpose: bool) -> Result<Vec<u8>, KomodoError> {
let k = blocks[0].k;
let np = blocks.len();
-if blocks.len() < k as usize {
-return Err(KomodoError::TooFewShards(blocks.len(), k as usize));
+if np < k as usize {
+return Err(KomodoError::TooFewShards(np, k as usize));
}
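// NOTE: `n` is one more than the largest index hit by a nonzero coefficient
// across all the received linear combinations, i.e. the number of original
// encoded shards those combinations may involve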
-let points: Vec<_> = blocks
+let n = 1 + blocks
.iter()
-.take(k as usize)
-.map(|b| {
-// TODO: use the real linear combination
-let first_non_zero = b
-.linear_combination
+.flat_map(|b| {
+b.linear_combination
.iter()
.enumerate()
.filter(|(_, l)| !l.is_zero())
-.collect::<Vec<_>>()[0];
-E::ScalarField::from_le_bytes_mod_order(&(first_non_zero.0 as u64).to_le_bytes())
+.map(|(i, _)| i)
+.collect::<Vec<_>>()
})
-.collect();
+.max()
+.unwrap();
+let points: Vec<E::ScalarField> = (0..n)
+.map(|i| E::ScalarField::from_le_bytes_mod_order(&[i as u8]))
+.collect();
+let encoding_mat = Matrix::vandermonde(&points, k as usize);
+let lin_comb_mat = Matrix::from_vec_vec(
+blocks
+.iter()
+.map(|b| {
+let mut comb = b.linear_combination.clone();
+comb.resize(n, E::ScalarField::zero());
+comb
+})
+.collect(),
+)?;
let shards = Matrix::from_vec_vec(
blocks
@@ -102,7 +116,11 @@ pub fn decode<E: Pairing>(blocks: Vec<Shard<E>>, transpose: bool) -> Result<Vec<
)?
.transpose();
-let source_shards = shards.mul(&Matrix::vandermonde(&points, k as usize).invert()?)?;
+let ra = encoding_mat
+.mul(&lin_comb_mat.transpose())?
+.truncate(None, Some(np - k as usize));
+let source_shards = shards.mul(&ra.invert()?)?;
let source_shards = if transpose {
source_shards.transpose().elements
} else {
@@ -134,7 +152,7 @@ mod tests {
}
#[allow(clippy::expect_fun_call)]
-fn decoding_template<E: Pairing>(data: &[u8], k: usize, n: usize) {
+fn encode<E: Pairing>(data: &[u8], k: usize, n: usize) -> Vec<Shard<E>> {
let hash = Sha256::hash(data).to_vec();
let points: Vec<E::ScalarField> = (0..n)
@@ -153,7 +171,7 @@ mod tests {
data.len()
));
-let shards = source_shards
+source_shards
.mul(&encoding)
.expect(&format!("could not encode shards ({} bytes)", data.len()))
.transpose()
@@ -173,12 +191,14 @@ mod tests {
size: data.len(),
}
})
-.collect();
+.collect()
+}
+fn decoding_template<E: Pairing>(data: &[u8], k: usize, n: usize) {
assert_eq!(
data,
-decode::<E>(shards, false)
-.expect(&format!("could not decode shards ({} bytes)", data.len()))
+decode::<E>(encode(data, k, n), false)
+.unwrap_or_else(|_| panic!("could not decode shards ({} bytes)", data.len()))
);
}
@@ -194,6 +214,37 @@ mod tests {
}
}
fn decoding_with_recoding_template<E: Pairing>(data: &[u8], k: usize, n: usize) {
let mut shards = encode(data, k, n);
shards[1] = shards[2].combine(
E::ScalarField::from_le_bytes_mod_order(&[7]),
&shards[4],
E::ScalarField::from_le_bytes_mod_order(&[6]),
);
shards[2] = shards[1].combine(
E::ScalarField::from_le_bytes_mod_order(&[5]),
&shards[3],
E::ScalarField::from_le_bytes_mod_order(&[4]),
);
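// the second recoding reuses the already-recoded shards[1], so this also
// exercises combinations of combinations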
assert_eq!(
data,
decode::<E>(shards, false)
.unwrap_or_else(|_| panic!("could not decode shards ({} bytes)", data.len()))
);
}
#[test]
fn decoding_with_recoding() {
let bytes = bytes();
let (k, n) = (3, 5);
let modulus_byte_size = <Bls12_381 as Pairing>::ScalarField::MODULUS_BIT_SIZE as usize / 8;
// NOTE: starting at `modulus_byte_size * (k - 1) + 1` to include at least _k_ elements
for b in (modulus_byte_size * (k - 1) + 1)..bytes.len() {
decoding_with_recoding_template::<Bls12_381>(&bytes[..b], k, n);
}
}
fn create_fake_shard<E: Pairing>(
linear_combination: &[E::ScalarField],
bytes: &[u8],
@@ -5,6 +5,7 @@ use ark_ff::{Field, PrimeField};
use ark_poly::DenseUVPolynomial;
use ark_poly_commit::kzg10::{Commitment, Powers, Randomness, KZG10};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::UniformRand;
use ark_std::{One, Zero};
use rs_merkle::algorithms::Sha256;
use rs_merkle::Hasher;
@@ -19,8 +20,8 @@ pub mod setup;
#[derive(Debug, Default, Clone, PartialEq, CanonicalSerialize, CanonicalDeserialize)]
pub struct Block<E: Pairing> {
pub shard: fec::Shard<E>,
-commit: Vec<Commitment<E>>,
-m: usize,
+pub commit: Vec<Commitment<E>>,
+pub m: usize,
}
#[allow(clippy::type_complexity)]
@@ -142,6 +143,19 @@
prove::<E, P>(commits, hash, bytes.len(), polynomials, &points)
}
pub fn recode<E: Pairing>(b1: &Block<E>, b2: &Block<E>) -> Block<E> {
let mut rng = rand::thread_rng();
let alpha = E::ScalarField::rand(&mut rng);
let beta = E::ScalarField::rand(&mut rng);
Block {
shard: b1.shard.combine(alpha, &b2.shard, beta),
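// every block of a given file carries the same commitments, namely those of
// the k source polynomials, so b1's can be reused as-is for the recoded block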
commit: b1.commit.clone(),
m: b1.m,
}
}
pub fn verify<E, P>(
block: &Block<E>,
verifier_key: &Powers<E>,
@@ -205,12 +219,11 @@ mod tests {
use ark_ff::{Field, PrimeField};
use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};
use ark_poly_commit::kzg10::Commitment;
-use ark_std::One;
use crate::{
batch_verify, encode,
fec::{decode, Shard},
-setup, verify, Block,
+recode, setup, verify, Block,
};
type UniPoly381 = DensePolynomial<<Bls12_381 as Pairing>::ScalarField>;
@@ -372,43 +385,8 @@
let powers = setup::random(bytes.len())?;
let blocks = encode::<E, P>(bytes, k, n, &powers)?;
-let block = Block {
-shard: blocks[0].shard.mul(E::ScalarField::one()),
-commit: blocks[0].commit.clone(),
-m: blocks[0].m,
-};
-assert!(verify::<E, P>(&block, &powers)?);
-let block = Block {
-shard: blocks[3]
-.shard
-.mul(E::ScalarField::from_le_bytes_mod_order(&[2u8])),
-commit: blocks[3].commit.clone(),
-m: blocks[3].m,
-};
-assert!(verify::<E, P>(&block, &powers)?);
-let block = Block {
-shard: blocks[3].shard.combine(
-E::ScalarField::from_le_bytes_mod_order(&[2u8]),
-&blocks[2].shard,
-E::ScalarField::from_le_bytes_mod_order(&[5u8]),
-),
-commit: blocks[3].commit.clone(),
-m: blocks[3].m,
-};
-assert!(verify::<E, P>(&block, &powers)?);
-let block = Block {
-shard: blocks[3].shard.combine(
-E::ScalarField::from_le_bytes_mod_order(&[3u8]),
-&blocks[5].shard,
-E::ScalarField::from_le_bytes_mod_order(&[4u8]),
-),
-commit: block.commit.clone(),
-m: block.m,
-};
-assert!(verify::<E, P>(&block, &powers)?);
+assert!(verify::<E, P>(&recode(&blocks[2], &blocks[3]), &powers)?);
+assert!(verify::<E, P>(&recode(&blocks[3], &blocks[5]), &powers)?);
Ok(())
}
@@ -469,4 +447,50 @@ mod tests {
end_to_end_template::<Bls12_381, UniPoly381>(&bytes[0..(bytes.len() - 10)], 4, 6)
.expect("end to end failed for bls12-381 with padding");
}
fn end_to_end_with_recoding_template<E, P>(bytes: &[u8]) -> Result<(), ark_poly_commit::Error>
where
E: Pairing,
P: DenseUVPolynomial<E::ScalarField, Point = E::ScalarField>,
for<'a, 'b> &'a P: Div<&'b P, Output = P>,
{
let powers = setup::random(bytes.len())?;
let blocks = encode::<E, P>(bytes, 3, 5, &powers)?;
let b_0_1 = recode(&blocks[0], &blocks[1]);
let shards = vec![
b_0_1.shard,
blocks[2].shard.clone(),
blocks[3].shard.clone(),
];
assert_eq!(bytes, decode::<E>(shards, true).unwrap());
let b_0_1 = recode(&blocks[0], &blocks[1]);
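// blocks[0], blocks[1] and a combination of the two only span a rank-2
// space, which is less than k = 3, so decoding must fail here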
let shards = vec![
blocks[0].shard.clone(),
blocks[1].shard.clone(),
b_0_1.shard,
];
assert!(decode::<E>(shards, true).is_err());
let b_0_1 = recode(&blocks[0], &blocks[1]);
let b_2_3 = recode(&blocks[2], &blocks[3]);
let b_1_4 = recode(&blocks[1], &blocks[4]);
let shards = vec![b_0_1.shard, b_2_3.shard, b_1_4.shard];
assert_eq!(bytes, decode::<E>(shards, true).unwrap());
let fully_recoded_shards = (0..3)
.map(|_| recode(&recode(&blocks[0], &blocks[1]), &blocks[2]).shard)
.collect();
assert_eq!(bytes, decode::<E>(fully_recoded_shards, true).unwrap());
Ok(())
}
#[test]
fn end_to_end_with_recoding_2() {
let bytes = bytes::<Bls12_381>(4, 2);
end_to_end_with_recoding_template::<Bls12_381, UniPoly381>(&bytes)
.expect("end to end failed for bls12-381");
}
}
@@ -183,6 +183,35 @@ impl<T: Field> Matrix<T> {
width,
}
}
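/// NOTE: `rows` and `cols` are the numbers of rows and columns to drop from
/// the bottom and the right of the matrix, not the target dimensions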
pub(super) fn truncate(&self, rows: Option<usize>, cols: Option<usize>) -> Self {
let width = if let Some(w) = cols {
self.width - w
} else {
self.width
};
let height = if let Some(h) = rows {
self.height - h
} else {
self.height
};
let mut elements = Vec::new();
elements.resize(height * width, T::zero());
for i in 0..height {
for j in 0..width {
elements[i * width + j] = self.get(i, j);
}
}
Self {
elements,
height,
width,
}
}
}
#[cfg(test)]
@@ -388,4 +417,26 @@ mod tests {
assert_eq!(matrix.transpose(), transpose);
}
#[test]
fn truncate() {
let matrix = Matrix::from_vec_vec(vec![
vec![Fr::from(1), Fr::from(2), Fr::from(3), Fr::from(10)],
vec![Fr::from(4), Fr::from(5), Fr::from(6), Fr::from(11)],
vec![Fr::from(7), Fr::from(8), Fr::from(9), Fr::from(12)],
])
.unwrap();
assert_eq!(matrix.truncate(None, None), matrix);
assert_eq!(matrix.truncate(Some(0), None), matrix);
assert_eq!(matrix.truncate(None, Some(0)), matrix);
assert_eq!(matrix.truncate(Some(0), Some(0)), matrix);
let truncated = Matrix::from_vec_vec(vec![
vec![Fr::from(1), Fr::from(2)],
vec![Fr::from(4), Fr::from(5)],
])
.unwrap();
assert_eq!(matrix.truncate(Some(1), Some(2)), truncated);
}
}
@@ -9,6 +9,9 @@ use ark_poly::univariate::DensePolynomial;
use ark_poly::DenseUVPolynomial;
use ark_poly_commit::kzg10::Powers;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use komodo::recode;
use rs_merkle::algorithms::Sha256;
use rs_merkle::Hasher;
use tracing::{debug, info, warn};
use komodo::{
@@ -23,7 +26,17 @@ const COMPRESS: Compress = Compress::Yes;
const VALIDATE: Validate = Validate::Yes;
const BLOCK_DIR: &str = "blocks/";
-fn parse_args() -> (Vec<u8>, usize, usize, bool, String, bool, bool, Vec<String>) {
+fn parse_args() -> (
+Vec<u8>,
+usize,
+usize,
+bool,
+String,
+bool,
+bool,
+bool,
+Vec<String>,
+) {
let bytes_path = std::env::args()
.nth(1)
.expect("expected path to bytes as first positional argument");
@@ -60,7 +73,12 @@ fn parse_args() -> (Vec<u8>, usize, usize, bool, String, bool, bool, Vec<String>
.expect("expected do_verify_blocks as seventh positional argument")
.parse()
.expect("could not parse do_verify_blocks as a bool");
-let block_files = std::env::args().skip(8).collect::<Vec<_>>();
+let do_combine_blocks: bool = std::env::args()
+.nth(8)
+.expect("expected do_combine_blocks as eighth positional argument")
+.parse()
+.expect("could not parse do_combine_blocks as a bool");
+let block_files = std::env::args().skip(9).collect::<Vec<_>>();
(
bytes,
......@@ -70,6 +88,7 @@ fn parse_args() -> (Vec<u8>, usize, usize, bool, String, bool, bool, Vec<String>
powers_file,
do_reconstruct_data,
do_verify_blocks,
do_combine_blocks,
block_files,
)
}
@@ -96,11 +115,8 @@ fn generate_powers(bytes: &[u8], powers_file: &str) -> Result<(), std::io::Error
fn read_block<E: Pairing>(block_files: &[String]) -> Vec<(String, Block<E>)> {
block_files
.iter()
-.filter_map(|f| match std::fs::read(f) {
-Ok(bytes) => Some((f, bytes)),
-Err(_) => None,
-})
-.map(|(f, s)| {
+.map(|f| {
+let s = std::fs::read(f).unwrap_or_else(|_| panic!("could not read {}", f));
(
f.clone(),
// FIXME: do not unwrap and return an error
@@ -132,11 +148,23 @@
fn dump_blocks<E: Pairing>(blocks: &[Block<E>]) -> Result<(), std::io::Error> {
info!("dumping blocks to `{}`", BLOCK_DIR);
let mut block_files = vec![];
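// name each dumped block after the SHA-256 digest of its serialized linear
// combination: unlike positional indices, these names stay unique when
// recoded blocks are written next to existing ones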
-for (i, block) in blocks.iter().enumerate() {
-let filename = PathBuf::from(BLOCK_DIR).join(format!("{}.bin", i));
+for block in blocks {
+let mut serialized = vec![0; block.shard.linear_combination.serialized_size(COMPRESS)];
+block
+.shard
+.linear_combination
+.serialize_with_mode(&mut serialized[..], COMPRESS)
+.unwrap();
+let repr = Sha256::hash(&serialized)
+.iter()
+.map(|x| format!("{:02x}", x))
+.collect::<Vec<_>>()
+.join("");
+let filename = PathBuf::from(BLOCK_DIR).join(format!("{}.bin", repr));
std::fs::create_dir_all(BLOCK_DIR)?;
debug!("serializing block {}", i);
debug!("serializing block {}", repr);
let mut serialized = vec![0; block.serialized_size(COMPRESS)];
// FIXME: do not unwrap and return an error with std::io::Error
block
@@ -170,6 +198,7 @@ fn main() {
powers_file,
do_reconstruct_data,
do_verify_blocks,
do_combine_blocks,
block_files,
) = parse_args();
@@ -189,6 +218,18 @@
exit(0);
}
if do_combine_blocks {
let blocks = read_block::<Bls12_381>(&block_files);
if blocks.len() != 2 {
eprintln!("expected exactly 2 blocks, found {}", blocks.len());
exit(1);
}
dump_blocks(&[recode(&blocks[0].1, &blocks[1].1)]).unwrap();
exit(0);
}
info!("reading powers from file `{}`", powers_file);
let powers = if let Ok(serialized) = std::fs::read(&powers_file) {
info!("deserializing the powers from `{}`", powers_file);
@@ -123,7 +123,7 @@ const FEC_PARAMS = {k: 3, n: 5}
def test [blocks: list<int>] {
let actual = try {
-komodo reconstruct ...($blocks | each { $"blocks/($in).bin" }) | bytes decode
+komodo reconstruct ...($blocks | each {|i| ls blocks | get name | get $i}) | bytes decode
} catch {
error make --unspanned { msg: "woopsie" }
}
@@ -138,7 +138,7 @@ def main [] {
komodo setup $BYTES
komodo prove $BYTES --fec-params $FEC_PARAMS
-komodo verify blocks/0.bin blocks/1.bin
+komodo verify (ls blocks).0.name (ls blocks).1.name
let all_k_choose_n_permutations = seq $FEC_PARAMS.k $FEC_PARAMS.n
| each {|ki|