diff --git a/.gitignore b/.gitignore
index 4fc3bf546e9bd869e9fe480f02c97d879d1018ac..0e3c97d364377fd1d379c6bc75dc7397db3049eb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,7 @@ target/
 Cargo.lock
 
 *.ndjson
+*.png
 
 # IDEs
 .idea
diff --git a/Cargo.toml b/Cargo.toml
index 69c51fb15717403bea8656bcf27b34ddcfeeba57..3fed51b96fa4824ed8b1944178d1d97733e9305f 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -25,6 +25,7 @@ thiserror = "1.0.50"
 tracing = "0.1.40"
 tracing-subscriber = "0.3.17"
 ark-poly-commit = { git = "https://gitlab.isae-supaero.fr/a.stevan/poly-commit", version = "0.4.0", rev = "19fc0d4", optional = true }
+dragoonfri = { version = "0.1.0", optional = true }
 
 [workspace]
 members = [
@@ -35,12 +36,16 @@ members = [
 
 [dev-dependencies]
 ark-bls12-381 = "0.4.0"
+clap = { version = "4.5.17", features = ["derive"] }
+dragoonfri-test-utils = "0.1.0"
+hex = "0.4.3"
 itertools = "0.13.0"
 rand = "0.8.5"
 
 [features]
 kzg = ["dep:ark-poly-commit"]
 aplonk = ["dep:ark-poly-commit"]
+fri = ["dep:dragoonfri"]
 fs = []
 
 [package.metadata.docs.rs]
@@ -53,3 +58,7 @@ required-features = ["kzg"]
 [[example]]
 name = "aplonk"
 required-features = ["aplonk"]
+
+[[example]]
+name = "fri"
+required-features = ["fri"]
diff --git a/assets/128_4096_3.bin b/assets/128_4096_3.bin
new file mode 100644
index 0000000000000000000000000000000000000000..a7ba852da9eea7586d915d1f34bfa8cde220c905
Binary files /dev/null and b/assets/128_4096_3.bin differ
diff --git a/assets/128_4_3.bin b/assets/128_4_3.bin
new file mode 100644
index 0000000000000000000000000000000000000000..c41b5c91430f303aa4beff6077f54867c64e6812
Binary files /dev/null and b/assets/128_4_3.bin differ
diff --git a/benchmarks/README.md b/benchmarks/README.md
index d9445e8d04187c15e5141a3785c9134c5272726c..a946dcef474213530447828867d4f59fdac41e2b 100644
--- a/benchmarks/README.md
+++ b/benchmarks/README.md
@@ -79,3 +79,53 @@ benchmarks fec plot e2e $out_fec
 benchmarks fec plot combined $out_fec --recoding $out_recoding
 benchmarks fec plot ratio $out_fec --recoding $out_recoding
 ```
+
+## FRI
+> :bulb: **Note**
+>
+> the FRI benchmarks don't use a PLNK-based module from [src/bin/](src/bin/)
+> but rather a standalone [example](../examples/fri.rs)
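+>
+> a single data point can also be generated by hand by running the example
+> directly, e.g. with the smallest parameters of this section
+> ```nushell
+> cargo run --release --example fri --features fri -- ...[
+>     --data-size 4096
+>     -k 8
+>     --blowup-factor 2
+>     --remainder-degree-plus-one 1
+>     --folding-factor 2
+>     --nb-queries 50
+>     --hash "sha3-512"
+>     --finite-field "fp128"
+> ]
+> ```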
+
+- modify [benchmarks/params/fri.nu](benchmarks/params/fri.nu)
+- source it
+```nushell
+source benchmarks/params/fri.nu
+```
+- run the benchmarks
+```nushell
+(benchmarks fri run
+    --data-sizes $DATA_SIZES
+    --ks $KS
+    --blowup-factors $BFS
+    --nb-queries $QS
+    --hashes $HS
+    --finite-fields $FFS
+    --remainders $RPOS
+    --folding-factors $NS
+) | to ndjson out> $DATA
+```
+
+> the following `watch` call can be used to see the results as they are dumped to `$DATA`
+> ```nushell
+> watch . {
+>     open --raw $DATA
+>         | lines
+>         | last
+>         | from ndjson
+>         | into int evaluating encoding proving verifying decoding
+>         | into duration evaluating encoding proving verifying decoding
+>         | into filesize proofs commits d
+>         | into record
+> }
+> ```
+
+- plot the results
+```nushell
+benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA evaluating encoding proving decoding --y-type "duration"
+benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA verifying --y-type "duration" --single
+
+benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA proofs --y-type "filesize" --identity --normalize
+benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA commits --y-type "filesize" --single --identity --normalize
+
+benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA proofs --y-type "filesize" --identity
+benchmarks fri plot --dump-dir $OUTPUT_DIR --file $DATA commits --y-type "filesize" --single --identity
+```
diff --git a/benchmarks/mod.nu b/benchmarks/mod.nu
index 6ea142d054f7bda535b4a7864962f9e137702a87..838b61833c4f30f052b89f285aa3e7c0647aa767 100644
--- a/benchmarks/mod.nu
+++ b/benchmarks/mod.nu
@@ -3,3 +3,4 @@ export module nu-lib/commit.nu
 export module nu-lib/fec/
 export module nu-lib/recoding.nu
 export module nu-lib/linalg.nu
+export module nu-lib/fri/
diff --git a/benchmarks/nu-lib/fri/mod.nu b/benchmarks/nu-lib/fri/mod.nu
new file mode 100644
index 0000000000000000000000000000000000000000..cb999a34582b65d5c45b76878e4ace3884bac519
--- /dev/null
+++ b/benchmarks/nu-lib/fri/mod.nu
@@ -0,0 +1,2 @@
+export module run.nu
+export module plot.nu
diff --git a/benchmarks/nu-lib/fri/plot.nu b/benchmarks/nu-lib/fri/plot.nu
new file mode 100644
index 0000000000000000000000000000000000000000..7eb2287e7f0394d30ae99c8eb95925208998635f
--- /dev/null
+++ b/benchmarks/nu-lib/fri/plot.nu
@@ -0,0 +1,106 @@
+use std formats [ "from ndjson" ]
+use ../utils plot [ into-axis-options, COMMON_OPTIONS ]
+
+const NB_NS_IN_MS = 1_000_000
+
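+# plot one variable of the FRI results, drawing one curve per
+# (k, finite field, blowup factor) triple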
+def plot [
+    name: string,
+    --save,
+    --y-type: string,
+    --single,
+    --identity,
+    --normalize,
+    --dump-dir: path,
+] {
+    let ds = $in | get d | uniq
+
+    let graphs = $in
+        | select $name d k bf ff
+        | group-by { |it| $"($it.k):($it.ff):($it.bf)" }
+        | transpose name points
+        | update name {
+            let res = $in | parse "{k}:{ff}:{bf}" | into record
+            $"$k = ($res.k)$, $\\mathbb{F} = $ ($res.ff), $BF = ($res.bf)$"
+        }
+        | update points {
+            rename --column { $name: "y", d: "x" }
+                | update y { if $y_type == "duration" { $in / $NB_NS_IN_MS } else { $in } }
+                | if $single { update y { |it| $it.y / ($it.k * $it.bf) } } else { $in }
+                | if $normalize { update y { |it| $it.y / $it.x } } else { $in }
+                | sort-by x
+        }
+        | insert style { |it|
+            let type = match $it.points.ff.0 {
+                "fp128" => "solid",
+                "bls12-381" => "dashed",
+                _ => "",
+            }
+            let color = match $it.points.k.0 {
+                8 => "tab:blue",
+                128 => "tab:green",
+                1024 => "tab:orange",
+                4096 => "tab:red",
+                _ => "grey",
+            }
+            let marker = match $it.points.bf.0 {
+                2 => "o",
+                4 => "s",
+                _ => "*",
+            }
+            { color: $color, line: { type: $type, marker: { shape: $marker } } }
+        }
+        | if $identity { append {
+            name: "$x \\mapsto x$",
+            points: ($ds | wrap x | merge ($ds | wrap y) | if $normalize { update y { |it| $it.y / $it.x } } else { $in }),
+            style: { color: "black", line: { type: "dotted" } },
+        } } else { $in }
+
+    let title = [
+        $name,
+        (if $single { "single" }),
+        (if $normalize { "normalized" }),
+    ] | compact | str join '_'
+
+    let y_type = if $normalize { "plain" } else { $y_type }
+
+    let options = [
+        ...($graphs.points | flatten | into-axis-options -x "filesize" -y $y_type)
+        --use-tex
+        --y-scale log
+        --x-scale log
+        --x-scale-base 2
+        --y-scale-base 2
+        --title $title
+        ...(if $save { [ --save ($dump_dir | path join $"($title).png") ] } else {[]})
+        --fullscreen
+    ]
+
+    $graphs | to json | gplt plot $in ...($options | compact)
+}
+
+export def main [
+    ...x,
+    --file: path,
+    --y-type: string = "plain",
+    --single,
+    --identity,
+    --normalize,
+    --dump-dir: path = "./",
+] {
+    if ($x | is-empty) {
+        error make --unspanned { msg: "nothing to do" }
+    }
+    if $file == null {
+        error make --unspanned { msg: "missing --file" }
+    }
+
+    if not ($dump_dir | path exists) {
+        mkdir $dump_dir
+    }
+
+    let data = open $file | where h == "sha3-512" and q == 50
+
+    for i in $x {
+        $data | plot --save $i --y-type=$y_type --single=$single --identity=$identity --normalize=$normalize --dump-dir=$dump_dir
+    }
+}
diff --git a/benchmarks/nu-lib/fri/run.nu b/benchmarks/nu-lib/fri/run.nu
new file mode 100644
index 0000000000000000000000000000000000000000..8f6b21620b71834a5a03bcbd7fe45bb984bcf072
--- /dev/null
+++ b/benchmarks/nu-lib/fri/run.nu
@@ -0,0 +1,78 @@
+use std iter
+
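+# e.g. `cartesian product [[1, 2], [3, 4]]` returns
+# `[[1, 3], [1, 4], [2, 3], [2, 4]]`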
+def "cartesian product" [
+    iters: list  # the iterables you want the cartesian product of
+]: nothing -> list {
+    def aux [a: list]: nothing -> list {
+        if ($a | is-empty) {
+            return []
+        }
+
+        let head = $a | first
+        let tail = aux ($a | skip 1)
+
+        if ($head | is-empty) {
+            return $tail
+        } else if ($tail | is-empty) {
+            return $head
+        }
+
+        $head | each {|h| $tail | each {|t| [$h, $t]}} | flatten | each { flatten }
+    }
+
+    aux $iters
+}
+
+# returns a record with all numeric results, merged with the parameters
+def run [
+    params: record<
+        d: filesize, k: int, bf: int, q: int, h: string, ff: string, n: int, rpo: int
+    >
+] {
+    cargo run --quiet --release --example fri --features fri -- ...[
+        --data-size ($params.d | into int)
+        -k $params.k
+        --blowup-factor $params.bf
+        --remainder-degree-plus-one $params.rpo
+        --folding-factor $params.n
+        --nb-queries $params.q
+        --hash $params.h
+        --finite-field $params.ff
+    ]
+        | lines
+        | parse "{k}: {v}"
+        | into int v
+        | transpose --header-row
+        | into record
+        | merge $params
+}
+
+export def main [
+    --data-sizes: list<filesize>,
+    --ks: list<int>,
+    --blowup-factors: list<int>,
+    --nb-queries: list<int>,
+    --hashes: list<string>,
+    --finite-fields: list<string>,
+    --folding-factors: list<int>,
+    --remainders: list<int>,
+] {
+    let inputs = [
+        $data_sizes, $ks, $blowup_factors, $nb_queries, $hashes, $finite_fields,
+        $folding_factors, $remainders
+    ]
+    if ($inputs | any { is-empty }) {
+        error make --unspanned { msg: "one of the inputs is empty" }
+    }
+
+    let params = cartesian product $inputs | each { |params|
+        [d, k, bf, q, h, ff, n, rpo]
+            | iter zip-into-record $params
+            | into record
+    }
+
+    $params | each { |p|
+        print $p
+        run $p
+    }
+}
diff --git a/benchmarks/nu-lib/utils/plot.nu b/benchmarks/nu-lib/utils/plot.nu
index b31a4058e80a6f14b288dbb2fdc57b952611d16f..72c753c59d74ad7b40023b9f7b1920cc8f007f28 100644
--- a/benchmarks/nu-lib/utils/plot.nu
+++ b/benchmarks/nu-lib/utils/plot.nu
@@ -54,6 +54,7 @@ export def into-axis-options [-x: string, -y: string]: table<x: float, y: float>
 
     let y_tick_labels = match $y {
         "duration" => ($y_ticks | into-duration-tick-labels),
+        "filesize" => ($y_ticks | into-filesize-tick-labels),
         "plain" => $y_ticks,
         _ => {
             print $"warning: ($y) option is unknown for -y"
diff --git a/benchmarks/params/fri.nu b/benchmarks/params/fri.nu
new file mode 100644
index 0000000000000000000000000000000000000000..24fe810097c5df476898ba91cde13f33224ac952
--- /dev/null
+++ b/benchmarks/params/fri.nu
@@ -0,0 +1,16 @@
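+# data sizes from 4096b up to 2 ** 15 * 4096b, i.e. from 4 KiB to 128 MiB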
+let DATA_SIZES = seq 0 15 | each { 2 ** $in * 4096b }
+const KS = [8, 128, 1024, 4096]
+const BFS = [2, 4]
+const NS = [2]
+const RPOS = [1]
+const QS = [50]
+const HS = ["sha3-512"]
+const FFS = ["fp128", "bls12-381"]
+
+const DATA = "benchmarks/results/fri.ndjson"
+const OUTPUT_DIR = "benchmarks/results/figures/"
+
+if not ($DATA | path dirname | path exists) {
+    print $"creating directory for (ansi purple)($DATA)(ansi reset)"
+    $DATA | path dirname | mkdir $in
+}
diff --git a/examples/fri.rs b/examples/fri.rs
new file mode 100644
index 0000000000000000000000000000000000000000..c44f191570318effd1fda16befbcc8fab087a09a
--- /dev/null
+++ b/examples/fri.rs
@@ -0,0 +1,214 @@
+use ark_ff::PrimeField;
+use ark_poly::univariate::DensePolynomial;
+use ark_poly::DenseUVPolynomial;
+use ark_serialize::CanonicalSerialize;
+use ark_std::ops::Div;
+use clap::{Parser, ValueEnum};
+use rs_merkle::Hasher;
+use std::time::Instant;
+
+use ark_bls12_381::Fr as F_BLS12_381;
+use dragoonfri_test_utils::Fq as F_128;
+
+use dragoonfri::{
+    algorithms::{Blake3, Sha3_256, Sha3_512},
+    dynamic_folding_factor,
+};
+use komodo::error::KomodoError;
+use rand::rngs::StdRng;
+use rand::{Rng, SeedableRng};
+
+/// measures the time it takes to apply a function to a set of arguments and
+/// returns the result of the call
+///
+/// ```rust
+/// fn add(a: i32, b: i32) -> i32 { a + b }
+/// let (res, time) = timeit!(add, 1, 2);
+/// ```
+/// will be the same as
+/// ```rust
+/// fn add(a: i32, b: i32) -> i32 { a + b }
+/// let (res, time) = {
+///     let start = Instant::now();
+///     let res = add(1, 2);
+///     let time = start.elapsed();
+///     (res, time)
+/// };
+/// ```
+macro_rules! timeit {
+    ($func:expr, $( $args:expr ),*) => {{
+        let start = Instant::now();
+        let res = $func( $( $args ),* );
+        let time = start.elapsed();
+        (res, time)
+    }};
+}
+
+/// same as [`timeit`] but also prints the given name and the measured time
+///
+/// ```rust
+/// fn add(a: i32, b: i32) -> i32 { a + b }
+/// let res = timeit_and_print!("addition", add, 1, 2);
+/// ```
+/// will be the same as
+/// ```rust
+/// fn add(a: i32, b: i32) -> i32 { a + b }
+/// let res = {
+///     print!("addition: ");
+///     let start = Instant::now();
+///     let res = add(1, 2);
+///     let time = start.elapsed();
+///     println!("{}", time.as_nanos());
+///     res
+/// };
+/// ```
+macro_rules! timeit_and_print {
+    ($name: expr, $func:expr, $( $args:expr ),*) => {{
+        print!("{}: ", $name);
+        let (res, time) = timeit!($func, $($args),*);
+        println!("{}", time.as_nanos());
+        res
+    }};
+}
+
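+/// run the full FRI pipeline on `bytes` and print the time taken by each step
+/// as well as the total size of the proofs and the commits
+///
+/// the steps are: evaluate, encode, prove, verify and decode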
+fn run<const N: usize, F: PrimeField, H: Hasher, P>(
+    bytes: &[u8],
+    k: usize,
+    n: usize,
+    bf: usize,
+    rpo: usize,
+    q: usize,
+) -> Result<(), KomodoError>
+where
+    P: DenseUVPolynomial<F>,
+    for<'a, 'b> &'a P: Div<&'b P, Output = P>,
+    <H as rs_merkle::Hasher>::Hash: AsRef<[u8]> + CanonicalSerialize,
+{
+    let evaluations = timeit_and_print!("evaluating", komodo::fri::evaluate::<F>, &bytes, k, n);
+
+    let evals = evaluations.clone();
+    let shards = timeit_and_print!("encoding", komodo::fri::encode::<F>, &bytes, evals, k);
+
+    let blocks = timeit_and_print!(
+        "proving",
+        komodo::fri::prove::<N, F, H, P>,
+        evaluations,
+        shards,
+        bf,
+        rpo,
+        q
+    );
+
+    let blocks = blocks.unwrap();
+
+    let proofs: usize = blocks.iter().map(|b| b.proof.compressed_size()).sum();
+    let commits: usize = blocks.iter().map(|b| b.commit.compressed_size()).sum();
+    println!("proofs: {}", proofs);
+    println!("commits: {}", commits);
+
+    print!("verifying: ");
+    let time: std::time::Duration = blocks
+        .iter()
+        .cloned()
+        .map(|b| {
+            let (res, time) = timeit!(komodo::fri::verify::<N, F, H, P>, b, n, q);
+            res.unwrap();
+            time
+        })
+        .sum();
+    println!("{}", time.as_nanos());
+
+    let decoded = timeit_and_print!(
+        "decoding",
+        komodo::fri::decode::<F, H>,
+        blocks[0..k].to_vec(),
+        n
+    );
+
+    assert_eq!(hex::encode(bytes), hex::encode(decoded));
+
+    Ok(())
+}
+
+#[derive(ValueEnum, Debug, Clone)]
+enum Hash {
+    BLAKE3,
+    SHA3_256,
+    SHA3_512,
+}
+
+#[derive(ValueEnum, Debug, Clone)]
+enum FiniteField {
+    FP128,
+    BLS12_381,
+}
+
+#[derive(Parser, Debug)]
+#[command(version, about, long_about = None)]
+struct Args {
+    #[arg(short, long)]
+    data_size: usize,
+
+    #[arg(long, default_value = "1234")]
+    seed: u64,
+
+    #[arg(short)]
+    k: usize,
+    #[arg(short, long)]
+    blowup_factor: usize,
+
+    #[arg(short, long)]
+    remainder_degree_plus_one: usize,
+    #[arg(short, long)]
+    folding_factor: usize,
+    #[arg(short, long)]
+    nb_queries: usize,
+
+    #[arg(long)]
+    hash: Hash,
+    #[arg(long)]
+    finite_field: FiniteField,
+}
+
+/// instantiate [`run`] with the folding factor `N` chosen at runtime, like the
+/// `run!` macro in the tests of `komodo::fri`
+macro_rules! run {
+    ($n:ident, $f:ident, $h:ident) => {
+        dynamic_folding_factor!(
+            let N = $n => run::<N, $f, $h, DensePolynomial<$f>>
+        )
+    }
+}
+
+fn generate_data(size: usize, seed: u64) -> Vec<u8> {
+    let mut rnd = StdRng::seed_from_u64(seed);
+    (0..size).map(|_| rnd.gen()).collect()
+}
+
+fn main() {
+    let args = Args::parse();
+
+    let bytes = generate_data(args.data_size, args.seed);
+    println!("loaded {} bytes of data", bytes.len());
+
+    let n = args.folding_factor;
+    let f = match args.finite_field {
+        FiniteField::FP128 => match args.hash {
+            Hash::BLAKE3 => run!(n, F_128, Blake3),
+            Hash::SHA3_256 => run!(n, F_128, Sha3_256),
+            Hash::SHA3_512 => run!(n, F_128, Sha3_512),
+        },
+        FiniteField::BLS12_381 => match args.hash {
+            Hash::BLAKE3 => run!(n, F_BLS12_381, Blake3),
+            Hash::SHA3_256 => run!(n, F_BLS12_381, Sha3_256),
+            Hash::SHA3_512 => run!(n, F_BLS12_381, Sha3_512),
+        },
+    };
+    f(
+        &bytes,
+        args.k,
+        args.k * args.blowup_factor,
+        args.blowup_factor,
+        args.remainder_degree_plus_one,
+        args.nb_queries,
+    )
+    .unwrap()
+}
diff --git a/rust-toolchain.toml b/rust-toolchain.toml
index a6c0bd22bab8dc27f792ba798e40a9591c863795..ca7437e6140c9d1d891f1c347cac11cd607bf1e9 100644
--- a/rust-toolchain.toml
+++ b/rust-toolchain.toml
@@ -1,4 +1,4 @@
 [toolchain]
 profile = "minimal"
-channel = "1.75"
+channel = "1.78"
 components = ["rustfmt", "clippy"]
diff --git a/src/algebra/mod.rs b/src/algebra/mod.rs
index 22c6d61971c92e3ba62b17e9f79a0b42b4f01e4f..5a70b1568d0c38fc6600b65152da342c682622a2 100644
--- a/src/algebra/mod.rs
+++ b/src/algebra/mod.rs
@@ -67,7 +67,7 @@ pub mod linalg;
 /// # }
 /// ```
 pub fn split_data_into_field_elements<F: PrimeField>(bytes: &[u8], modulus: usize) -> Vec<F> {
-    let bytes_per_element = (F::MODULUS_BIT_SIZE as usize) / 8;
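+    // use at most `MODULUS_BIT_SIZE - 1` bits per element: a chunk of
+    // `bytes_per_element` bytes can then never exceed the modulus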
+    let bytes_per_element = (F::MODULUS_BIT_SIZE as usize - 1) / 8;
 
     let mut elements = Vec::new();
     for chunk in bytes.chunks(bytes_per_element) {
diff --git a/src/fri.rs b/src/fri.rs
new file mode 100644
index 0000000000000000000000000000000000000000..16ab1e4659c0759c2cdef4948305476147076191
--- /dev/null
+++ b/src/fri.rs
@@ -0,0 +1,228 @@
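+//! FRI-based encoding, proving and verification of shards of data, built on
+//! top of [`dragoonfri`].
+//!
+//! the overall workflow is [`evaluate`], [`encode`] and [`prove`] on the
+//! sender side, then [`verify`] and [`decode`] on the receiver side.
+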
+use ark_ff::PrimeField;
+use ark_poly::DenseUVPolynomial;
+use ark_std::ops::Div;
+use rs_merkle::algorithms::Sha256;
+use rs_merkle::Hasher;
+use std::rc::Rc;
+use tracing::{debug, info};
+
+use crate::{algebra, error::KomodoError, fec};
+use dragoonfri::{
+    frida::{FridaBuilder, FridaCommitment},
+    interpolation::interpolate_polynomials,
+    rng::FriChallenger,
+    utils::{to_evaluations, HasherExt, MerkleProof},
+};
+
+/// representation of a block of proven data.
+///
+/// this is a wrapper around a [`fec::Shard`] with some additional cryptographic
+/// information that makes it possible to prove the integrity of said shard.
+#[derive(Clone, PartialEq)]
+pub struct Block<F: PrimeField, H: Hasher> {
+    pub shard: fec::Shard<F>,
+    pub proof: MerkleProof<H>,
+    pub commit: Rc<FridaCommitment<F, H>>,
+    position: usize,
+}
+
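+/// splits the data into rows of `k` field elements and evaluates each row,
+/// seen as the coefficients of a polynomial, on a domain of size `n`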
+pub fn evaluate<F: PrimeField>(bytes: &[u8], k: usize, n: usize) -> Vec<Vec<F>> {
+    debug!("splitting bytes into rows");
+    let elements: Vec<F> = algebra::split_data_into_field_elements(bytes, k);
+    let rows = elements.chunks(k).map(|c| c.to_vec()).collect::<Vec<_>>();
+    info!(
+        "data is composed of {} rows and {} elements",
+        rows.len(),
+        elements.len()
+    );
+
+    rows.into_iter()
+        .map(|r| to_evaluations(r, n))
+        .collect::<Vec<_>>()
+}
+
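+/// transposes a matrix of field elements, assuming all rows have the same
+/// length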
+#[inline]
+fn transpose<F: Copy>(v: Vec<Vec<F>>) -> Vec<Vec<F>> {
+    let mut cols = Vec::with_capacity(v[0].len());
+    for i in 0..v[0].len() {
+        cols.push((0..v.len()).map(|j| v[j][i]).collect());
+    }
+    cols
+}
+
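+/// builds `n` shards from the row evaluations, shard `i` holding the `i`-th
+/// evaluation of every row, along with the hash and the size of the original
+/// data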
+pub fn encode<F: PrimeField>(
+    bytes: &[u8],
+    evaluations: Vec<Vec<F>>,
+    k: usize,
+) -> Vec<fec::Shard<F>> {
+    let hash = Sha256::hash(bytes).to_vec();
+
+    let n = evaluations[0].len();
+
+    let t = transpose(evaluations);
+
+    (0..n)
+        .map(|i| fec::Shard {
+            k: k as u32,
+            linear_combination: vec![],
+            hash: hash.clone(),
+            data: t[i].clone(),
+            size: bytes.len(),
+        })
+        .collect::<Vec<_>>()
+}
+
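+/// computes a single FRIDA commitment over all the evaluations and wraps each
+/// shard into a [`Block`] carrying that shared commitment and its own Merkle
+/// inclusion proof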
+pub fn prove<const N: usize, F: PrimeField, H: Hasher, P>(
+    evaluations: Vec<Vec<F>>,
+    shards: Vec<fec::Shard<F>>,
+    blowup_factor: usize,
+    remainder_plus_one: usize,
+    nb_queries: usize,
+) -> Result<Vec<Block<F, H>>, KomodoError>
+where
+    P: DenseUVPolynomial<F>,
+    for<'a, 'b> &'a P: Div<&'b P, Output = P>,
+    <H as rs_merkle::Hasher>::Hash: AsRef<[u8]>,
+{
+    let builder = FridaBuilder::<F, H>::new::<N, _>(
+        &evaluations,
+        FriChallenger::<H>::default(),
+        blowup_factor,
+        remainder_plus_one,
+        nb_queries,
+    );
+
+    let commit = Rc::new(FridaCommitment::from(builder.clone()));
+
+    Ok(shards
+        .iter()
+        .enumerate()
+        .map(|(i, s)| Block {
+            shard: s.clone(),
+            proof: builder.prove_shards(&[i]),
+            commit: commit.clone(),
+            position: i,
+        })
+        .collect())
+}
+
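+/// checks the FRI commitment of a [`Block`] as well as the Merkle inclusion
+/// proof of its shard
+///
+/// currently panics instead of returning an error when the block is invalid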
+pub fn verify<const N: usize, F: PrimeField, H: Hasher, P>(
+    block: Block<F, H>,
+    domain_size: usize,
+    nb_queries: usize,
+) -> Result<(), KomodoError>
+where
+    P: DenseUVPolynomial<F>,
+    for<'a, 'b> &'a P: Div<&'b P, Output = P>,
+    <H as rs_merkle::Hasher>::Hash: AsRef<[u8]>,
+{
+    block
+        .commit
+        .verify::<N, _>(
+            FriChallenger::<H>::default(),
+            nb_queries,
+            block.shard.k as usize,
+            domain_size,
+        )
+        .unwrap();
+
+    assert!(block.proof.verify(
+        block.commit.tree_root(),
+        &[block.position],
+        &[H::hash_item(&block.shard.data)],
+        domain_size,
+    ));
+
+    Ok(())
+}
+
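+/// reconstructs the original data from any `k` valid blocks by interpolating
+/// the source rows from the evaluations found at the positions of the blocks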
+pub fn decode<F: PrimeField, H: Hasher>(blocks: Vec<Block<F, H>>, n: usize) -> Vec<u8> {
+    let w = F::get_root_of_unity(n as u64).unwrap();
+
+    let t_shards = transpose(blocks.iter().map(|b| b.shard.data.clone()).collect());
+    let positions = blocks
+        .iter()
+        .map(|b| w.pow([b.position as u64]))
+        .collect::<Vec<_>>();
+    let source_shards = interpolate_polynomials(&t_shards, &positions)
+        .into_iter()
+        .flatten()
+        .collect::<Vec<_>>();
+
+    let mut bytes = algebra::merge_elements_into_bytes(&source_shards);
+    bytes.resize(blocks[0].shard.size, 0);
+    bytes
+}
+
+#[cfg(test)]
+mod tests {
+    use ark_ff::PrimeField;
+    use ark_poly::{univariate::DensePolynomial, DenseUVPolynomial};
+    use ark_serialize::CanonicalSerialize;
+    use ark_std::ops::Div;
+    use rs_merkle::Hasher;
+
+    use ark_bls12_381::Fr as F_BLS12_381;
+    use dragoonfri::{
+        algorithms::{Blake3, Sha3_256, Sha3_512},
+        dynamic_folding_factor,
+    };
+    use dragoonfri_test_utils::Fq as F_128;
+
+    use crate::error::KomodoError;
+
+    use super::{decode, encode, evaluate, prove, verify};
+
+    fn bytes() -> Vec<u8> {
+        include_bytes!("../assets/dragoon_133x133.png").to_vec()
+    }
+
+    fn run<const N: usize, F: PrimeField, H: Hasher, P>(
+        bytes: &[u8],
+        k: usize,
+        n: usize,
+        bf: usize,
+        rpo: usize,
+        q: usize,
+    ) -> Result<(), KomodoError>
+    where
+        P: DenseUVPolynomial<F>,
+        for<'a, 'b> &'a P: Div<&'b P, Output = P>,
+        <H as rs_merkle::Hasher>::Hash: AsRef<[u8]> + CanonicalSerialize,
+    {
+        let evaluations = evaluate::<F>(bytes, k, n);
+
+        let evals = evaluations.clone();
+        let shards = encode::<F>(bytes, evals, k);
+
+        let blocks = prove::<N, F, H, P>(evaluations, shards, bf, rpo, q).unwrap();
+
+        for b in blocks.clone() {
+            verify::<N, F, H, P>(b, n, q).unwrap();
+        }
+
+        assert_eq!(decode::<F, H>(blocks[0..k].to_vec(), n), bytes);
+
+        Ok(())
+    }
+
+    macro_rules! run {
+        ($n:tt, $f:ident, $h:ident) => {
+            dynamic_folding_factor!(
+                let N = $n => run::<N, $f, $h, DensePolynomial<$f>>
+            )
+        }
+    }
+
+    #[test]
+    fn end_to_end() {
+        for (ff, k, n, bf, rpo, q) in [(2, 4, 8, 2, 1, 50), (2, 4, 8, 2, 2, 50)] {
+            let _ = run!(ff, F_128, Blake3)(&bytes(), k, n, bf, rpo, q);
+            let _ = run!(ff, F_128, Sha3_256)(&bytes(), k, n, bf, rpo, q);
+            let _ = run!(ff, F_128, Sha3_512)(&bytes(), k, n, bf, rpo, q);
+            let _ = run!(ff, F_BLS12_381, Blake3)(&bytes(), k, n, bf, rpo, q);
+            let _ = run!(ff, F_BLS12_381, Sha3_256)(&bytes(), k, n, bf, rpo, q);
+            let _ = run!(ff, F_BLS12_381, Sha3_512)(&bytes(), k, n, bf, rpo, q);
+        }
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
index 8b69d6a2cb709050e9fb1a95335c6f590f9ef00f..c4c0690ec36cc0b147c8b6b24101c05bf4714c78 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -51,6 +51,8 @@ pub mod aplonk;
 mod conversions;
 pub mod error;
 pub mod fec;
+#[cfg(feature = "fri")]
+pub mod fri;
 #[cfg(feature = "fs")]
 pub mod fs;
 #[cfg(feature = "kzg")]