Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • dragoon/komodo
  • a.stevan/komodo
  • c.heme/komodo
3 results
Show changes
Commits on Source (26)
Showing with 776 additions and 200 deletions
FROM rust:latest

# Suppress interactive prompts during package installation.
ENV DEBIAN_FRONTEND=noninteractive

# Install build dependencies and clean the APT cache in the SAME layer:
# a `rm -rf /var/lib/apt/lists/*` in a later RUN cannot shrink earlier
# layers, so the original trailing cleanup step had no effect on image size.
# `apt-get` is used instead of `apt`, whose CLI is not stable for scripts.
RUN apt-get update --yes \
    && apt-get upgrade --yes \
    && apt-get install --yes --no-install-recommends protobuf-compiler \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Pin the toolchain: rustup picks up rust-toolchain.toml and installs the
# requested toolchain/components when first invoked.
COPY rust-toolchain.toml /
RUN rustup show && cargo --version

# `cargo-script` runs the repository's ./make.rs build script.
RUN cargo install cargo-script
name: Rust CI

on: [push, pull_request, workflow_dispatch]

jobs:
  # formatting gate: cheap, runs first, `test` depends on it
  fmt:
    runs-on: ubuntu-latest
    container:
      image: "ghcr.io/dragoon-rs/dragoon/komodo:bcb0e6b5f73420762f6208700a43291e0066c2c3"
    # skip CI entirely for commits explicitly marked "draft:" or "no-ci:"
    if: "!contains(github.event.head_commit.message, 'draft:') && !contains(github.event.head_commit.message, 'no-ci:')"
    steps:
      - uses: actions/checkout@v3
      - name: Run fmt check
        run: |
          ./make.rs fmt --check

  test:
    runs-on: ubuntu-latest
    container:
      image: "ghcr.io/dragoon-rs/dragoon/komodo:bcb0e6b5f73420762f6208700a43291e0066c2c3"
    needs: fmt
    if: "!contains(github.event.head_commit.message, 'draft:') && !contains(github.event.head_commit.message, 'no-ci:')"
    steps:
      - uses: actions/checkout@v3
      - name: Show configuration
        run: |
          ./make.rs version
      - name: Run tests
        run: |
          ./make.rs check
          ./make.rs clippy
          ./make.rs test
# Rust
target/ target/
Cargo.lock Cargo.lock
*.ndjson *.ndjson
*.png
# IDEs
.idea
.vscode
image: "rust:latest" image: "gitlab-registry.isae-supaero.fr/dragoon/komodo:bcb0e6b5f73420762f6208700a43291e0066c2c3"
stages: stages:
- fmt - fmt
- test - test
variables:
NUSHELL_ARCH: "x86_64-unknown-linux-musl"
NUSHELL_VERSION: "0.95.0"
workflow: workflow:
rules: rules:
- if: $CI_COMMIT_MESSAGE =~ /^(draft|no-ci):/ - if: $CI_COMMIT_MESSAGE =~ /^(draft|no-ci):/
...@@ -19,28 +15,15 @@ workflow: ...@@ -19,28 +15,15 @@ workflow:
fmt: fmt:
stage: fmt stage: fmt
script: script:
- make fmt-check - ./make.rs fmt --check
test: test:
stage: test stage: test
needs: needs:
- fmt - fmt
before_script:
- apt update --yes
- apt upgrade --yes
- apt install protobuf-compiler --yes
- export NUSHELL_BUILD="nu-$NUSHELL_VERSION-$NUSHELL_ARCH"
- export PATH="/tmp/:$PATH"
# install Nushell
- curl -fLo /tmp/nu.tar.gz "https://github.com/nushell/nushell/releases/download/$NUSHELL_VERSION/$NUSHELL_BUILD.tar.gz"
- tar xvf /tmp/nu.tar.gz --directory /tmp
- cp "/tmp/$NUSHELL_BUILD/nu" /tmp/nu
- make show
script: script:
- make check clippy test example - ./make.rs version
- ./make.rs check
- ./make.rs clippy
- ./make.rs test
REVISION: 1aa2ed1947a0b891398558fcf4e4289849cc5a1d
VERSION: 0.102.0
[package] [package]
name = "komodo" name = "komodo"
version = "1.0.0" version = "1.0.1"
edition = "2021" edition = "2021"
description = "Komodo: cryptographically-proven erasure coding for distributed systems" description = "Komodo: cryptographically-proven erasure coding for distributed systems"
repository = "https://gitlab.isae-supaero.fr/dragoon/komodo" repository = "https://gitlab.isae-supaero.fr/dragoon/komodo"
...@@ -25,26 +25,31 @@ thiserror = "1.0.50" ...@@ -25,26 +25,31 @@ thiserror = "1.0.50"
tracing = "0.1.40" tracing = "0.1.40"
tracing-subscriber = "0.3.17" tracing-subscriber = "0.3.17"
ark-poly-commit = { git = "https://gitlab.isae-supaero.fr/a.stevan/poly-commit", version = "0.4.0", rev = "19fc0d4", optional = true } ark-poly-commit = { git = "https://gitlab.isae-supaero.fr/a.stevan/poly-commit", version = "0.4.0", rev = "19fc0d4", optional = true }
dragoonfri = { version = "0.1.0", optional = true}
[workspace] [workspace]
members = [ members = [
"benchmarks", "benchmarks",
"bins/rank", "bins/rank",
"bins/saclin",
] ]
[dev-dependencies] [dev-dependencies]
ark-bls12-381 = "0.4.0" ark-bls12-381 = "0.4.0"
clap = { version = "4.5.17", features = ["derive"] }
itertools = "0.13.0" itertools = "0.13.0"
rand = "0.8.5" rand = "0.8.5"
dragoonfri-test-utils = "0.1.0"
hex = "0.4.3"
[features] [features]
kzg = ["dep:ark-poly-commit"] kzg = ["dep:ark-poly-commit"]
aplonk = ["dep:ark-poly-commit"] aplonk = ["dep:ark-poly-commit"]
fri = ["dep:dragoonfri"]
fs = [] fs = []
[package.metadata.docs.rs] [package.metadata.docs.rs]
features = ["kzg", "aplonk"] features = ["kzg", "aplonk"]
rustdoc-args = [ "--html-in-header", "katex.html" ]
[[example]] [[example]]
name = "kzg" name = "kzg"
...@@ -53,3 +58,11 @@ required-features = ["kzg"] ...@@ -53,3 +58,11 @@ required-features = ["kzg"]
[[example]] [[example]]
name = "aplonk" name = "aplonk"
required-features = ["aplonk"] required-features = ["aplonk"]
[[example]]
name = "fri"
required-features = ["fri"]
[[example]]
name = "fec"
required-features = ["fri"]
.PHONY: all fmt fmt-check check clippy test-rs test-nu test example show doc build-examples

# A bare `make` runs the full format/lint/test cycle. The original
# `DEFAULT_GOAL: ...` line defined an ordinary target named "DEFAULT_GOAL"
# (not the special variable) and only acted as the default by accident of
# being the first target; `.DEFAULT_GOAL` is the documented mechanism.
.DEFAULT_GOAL := all
all: fmt-check check clippy test-rs

fmt-check:
	cargo fmt --all -- --check

fmt:
	cargo fmt --all

# type-check every workspace crate under each feature combination
check:
	cargo check --workspace --all-targets
	cargo check --workspace --all-targets --features kzg
	cargo check --workspace --all-targets --features aplonk
	cargo check --workspace --all-targets --all-features

clippy:
	cargo clippy --workspace --all-targets --all-features -- -D warnings

test-rs:
	cargo test --workspace --verbose --all-features
	cargo test --examples --verbose

# Nushell end-to-end tests of the SACLIN CLI application
test-nu:
	nu bins/saclin/tests/cli.nu
	nu bins/saclin/tests/binary.nu

test: test-rs test-nu

example:
	nu bins/saclin/examples/cli.nu

# print the versions of the whole toolchain, for reproducibility reports
show:
	rustup --version
	rustup show --verbose
	rustc --version
	cargo --version
	cargo clippy --version
	nu --version

doc:
	cargo doc --document-private-items --no-deps --open

build-examples:
	cargo build --examples --release
# Komodo: Cryptographically-proven Erasure Coding # Komodo: Cryptographically-proven Erasure Coding
## the library [![release](https://gitlab.isae-supaero.fr/dragoon/komodo/-/badges/release.svg)](https://gitlab.isae-supaero.fr/dragoon/komodo/-/releases)
see `cargo doc` or [the library itself](src/) [![crate](https://img.shields.io/crates/v/komodo)](https://crates.io/crates/komodo)
[![docs](https://img.shields.io/docsrs/komodo)](https://docs.rs/komodo/latest/komodo/)
[![source](https://gitlab.isae-supaero.fr/dragoon/komodo/badges/main/pipeline.svg?key_text=GitLab%20CI)](https://gitlab.isae-supaero.fr/dragoon/komodo/-/pipelines)
[![mirror](https://github.com/dragoon-rs/komodo/actions/workflows/ci.yml/badge.svg)](https://github.com/dragoon-rs/komodo/actions)
## the tests Komodo uses a build system entirely written in Rust.
```shell - [`cargo-script`](https://crates.io/crates/cargo-script) to build the script
make - [`nob.rs`](https://gitlab.isae-supaero.fr/a.stevan/nob.rs) to run commands
``` - [`clap`](https://crates.io/crates/clap) to provide a nice and complete build API
or
```shell
make check clippy test-rs
```
### some extra tests First, [install `cargo-script`](https://github.com/DanielKeep/cargo-script#installation).
this project defines some tests written in [Nushell](https://www.nushell.sh/) to test an
[implementation of Komodo in a CLI application](bins/saclin/).
If you have [Nushell installed](https://www.nushell.sh/book/installation.html), you can run these Then, run the script with `./make.rs --help`
with the following command:
## the library
```shell ```shell
make test-nu ./make.rs doc
``` ```
## examples ## the tests
A [CLI example](bins/saclin/examples/cli.nu) is also provided and can be run with
```shell ```shell
make example ./make.rs check
./make.rs clippy
./make.rs test
``` ```
Other examples that showcase the Komodo API are available in [`examples/`](examples/). Other examples that showcase the Komodo API are available in [`examples/`](examples/).
...@@ -33,6 +32,28 @@ Other examples that showcase the Komodo API are available in [`examples/`](examp ...@@ -33,6 +32,28 @@ Other examples that showcase the Komodo API are available in [`examples/`](examp
## the benchmarks ## the benchmarks
see [`benchmarks/`](benchmarks/README.md) see [`benchmarks/`](benchmarks/README.md)
the results can be found in [`dragoon/komodo-benchmark-results`](https://gitlab.isae-supaero.fr/dragoon/komodo-benchmark-results).
## development
Komodo uses a Docker image as the base of the GitLab pipelines.
That means that there is nothing to build apart from the source code of Komodo itself when running jobs.
When the development environment needs to change, e.g. when the version of Rust is bumped in
[`rust-toolchain.toml`](./rust-toolchain.toml), one shall run the following commands to push the new
Docker image to the [_container registry_][gitlab.isae-supaero.fr:dragoon/komodo@containers].
```shell
./make.rs container --login
```
```shell
./make.rs container
```
```shell
./make.rs container --push
```
## contributors ## contributors
Because the code for this project has been originally extracted from Because the code for this project has been originally extracted from
...@@ -45,3 +66,4 @@ note that the following people have contributed to this code base: ...@@ -45,3 +66,4 @@ note that the following people have contributed to this code base:
- @j.detchart - @j.detchart
[pcs-fec-id]: https://gitlab.isae-supaero.fr/dragoon/pcs-fec-id [pcs-fec-id]: https://gitlab.isae-supaero.fr/dragoon/pcs-fec-id
[gitlab.isae-supaero.fr:dragoon/komodo@containers]: https://gitlab.isae-supaero.fr/dragoon/komodo/container_registry/42
File added
File added
[package] [package]
name = "benchmarks" name = "benchmarks"
version = "1.0.0" version = "1.0.1"
edition = "2021" edition = "2021"
[dependencies] [dependencies]
...@@ -20,6 +20,7 @@ ark-secp256r1 = "0.4.0" ...@@ -20,6 +20,7 @@ ark-secp256r1 = "0.4.0"
ark-std = "0.4.0" ark-std = "0.4.0"
ark-vesta = "0.4.0" ark-vesta = "0.4.0"
clap = { version = "4.5.4", features = ["derive"] } clap = { version = "4.5.4", features = ["derive"] }
komodo = { path = ".." } komodo = { path = "..", features = ["fri"] }
plnk = { git = "https://gitlab.isae-supaero.fr/a.stevan/plnk", tag = "0.7.0", version = "0.7.0" } plnk = { git = "https://gitlab.isae-supaero.fr/a.stevan/plnk", tag = "0.7.0", version = "0.7.0" }
rand = "0.8.5" rand = "0.8.5"
dragoonfri = { version = "0.1.0"}
# Table of contents
- [Requirements](#requirements)
- [Run the benchmarks](#run-the-benchmarks)
- [define them](#define-them)
- [run them](#run-them)
- [Plot the benchmarks](#plot-the-benchmarks)
## requirements ## requirements
- install [GPLT](https://gitlab.isae-supaero.fr/a.stevan/gplt) > :bulb: **Note**
>
> these should only be required for plotting results
```nushell - install [GPLT](https://gitlab.isae-supaero.fr/a.stevan/gplt)
use .nushell/math.nu * - create a virtual environment
use .nushell/formats.nu * ```bash
const VENV = "~/.local/share/venvs/gplt/bin/activate.nu" | path expand
``` ```
```bash
## atomic operations virtualenv ($VENV | path dirname --num-levels 2)
```nushell
cargo run --release --package benchmarks --bin field_operations -- --nb-measurements 1000 out> field.ndjson
cargo run --release --package benchmarks --bin curve_group_operations -- --nb-measurements 1000 out> curve_group.ndjson
``` ```
```nushell - activate the virtual environment
use .nushell/parse.nu read-atomic-ops ```bash
overlay use $VENV
gplt multi_bar --title "simple field operations" -l "time (in ns)" ( ```
open field.ndjson - activate required modules
| read-atomic-ops --exclude [ "exponentiation", "legendre", "inverse", "sqrt" ] ```bash
| to json use benchmarks
)
gplt multi_bar --title "complex field operations" -l "time (in ns)" (
open field.ndjson
| read-atomic-ops --include [ "exponentiation", "legendre", "inverse", "sqrt" ]
| to json
)
gplt multi_bar --title "simple curve group operations" -l "time (in ns)" (
open curve_group.ndjson
| read-atomic-ops --exclude [ "random sampling", "scalar multiplication", "affine scalar multiplication" ]
| to json
)
gplt multi_bar --title "complex curve group operations" -l "time (in ns)" (
open curve_group.ndjson
| read-atomic-ops --include [ "random sampling", "scalar multiplication", "affine scalar multiplication" ]
| to json
)
``` ```
## linear algebra > :bulb: **Note**
```nushell >
let sizes = seq 0 7 | each { 2 ** $in } > i personally use the [`nuenv` hook](https://github.com/nushell/nu_scripts/blob/main/nu-hooks/nu-hooks/nuenv/hook.nu)
> that reads [`.env.nu`](../.env.nu).
let out_linalg = $sizes | benchmarks linalg run
benchmarks linalg plot $out_linalg inverse ## Run the benchmarks
``` ### define them
## trusted setup and commit > :bulb: **Note**
```nushell >
let degrees = seq 0 13 | each { 2 ** $in } > the FRI benchmarks don't use a module from [src/bin/](src/bin/) with PLNK but rather an
let curves = [ bls12381, pallas, bn254 ] > [example](../examples/fri.rs)
let out_setup = $degrees | benchmarks setup run --curves $curves ```bash
let out_commit = $degrees | benchmarks commit run --curves $curves const RESULTS_DIR = "/path/to/komodo-benchmark-results/"
benchmarks setup plot $out_setup let benchmarks = {
benchmarks commit plot $out_commit linalg: {
enabled: true,
sizes: (seq 0 7 | each { 2 ** $in }),
output: "linalg.ndjson",
append: true,
},
setup: {
enabled: true,
degrees: (seq 0 13 | each { 2 ** $in }),
curves: [ bls12381, pallas, bn254 ],
output: "setup.ndjson",
append: true,
},
commit: {
enabled: true,
degrees: (seq 0 13 | each { 2 ** $in }),
curves: [ bls12381, pallas, bn254 ],
output: "commit.ndjson",
append: true,
},
recoding: {
enabled: true,
sizes: (seq 0 18 | each { 512 * 2 ** $in }),
ks: [2, 4, 8, 16],
curves: [ bls12381 ],
output: "recoding.ndjson",
append: true,
},
fec: {
enabled: true,
sizes: (seq 0 18 | each { 512 * 2 ** $in }),
ks: [2, 4, 8, 16],
curves: [ bls12381 ],
output: "fec.ndjson",
append: true,
},
fri: {
enabled: true,
sizes: (seq 0 15 | each { 2 ** $in * 4096b }),
ks: [8, 128, 1024, 4096],
blowup_factors: [2, 4],
ns: [2],
remainder_plus_ones: [1],
nb_queries: [50],
hashes: ["sha3-512"],
ffs: ["fp128", "bls12-381"],
output: "fri.ndjson",
append: true,
},
field: {
enabled: true,
nb_measurements: 1000,
output: "field.ndjson",
append: true,
},
curve_group: {
enabled: true,
nb_measurements: 1000,
output: "curve_group.ndjson",
append: true,
},
}
``` ```
## end-to-end benchmarks ### run them
```nushell ```bash
let sizes = seq 0 18 | each { 512 * 2 ** $in } benchmarks run --output-dir $RESULTS_DIR $benchmarks
let ks = [2, 4, 8, 16]
let curves = [ bls12381 ]
``` ```
### run > the following `watch` can be used to see the results as they are dumped to `$RESULTS_DIR`
```nushell > ```bash
let out_recoding = $sizes | benchmarks recoding run --ks $ks --curves $curves > watch $RESULTS_DIR { |op, path|
let out_fec = $sizes | benchmarks fec run --ks $ks --curves $curves > $"($op) ($path)"
``` > }
> ```
### plot ## Plot the benchmarks
```nushell ```bash
benchmarks recoding plot $out_recoding let plots = {
benchmarks fec plot encoding $out_fec linalg: { file: "linalg.ndjson" },
benchmarks fec plot decoding $out_fec setup: { file: "setup.ndjson" },
benchmarks fec plot e2e $out_fec commit: { file: "commit.ndjson" },
benchmarks fec plot combined $out_fec --recoding $out_recoding fec: { file: "fec.ndjson" },
benchmarks fec plot ratio $out_fec --recoding $out_recoding recoding: { file: "recoding.ndjson" },
fri: [
[name, y_type, single, identity, normalize];
[evaluating, duration, false, false, false ],
[encoding, duration, false, false, false ],
[proving, duration, false, false, false ],
[decoding, duration, false, false, false ],
[verifying, duration, true, false, false ],
[proofs, filesize, false, true, true ],
[commits, filesize, true, true, true ],
[proofs, filesize, false, true, false ],
[commits, filesize, true, true, false ],
],
field: {
title: "field operations",
file: field.ndjson,
simple_operations: [ "exponentiation", "legendre", "inverse", "sqrt" ],
},
curve_group: {
title: "curve group operations",
file: curve_group.ndjson,
simple_operations: [ "random sampling", "scalar multiplication", "affine scalar multiplication" ],
},
}
```
```bash
benchmarks plot $plots --input-dir "/path/to/komodo-benchmark-results/<hash>" --output-dir "./figures/"
``` ```
...@@ -3,3 +3,262 @@ export module nu-lib/commit.nu ...@@ -3,3 +3,262 @@ export module nu-lib/commit.nu
export module nu-lib/fec/ export module nu-lib/fec/
export module nu-lib/recoding.nu export module nu-lib/recoding.nu
export module nu-lib/linalg.nu export module nu-lib/linalg.nu
export module nu-lib/fri/
use nu-lib/linalg.nu
use nu-lib/setup.nu
use nu-lib/commit.nu
use nu-lib/recoding.nu
use nu-lib/fec/
use nu-lib/fri/
use nu-lib/utils/log.nu
use nu-lib/utils/parse.nu read-atomic-ops
# Subset of `lscpu` fields kept to fingerprint the benchmark machine.
# The selected record is serialized to JSON and hashed (together with the
# current git commit) by `run` below to name the per-machine results directory.
const CPU_FIELDS = [
"Architecture",
"CPU op-mode(s)",
"Address sizes",
"Byte Order",
"CPU(s)",
"On-line CPU(s) list",
"Model name",
"CPU family",
"Model",
"Thread(s) per core",
"Core(s) per socket",
"Socket(s)",
"Stepping",
"CPU max MHz",
"CPU min MHz",
"BogoMIPS",
"Virtualization",
"L1d cache",
"L1i cache",
"L2 cache",
"L3 cache",
"NUMA node(s)",
"NUMA node0 CPU(s)",
]
# Run the enabled benchmark suites and dump their results under a
# machine-and-commit-specific directory.
#
# The `benchmarks` record configures each suite (linalg, setup, commit,
# recoding, fec, fri, field, curve_group); a suite with `enabled: false`
# is skipped with a warning. Results land in
# `<output-dir>/<sha256(cpu info + git commit)>/<suite output file>`,
# alongside `cpu.json` and `komodo.txt` describing the environment.
export def run [
benchmarks: record<
linalg: record<
enabled: bool,
sizes: list<int>,
output: string,
append: bool,
>,
setup: record<
enabled: bool,
degrees: list<int>,
curves: list<string>,
output: string,
append: bool,
>,
commit: record<
enabled: bool,
degrees: list<int>,
curves: list<string>,
output: string,
append: bool,
>,
recoding: record<
enabled: bool,
sizes: list<int>,
ks: list<int>,
curves: list<string>,
output: string,
append: bool,
>,
fec: record<
enabled: bool,
sizes: list<int>,
ks: list<int>,
curves: list<string>,
output: string,
append: bool,
>,
fri: record<
enabled: bool,
sizes: list<filesize>,
ks: list<int>,
blowup_factors: list<int>,
ns: list<int>,
remainder_plus_ones: list<int>,
nb_queries: list<int>,
hashes: list<string>,
ffs: list<string>,
output: string,
append: bool,
>,
field: record<enabled: bool, nb_measurements: int, output: string, append: bool>,
curve_group: record<enabled: bool, nb_measurements: int, output: string, append: bool>,
>,
--output-dir: path = ".",
] {
# fingerprint the machine: keep only the stable CPU_FIELDS from `lscpu`
# (trailing ":" is stripped from field names before the transpose)
let cpu = lscpu --json
| from json
| get lscpu
| update field { str trim --right --char ":" }
| transpose --header-row
| into record
| select ...$CPU_FIELDS
# results directory name = sha256 of (cpu description + current commit),
# so runs from different machines or revisions never collide
let commit = git rev-parse HEAD
let hash = $cpu | to json | $in + $commit | hash sha256
let target = $output_dir | path join $hash
mkdir $target
# record the environment next to the results for later reference
$cpu | to json | save --force ($target | path join "cpu.json")
$commit | save --force ($target | path join "komodo.txt")
# attach a `run` closure to each suite; `{{ |it| ... }}` stores a closure
# value in the record, invoked below with the suite's own config as `it`
let benchmarks = $benchmarks
| insert linalg.run {{ |it|
let output = $target | path join $it.output
$it.sizes | linalg run --no-confirm --output $output --append=$it.append
}}
| insert setup.run {{ |it|
let output = $target | path join $it.output
$it.degrees | setup run --curves $it.curves --no-confirm --output $output --append=$it.append
}}
| insert commit.run {{ |it|
let output = $target | path join $it.output
$it.degrees | commit run --curves $it.curves --no-confirm --output $output --append=$it.append
}}
| insert recoding.run {{ |it|
let output = $target | path join $it.output
$it.sizes | recoding run --ks $it.ks --curves $it.curves --no-confirm --output $output --append=$it.append
}}
| insert fec.run {{ |it|
let output = $target | path join $it.output
$it.sizes | fec run --ks $it.ks --curves $it.curves --no-confirm --output $output --append=$it.append
}}
| insert fri.run {{ |it|
# FIXME: refactor this
# the two branches differ only in `out>>` (append) vs `out>` (truncate)
if $it.append {
(
fri run
--data-sizes $it.sizes
--ks $it.ks
--blowup-factors $it.blowup_factors
--nb-queries $it.nb_queries
--hashes $it.hashes
--finite-fields $it.ffs
--remainders $it.remainder_plus_ones
--folding-factors $it.ns
) | to ndjson out>> ($target | path join $it.output)
} else {
(
fri run
--data-sizes $it.sizes
--ks $it.ks
--blowup-factors $it.blowup_factors
--nb-queries $it.nb_queries
--hashes $it.hashes
--finite-fields $it.ffs
--remainders $it.remainder_plus_ones
--folding-factors $it.ns
) | to ndjson out> ($target | path join $it.output)
}
}}
| insert field.run {{ |it|
# field/curve_group benches are plain cargo binaries writing to stdout
let options = [
--bin field
--release
--package benchmarks
--
--nb-measurements $it.nb_measurements
]
# FIXME: refactor this
if $it.append {
cargo run ...$options out>> ($target | path join $it.output)
} else {
cargo run ...$options out> ($target | path join $it.output)
}
}}
| insert curve_group.run {{ |it|
let options = [
--bin curve_group
--release
--package benchmarks
--
--nb-measurements $it.nb_measurements
]
# FIXME: refactor this
if $it.append {
cargo run ...$options out>> ($target | path join $it.output)
} else {
cargo run ...$options out> ($target | path join $it.output)
}
}}
# run every suite in turn; a missing `enabled` key defaults to true
let _ = $benchmarks | items { |k, b|
if ($b.enabled? | default true) {
log info $"running (ansi cyan)($k)(ansi reset)"
do $b.run $b
} else {
log warning $"skipping (ansi cyan)($k)(ansi reset)"
}
}
}
# Plot previously-dumped benchmark results as PNG figures.
#
# `plots` maps each suite to the file (relative to --input-dir) holding its
# results, plus per-suite plotting options: `fri` is a table of figure
# specs, `field`/`curve_group` carry a title and the list of operations
# considered "simple". Figures are written under --output-dir.
export def plot [plots: record, --input-dir: path, --output-dir: path = "./figures/"] {
mkdir $output_dir
let linalg_file = $input_dir | path join $plots.linalg.file
let fec_file = $input_dir | path join $plots.fec.file
let recoding_file = $input_dir | path join $plots.recoding.file
# one figure per linear-algebra operation
for op in [ "mul", "transpose", "inverse" ] {
linalg plot $linalg_file $op --save ($output_dir | path join $"linalg-($op).png")
}
setup plot ($input_dir | path join $plots.setup.file) --save ($output_dir | path join setup.png)
commit plot ($input_dir | path join $plots.commit.file) --save ($output_dir | path join commit.png)
recoding plot $recoding_file --save ($output_dir | path join recoding.png)
# FEC results are plotted four ways, plus two combined views with recoding
fec plot encoding $fec_file --save ($output_dir | path join encoding.png)
fec plot decoding $fec_file --save ($output_dir | path join decoding.png)
fec plot e2e $fec_file --save ($output_dir | path join end_to_end.png)
fec plot combined $fec_file --recoding $recoding_file --save ($output_dir | path join combined.png)
fec plot ratio $fec_file --recoding $recoding_file --save ($output_dir | path join ratio.png)
# each row of $plots.fri describes one figure (name, y axis type, flags)
for plot in $plots.fri {(
fri plot
--dump-dir $output_dir
--file ($input_dir | path join fri.ndjson)
$plot.name
--y-type $plot.y_type
--single=$plot.single
--identity=$plot.identity
--normalize=$plot.normalize
--save
)}
for plot in ($plots | select field curve_group | values) {
# derive the figure path and title from the suite title plus a
# "simple"/"complex" prefix
def output [prefix: string]: [ nothing -> record<path: path, title: string> ] {
let title_tokens = $plot.title | split row " " | prepend $prefix
{
path: ({
parent: $output_dir,
stem: ($title_tokens | str join "_"),
extension: "png",
} | path join),
title: ($title_tokens | str join " "),
}
}
let data = open ($input_dir | path join $plot.file)
# simple operations = those listed in `simple_operations`; the complex
# figure takes the complement via --exclude
output "simple" | gplt multi-bar --title $in.title -l "time (in ns)" (
$data | read-atomic-ops --include $plot.simple_operations | to json
) --save $in.path
output "complex" | gplt multi-bar --title $in.title -l "time (in ns)" (
$data | read-atomic-ops --exclude $plot.simple_operations | to json
) --save $in.path
}
}
...@@ -2,6 +2,7 @@ use utils log ...@@ -2,6 +2,7 @@ use utils log
use utils math * use utils math *
use utils fs check-file use utils fs check-file
use utils plot [ into-axis-options, COMMON_OPTIONS, gplt ] use utils plot [ into-axis-options, COMMON_OPTIONS, gplt ]
use utils args check-list-arg
use std formats * use std formats *
...@@ -12,22 +13,19 @@ use std formats * ...@@ -12,22 +13,19 @@ use std formats *
export def run [ export def run [
--output: path, # the output path (defaults to a random file in $nu.temp-path) --output: path, # the output path (defaults to a random file in $nu.temp-path)
--curves: list<string>, # the curves to benchmark --curves: list<string>, # the curves to benchmark
--force, # does not ask for confirmation if the output file already exists, it will be overwritten --no-confirm (-y), # does not ask for confirmation if the output file already exists, it will be overwritten
--nb-measurements: int = 10, # the number of measurements per benchmark run --nb-measurements: int = 10, # the number of measurements per benchmark run
--append, # append to the output path instead of overwritting
]: list<int> -> path { ]: list<int> -> path {
let input = $in $curves | check-list-arg --cmd "commit run" --arg "--curves" --span (metadata $curves).span
$in | check-list-arg --cmd "commit run" --arg "pipeline input"
if ($input | is-empty) or ($curves | is-empty) {
print "nothing to do"
return
}
let new_file = $output == null let new_file = $output == null
let output = $output | default (mktemp --tmpdir komodo_commit.XXXXXX) let output = $output | default (mktemp --tmpdir komodo_commit.XXXXXX)
let pretty_output = $"(ansi purple)($output)(ansi reset)" let pretty_output = $"(ansi purple)($output)(ansi reset)"
if ($output | path exists) and not $new_file { if ($output | path exists) and not $new_file {
log warning $"($pretty_output) already exists" log warning $"($pretty_output) already exists"
if not $force { if not $no_confirm {
let res = ["no", "yes"] | input list $"Do you want to overwrite ($pretty_output)?" let res = ["no", "yes"] | input list $"Do you want to overwrite ($pretty_output)?"
if $res == null or $res == "no" { if $res == null or $res == "no" {
log info "aborting" log info "aborting"
...@@ -37,11 +35,20 @@ export def run [ ...@@ -37,11 +35,20 @@ export def run [
} }
} }
cargo run --release --package benchmarks --bin commit -- ...[ let options = [
--release
--package benchmarks
--bin commit
--
--nb-measurements $nb_measurements --nb-measurements $nb_measurements
...$input ...$in
--curves ...$curves --curves ...$curves
] out> $output ]
if $append {
cargo run ...$options out>> $output
} else {
cargo run ...$options out> $output
}
log info $"results saved to ($pretty_output)" log info $"results saved to ($pretty_output)"
$output $output
...@@ -65,7 +72,7 @@ export def plot [ ...@@ -65,7 +72,7 @@ export def plot [
| select name x y e | select name x y e
| group-by name --to-table | group-by name --to-table
| reject items.name | reject items.name
| rename --column { group: "name", items: "points" } | rename --column { name: "name", items: "points" }
| insert style.color {|it| | insert style.color {|it|
match $it.name { match $it.name {
"BLS12-381" => "tab:blue" "BLS12-381" => "tab:blue"
......
...@@ -24,7 +24,7 @@ export def encoding [ ...@@ -24,7 +24,7 @@ export def encoding [
| sort-by x | sort-by x
| group-by k --to-table | group-by k --to-table
| reject items.k | reject items.k
| rename --column { group: "name", items: "points" } | rename --column { k: "name", items: "points" }
| update name { $"$k = ($in)$" } | update name { $"$k = ($in)$" }
let options = [ let options = [
...@@ -56,7 +56,7 @@ export def decoding [ ...@@ -56,7 +56,7 @@ export def decoding [
| sort-by x | sort-by x
| group-by k --to-table | group-by k --to-table
| reject items.k | reject items.k
| rename --column { group: "name", items: "points" } | rename --column { k: "name", items: "points" }
| update name { $"$k = ($in)$" } | update name { $"$k = ($in)$" }
let options = [ let options = [
...@@ -89,7 +89,7 @@ export def e2e [ ...@@ -89,7 +89,7 @@ export def e2e [
| update times { $it.items.0.times | zip $it.items.1.times | each { $in.0 + $in.1 } } | update times { $it.items.0.times | zip $it.items.1.times | each { $in.0 + $in.1 } }
} }
| flatten --all | flatten --all
| reject group foo | reject foo
| ns-to-ms times | ns-to-ms times
| compute-stats times | compute-stats times
| reject times | reject times
...@@ -99,7 +99,7 @@ export def e2e [ ...@@ -99,7 +99,7 @@ export def e2e [
| sort-by x | sort-by x
| group-by k --to-table | group-by k --to-table
| reject items.k | reject items.k
| rename --column { group: "name", items: "points" } | rename --column { k: "name", items: "points" }
| update name { $"$k = ($in)$" } | update name { $"$k = ($in)$" }
let options = [ let options = [
...@@ -144,7 +144,7 @@ export def combined [ ...@@ -144,7 +144,7 @@ export def combined [
} }
| reject items.shards | reject items.shards
| insert style.line.type "solid" | insert style.line.type "solid"
| rename --column { group: "name", items: "points" } | rename --column { shards: "name", items: "points" }
| update name { $"$k = ($in)$" } | update name { $"$k = ($in)$" }
let re_encoding_graphs = open --raw $data let re_encoding_graphs = open --raw $data
...@@ -159,7 +159,7 @@ export def combined [ ...@@ -159,7 +159,7 @@ export def combined [
| update times { $it.items.0.times | zip $it.items.1.times | each { $in.0 + $in.1 } } | update times { $it.items.0.times | zip $it.items.1.times | each { $in.0 + $in.1 } }
} }
| flatten --all | flatten --all
| reject group key | reject key
| ns-to-ms times | ns-to-ms times
| compute-stats times | compute-stats times
| reject times | reject times
...@@ -179,7 +179,7 @@ export def combined [ ...@@ -179,7 +179,7 @@ export def combined [
} }
| insert style.line.type "dashed" | insert style.line.type "dashed"
| reject items.k | reject items.k
| rename --column { group: "name", items: "points" } | rename --column { k: "name", items: "points" }
| reject name | reject name
let graphs = $recoding_graphs let graphs = $recoding_graphs
...@@ -254,7 +254,7 @@ export def ratio [ ...@@ -254,7 +254,7 @@ export def ratio [
| update times { $it.items.0.times | zip $it.items.1.times | each { $in.0 + $in.1 } } | update times { $it.items.0.times | zip $it.items.1.times | each { $in.0 + $in.1 } }
} }
| flatten --all | flatten --all
| reject group key | reject key
| ns-to-ms times | ns-to-ms times
| compute-stats times | compute-stats times
| where name == "BLS12-381" | where name == "BLS12-381"
...@@ -281,7 +281,7 @@ export def ratio [ ...@@ -281,7 +281,7 @@ export def ratio [
} }
} }
| reject items.k | reject items.k
| rename --column { group: "name", items: "points" } | rename --column { k: "name", items: "points" }
| update name { $"$k = ($in)$" } | update name { $"$k = ($in)$" }
let options = [ let options = [
......
use ../utils log use ../utils log
use ../utils formats * use ../utils formats *
use ../utils args check-list-arg
use std formats * use std formats *
...@@ -11,22 +12,20 @@ export def main [ ...@@ -11,22 +12,20 @@ export def main [
--output: path, # the output path (defaults to a random file in $nu.temp-path) --output: path, # the output path (defaults to a random file in $nu.temp-path)
--ks: list<int>, # the values of $k$ to benchmark --ks: list<int>, # the values of $k$ to benchmark
--curves: list<string>, # the curves to benchmark --curves: list<string>, # the curves to benchmark
--force, # does not ask for confirmation if the output file already exists, it will be overwritten --no-confirm (-y), # does not ask for confirmation if the output file already exists, it will be overwritten
--nb-measurements: int = 10, # the number of measurements per benchmark run --nb-measurements: int = 10, # the number of measurements per benchmark run
--append, # append to the output path instead of overwritting
]: list<int> -> path { ]: list<int> -> path {
let input = $in $ks | check-list-arg --cmd "fec run" --arg "--ks" --span (metadata $ks).span
$curves | check-list-arg --cmd "fec run" --arg "--curves" --span (metadata $curves).span
if ($ks | is-empty) or ($input | is-empty) or ($curves | is-empty) { $in | check-list-arg --cmd "fec run" --arg "pipeline input"
print "nothing to do"
return
}
let new_file = $output == null let new_file = $output == null
let output = $output | default (mktemp --tmpdir komodo_fec.XXXXXX) let output = $output | default (mktemp --tmpdir komodo_fec.XXXXXX)
let pretty_output = $"(ansi purple)($output)(ansi reset)" let pretty_output = $"(ansi purple)($output)(ansi reset)"
if ($output | path exists) and not $new_file { if ($output | path exists) and not $new_file {
log warning $"($pretty_output) already exists" log warning $"($pretty_output) already exists"
if not $force { if not $no_confirm {
let res = ["no", "yes"] | input list $"Do you want to overwrite ($pretty_output)?" let res = ["no", "yes"] | input list $"Do you want to overwrite ($pretty_output)?"
if $res == null or $res == "no" { if $res == null or $res == "no" {
log info "aborting" log info "aborting"
...@@ -36,17 +35,29 @@ export def main [ ...@@ -36,17 +35,29 @@ export def main [
} }
} }
"" out> $output if not $append {
"" out> $output
}
let input = $in
for k in $ks { for k in $ks {
cargo run --release --package benchmarks --bin fec -- ...[ let options = [
--release
--package benchmarks
--bin fec
--
--nb-measurements $nb_measurements --nb-measurements $nb_measurements
...$input ...$input
--encoding vandermonde --encoding vandermonde
-k $k -k $k
-n 1 -n 1
--curves ...$curves --curves ...$curves
] | from ndnuon | to ndjson out>> $output ]
if $append {
cargo run ...$options | from ndnuon | to ndjson out>> $output
} else {
cargo run ...$options | from ndnuon | to ndjson out> $output
}
} }
log info $"results saved to ($pretty_output)" log info $"results saved to ($pretty_output)"
......
export module run.nu
export module plot.nu
use std formats [ "from ndjson" ]
use ../utils plot [ into-axis-options, COMMON_OPTIONS ]
const NB_MS_IN_NS = 1_000_000
# Plot one metric of the FRI benchmark results against the data size.
#
# The input is the table of results; one curve is drawn per `(k, ff, bf)`
# combination found in the data, i.e. per polynomial degree, finite field and
# blowup factor.
#
# The actual plotting is delegated to `gplt`.
# NOTE(review): `gplt` is not in the `use ../utils plot [...]` list at the top
# of this module, so it resolves to an external command — confirm intended.
def plot [
    name: string, # the column to use as the y values, plotted against `d`
    --save, # save the figure to `$dump_dir/($title).png` instead of displaying it
    --y-type: string, # type of the y values; "duration" values are converted from ns to ms
    --single, # divide the y values by `k * bf`
    --identity, # also draw the line `y = x`
    --normalize, # divide the y values by the x values
    --dump-dir: path, # the directory where figures are written when `--save` is given
] {
    # the distinct data sizes, used as the x values of the identity line
    let ds = $in | get d | uniq

    let graphs = $in
        | select $name d k bf ff
        | group-by { |it| $"($it.k):($it.ff):($it.bf)" } # one curve per (k, ff, bf)
        | transpose name points
        | update name { # build a LaTeX legend label from the group key
            let res = $in | parse "{k}:{ff}:{bf}" | into record
            $"$k = ($res.k)$, $\\mathbb{F} = $ ($res.ff), $BF = ($res.bf)$"
        }
        | update points {
            rename --column { $name: "y", d: "x" }
            | update y { if $y_type == "duration" { $in / $NB_MS_IN_NS } else { $in } }
            | if $single { update y { |it| $it.y / ($it.k * $it.bf) } } else { $in }
            | if $normalize { update y { |it| $it.y / $it.x } } else { $in }
            | sort-by x
        }
        | insert style { |it|
            # the line style encodes the finite field
            let type = match $it.points.ff.0 {
                "fp128" => "solid",
                "bls12-381" => "dashed",
                _ => "",
            }
            # the color encodes k
            let color = match $it.points.k.0 {
                8 => "tab:blue",
                128 => "tab:green",
                1024 => "tab:orange",
                4096 => "tab:red",
                _ => "grey",
            }
            # the marker shape encodes the blowup factor
            let marker = match $it.points.bf.0 {
                2 => "o",
                4 => "s",
                _ => "*",
            }
            { color: $color, line: { type: $type, marker: { shape: $marker } } }
        }
        | if $identity { append {
            name: "$x \\mapsto x$",
            points: ($ds | wrap x | merge ($ds | wrap y) | if $normalize { update y { |it| $it.y / $it.x } } else { $in }),
            style: { color: "black", line: { type: "dotted" } },
        } } else { $in }
        | reject points.k? points.bf? points.ff? # only x and y are plotted

    # the title doubles as the file name when saving the figure
    let title = [
        $name,
        (if $single { "single" }),
        (if $normalize { "normalized" }),
    ] | compact | str join '_'

    # normalized values are dimensionless, even when y was a duration
    let y_type = if $normalize { "plain" } else { $y_type }

    let options = [
        ...($graphs.points | flatten | into-axis-options -x "filesize" -y $y_type)
        --use-tex
        --y-scale log
        --x-scale log
        --x-scale-base 2
        --y-scale-base 2
        --title $title
        ...(if $save { [ --save ($dump_dir | path join $"($title).png") ] } else {[]})
        --fullscreen
    ]

    $graphs | to json | gplt plot $in ...($options | compact)
}
# Plot figures from a file of FRI benchmark results.
#
# One figure is drawn per column name passed in `...x`, after filtering the
# results on the hash function and the number of FRI queries.
export def main [
    ...x, # the result columns to plot, one figure each
    --file: path, # the file containing the benchmark results (required)
    --y-type: string = "plain", # the type of the y values, forwarded to `plot`
    --single, # forwarded to `plot`: divide y values by `k * bf`
    --identity, # forwarded to `plot`: also draw the line `y = x`
    --normalize, # forwarded to `plot`: divide y values by x values
    --dump-dir: path = "./", # where figures are saved, created if missing
    --save, # forwarded to `plot`: save the figures instead of displaying them
    --hash: string = "sha3-512", # keep only results obtained with this hash function
    --nb-queries: int = 50, # keep only results obtained with this number of queries
] {
    if ($x | is-empty) {
        error make --unspanned { msg: "nothing to do, x is empty" }
    }
    if $file == null {
        error make --unspanned { msg: "missing --file" }
    }

    if not ($dump_dir | path exists) {
        mkdir $dump_dir
    }

    # previously hard-coded to `h == "sha3-512" and q == 50`, now configurable
    # through `--hash` and `--nb-queries`, whose defaults keep the old behavior
    let data = open $file | where h == $hash and q == $nb_queries

    for i in $x {
        $data | plot --save=$save $i --y-type=$y_type --single=$single --identity=$identity --normalize=$normalize --dump-dir=$dump_dir
    }
}
use std iter
# Compute the cartesian product of a list of lists.
#
# # Example
# `cartesian product [[1, 2], [3]]` gives `[[1, 3], [2, 3]]`
def "cartesian product" [
    iters: list # the iterables you want the cartesian product of
]: nothing -> list {
    # recursive helper: the product of `$rest` is the product of its tail,
    # prefixed with every element of its head
    def aux [rest: list]: nothing -> list {
        if ($rest | is-empty) {
            return []
        }

        let head = $rest | first
        let tail_product = aux ($rest | skip 1)

        # NOTE(review): an empty member iterable is skipped instead of
        # collapsing the whole product to `[]`; callers validate non-emptiness
        if ($head | is-empty) {
            return $tail_product
        }
        if ($tail_product | is-empty) {
            # base case: a single iterable left
            return $head
        }

        $head
            | each { |h| $tail_product | each { |t| [$h, $t] } }
            | flatten
            | each { flatten } # turn the nested pairs into flat tuples
    }

    aux $iters
}
# Run a single FRI benchmark.
#
# Returns a record with all numeric results, merged with the parameters.
def run [
    params: record<
        d: filesize, k: int, bf: int, q: int, h: string, ff: string, n: int, rpo: int
    >
] {
    # the arguments of the `fri` example, built from the parameter record
    let args = [
        --data-size ($params.d | into int) # the example expects a number of bytes
        -k $params.k
        --blowup-factor $params.bf
        --remainder-degree-plus-one $params.rpo
        --folding-factor $params.n
        --nb-queries $params.q
        --hash $params.h
        --finite-field $params.ff
    ]

    let raw_output = cargo run --quiet --release --example fri --features fri -- ...$args

    # the example prints one `<name>: <value>` line per measurement: parse
    # these into a single record and attach the input parameters to it
    $raw_output
        | lines
        | parse "{k}: {v}"
        | into int v
        | transpose --header-row
        | into record
        | merge $params
}
# Run the whole grid of FRI benchmarks.
#
# Every flag is a list of values and one benchmark is run per combination of
# the given values. All the flags are required to be non-empty lists.
export def main [
    --data-sizes: list<filesize>,
    --ks: list<int>,
    --blowup-factors: list<int>,
    --nb-queries: list<int>,
    --hashes: list<string>,
    --finite-fields: list<string>,
    --folding-factors: list<int>,
    --remainders: list<int>,
] {
    let all_inputs = [
        $data_sizes, $ks, $blowup_factors, $nb_queries, $hashes, $finite_fields,
        $folding_factors, $remainders
    ]
    if ($all_inputs | any { is-empty }) {
        error make --unspanned { msg: "one of the inputs is empty" }
    }

    # one record per point of the parameter grid, with the short field names
    # expected by `run`, then run them all
    cartesian product $all_inputs
        | each { |values|
            [d, k, bf, q, h, ff, n, rpo]
            | iter zip-into-record $values
            | into record
        }
        | each { |p|
            # print the parameters as NUON so long runs can be followed
            print ($p | to nuon --raw)
            run $p
        }
}
...@@ -2,6 +2,7 @@ use utils log ...@@ -2,6 +2,7 @@ use utils log
use utils math * use utils math *
use utils fs check-file use utils fs check-file
use utils plot [ into-axis-options, COMMON_OPTIONS, gplt ] use utils plot [ into-axis-options, COMMON_OPTIONS, gplt ]
use utils args check-list-arg
use std formats * use std formats *
...@@ -11,22 +12,18 @@ use std formats * ...@@ -11,22 +12,18 @@ use std formats *
# - output: the output path, as NDJSON # - output: the output path, as NDJSON
export def run [ export def run [
--output: path, # the output path (defaults to a random file in $nu.temp-path) --output: path, # the output path (defaults to a random file in $nu.temp-path)
--force, # does not ask for confirmation if the output file already exists, it will be overwritten --no-confirm (-y), # does not ask for confirmation if the output file already exists, it will be overwritten
--nb-measurements: int = 10, # the number of measurements per benchmark run --nb-measurements: int = 10, # the number of measurements per benchmark run
--append, # append to the output path instead of overwritting
]: list<int> -> path { ]: list<int> -> path {
let input = $in $in | check-list-arg --cmd "linalg run" --arg "pipeline input"
if ($input | is-empty) {
print "nothing to do"
return
}
let new_file = $output == null let new_file = $output == null
let output = $output | default (mktemp --tmpdir komodo_linalg.XXXXXX) let output = $output | default (mktemp --tmpdir komodo_linalg.XXXXXX)
let pretty_output = $"(ansi purple)($output)(ansi reset)" let pretty_output = $"(ansi purple)($output)(ansi reset)"
if ($output | path exists) and not $new_file { if ($output | path exists) and not $new_file {
log warning $"($pretty_output) already exists" log warning $"($pretty_output) already exists"
if not $force { if not $no_confirm {
let res = ["no", "yes"] | input list $"Do you want to overwrite ($pretty_output)?" let res = ["no", "yes"] | input list $"Do you want to overwrite ($pretty_output)?"
if $res == null or $res == "no" { if $res == null or $res == "no" {
log info "aborting" log info "aborting"
...@@ -36,10 +33,19 @@ export def run [ ...@@ -36,10 +33,19 @@ export def run [
} }
} }
cargo run --release --package benchmarks --bin linalg -- ...[ let options = [
--release
--package benchmarks
--bin linalg
--
--nb-measurements $nb_measurements --nb-measurements $nb_measurements
...$input ...$in
] out> $output ]
if $append {
cargo run ...$options out>> $output
} else {
cargo run ...$options out> $output
}
log info $"results saved to ($pretty_output)" log info $"results saved to ($pretty_output)"
$output $output
...@@ -89,7 +95,8 @@ export def plot [ ...@@ -89,7 +95,8 @@ export def plot [
| where op == $op | where op == $op
| rename --column { n: "x", mean: "y", stddev: "e" } | rename --column { n: "x", mean: "y", stddev: "e" }
| group-by name --to-table | group-by name --to-table
| rename --column { group: "name", items: "points" } | reject items.name items.op items.times
| rename --column { name: "name", items: "points" }
| insert style.color {|it| | insert style.color {|it|
match $it.name { match $it.name {
"BLS12-381" => "tab:blue" "BLS12-381" => "tab:blue"
......