Skip to content

Commit ccbeafb

Browse files
Authored commit: feat: unify the metrics across the repo (#21)
1 parent c2359bb commit ccbeafb

File tree

10 files changed

+158
-52
lines changed

10 files changed

+158
-52
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,3 +18,4 @@ dist/
1818
Dockerfile
1919
rust-project.json
2020
tmp/
21+
*.json

binius/src/bin/measure_lookup.rs

Lines changed: 26 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,27 +1,38 @@
11
use anyhow::Error;
2-
use binius::bench::{prove, sha256_with_lookup_prepare, verify};
3-
use utils::bench::measure_peak_memory;
2+
use binius::bench::{prove, sha256_with_lookup_prepare};
3+
use binius_utils::SerializeBytes;
4+
use utils::bench::{SubMetrics, measure_peak_memory, write_json_submetrics};
45

56
fn main() -> Result<(), Error> {
7+
let json_file = "sha2_binius_lookup_submetrics.json";
8+
9+
let input_num_bytes = 2048;
10+
let metrics = benchmark_sha2(input_num_bytes)?;
11+
12+
write_json_submetrics(json_file, &metrics);
13+
14+
Ok(())
15+
}
16+
17+
fn benchmark_sha2(num_bytes: usize) -> Result<SubMetrics, Error> {
18+
let mut metrics = SubMetrics::new(num_bytes);
19+
620
let allocator = bumpalo::Bump::new();
721

822
let ((constraint_system, args, witness, backend), peak_memory) =
923
measure_peak_memory(|| sha256_with_lookup_prepare(&allocator));
24+
metrics.preprocessing_peak_memory = peak_memory;
1025

11-
println!(
12-
"Preprocessing(lookup) peak memory: {} MB",
13-
peak_memory as f32 / (1024.0 * 1024.0),
14-
);
26+
let mut buffer: Vec<u8> = Vec::new();
27+
let _ = constraint_system
28+
.serialize(&mut buffer, binius_utils::SerializationMode::CanonicalTower)
29+
.expect("Failed to serialize constraint system");
30+
metrics.preprocessing_size = buffer.len();
1531

16-
let ((cs, args, proof), peak_memory) =
32+
let ((_, _, proof), peak_memory) =
1733
measure_peak_memory(|| prove(constraint_system, args, witness, backend));
34+
metrics.proving_peak_memory = peak_memory;
35+
metrics.proof_size = proof.get_proof_size();
1836

19-
println!(
20-
"Proving(lookup) peak memory: {} MB",
21-
peak_memory as f32 / (1024.0 * 1024.0)
22-
);
23-
24-
verify(cs, args, proof);
25-
26-
Ok(())
37+
Ok(metrics)
2738
}

binius/src/bin/measure_no_lookup.rs

Lines changed: 26 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,27 +1,38 @@
11
use anyhow::Error;
2-
use binius::bench::{prove, sha256_no_lookup_prepare, verify};
3-
use utils::bench::measure_peak_memory;
2+
use binius::bench::{prove, sha256_no_lookup_prepare};
3+
use binius_utils::SerializeBytes;
4+
use utils::bench::{SubMetrics, measure_peak_memory, write_json_submetrics};
45

56
fn main() -> Result<(), Error> {
7+
let json_file = "sha2_binius_no_lookup_submetrics.json";
8+
9+
let input_num_bytes = 2048;
10+
let metrics = benchmark_sha2(input_num_bytes)?;
11+
12+
write_json_submetrics(json_file, &metrics);
13+
14+
Ok(())
15+
}
16+
17+
fn benchmark_sha2(num_bytes: usize) -> Result<SubMetrics, Error> {
18+
let mut metrics = SubMetrics::new(num_bytes);
19+
620
let allocator = bumpalo::Bump::new();
721

822
let ((constraint_system, args, witness, backend), peak_memory) =
923
measure_peak_memory(|| sha256_no_lookup_prepare(&allocator));
24+
metrics.preprocessing_peak_memory = peak_memory;
1025

11-
println!(
12-
"Preprocessing(no lookup) peak memory: {} MB",
13-
peak_memory as f32 / (1024.0 * 1024.0),
14-
);
26+
let mut buffer: Vec<u8> = Vec::new();
27+
let _ = constraint_system
28+
.serialize(&mut buffer, binius_utils::SerializationMode::CanonicalTower)
29+
.expect("Failed to serialize constraint system");
30+
metrics.preprocessing_size = buffer.len();
1531

16-
let ((cs, args, proof), peak_memory) =
32+
let ((_, _, proof), peak_memory) =
1733
measure_peak_memory(|| prove(constraint_system, args, witness, backend));
34+
metrics.proving_peak_memory = peak_memory;
35+
metrics.proof_size = proof.get_proof_size();
1836

19-
println!(
20-
"Proving(no lookup) peak memory: {} MB",
21-
peak_memory as f32 / (1024.0 * 1024.0)
22-
);
23-
24-
verify(cs, args, proof);
25-
26-
Ok(())
37+
Ok(metrics)
2738
}

guests/src/ecdsa.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
use k256::{
2-
ecdsa::{signature::Verifier, Signature, VerifyingKey},
3-
elliptic_curve::sec1::EncodedPoint,
42
Secp256k1,
3+
ecdsa::{Signature, VerifyingKey, signature::Verifier},
4+
elliptic_curve::sec1::EncodedPoint,
55
};
66
use serde::{Deserialize, Serialize};
77

plonky2/src/bin/measure.rs

Lines changed: 18 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,18 +2,25 @@ use plonky2::{plonk::config::PoseidonGoldilocksConfig, util::serialization::Writ
22
use plonky2_sha256::bench::{prove, sha256_no_lookup_prepare};
33
use plonky2_u32::gates::arithmetic_u32::{U32GateSerializer, U32GeneratorSerializer};
44

5-
use utils::bench::measure_peak_memory;
5+
use utils::bench::{SubMetrics, measure_peak_memory, write_json_submetrics};
66

77
const D: usize = 2;
88
type C = PoseidonGoldilocksConfig;
99

1010
fn main() {
11-
let ((data, pw), peak_memory) = measure_peak_memory(sha256_no_lookup_prepare);
11+
let json_file = "sha2_plonky2_submetrics.json";
1212

13-
println!(
14-
"Preprocessing peak memory: {} GB",
15-
peak_memory as f32 / (1024.0 * 1024.0 * 1024.0),
16-
);
13+
let input_num_bytes = 2048;
14+
let metrics = benchmark_sha2(input_num_bytes);
15+
16+
write_json_submetrics(json_file, &metrics);
17+
}
18+
19+
fn benchmark_sha2(num_bytes: usize) -> SubMetrics {
20+
let mut metrics = SubMetrics::new(num_bytes);
21+
22+
let ((data, pw), peak_memory) = measure_peak_memory(|| sha256_no_lookup_prepare());
23+
metrics.preprocessing_peak_memory = peak_memory;
1724

1825
let gate_serializer = U32GateSerializer;
1926
let common_data_size = data.common.to_bytes(&gate_serializer).unwrap().len();
@@ -28,8 +35,10 @@ fn main() {
2835
"Common data size: {}B, Prover data size: {}B",
2936
common_data_size, prover_data_size
3037
);
38+
metrics.preprocessing_size = prover_data_size + common_data_size;
3139

3240
let (proof, peak_memory) = measure_peak_memory(|| prove(&data.prover_data(), pw));
41+
metrics.proving_peak_memory = peak_memory;
3342

3443
println!(
3544
"Proving peak memory: {} GB",
@@ -39,4 +48,7 @@ fn main() {
3948
let mut buffer = Vec::new();
4049
buffer.write_proof(&proof.proof).unwrap();
4150
println!("Proof size: {} KB", buffer.len() as f32 / 1024.0);
51+
metrics.proof_size = buffer.len();
52+
53+
metrics
4254
}

plonky3-powdr/src/bin/measure.rs

Lines changed: 23 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,39 @@
1-
use sha::bench::{prepare_pipeline, prove, verify};
2-
use utils::bench::measure_peak_memory;
1+
use sha::bench::{prepare_pipeline, prove};
2+
use utils::bench::{SubMetrics, measure_peak_memory, write_json_submetrics};
33

44
fn main() {
5-
let (mut pipeline, peak_memory) = measure_peak_memory(prepare_pipeline);
5+
let json_file = "sha2_plonky3_powdr_submetrics.json";
66

7+
let input_num_bytes = 2048;
8+
let metrics = benchmark_sha2(input_num_bytes);
9+
10+
write_json_submetrics(json_file, &metrics);
11+
}
12+
13+
fn benchmark_sha2(num_bytes: usize) -> SubMetrics {
14+
let mut metrics = SubMetrics::new(num_bytes);
15+
16+
let (mut pipeline, peak_memory) = measure_peak_memory(|| prepare_pipeline());
17+
metrics.preprocessing_peak_memory = peak_memory;
718
println!(
819
"Preprocessing peak memory: {} GB",
920
peak_memory as f32 / (1024.0 * 1024.0 * 1024.0),
1021
);
1122

23+
// Load the proving key and constants from the files.
24+
let pk_bytes = std::fs::read("powdr-target/pkey.bin").expect("Unable to read file");
25+
let constants_bytes = std::fs::read("powdr-target/constants.bin").expect("Unable to read file");
26+
let pil_bytes = std::fs::read("powdr-target/guest.pil").expect("Unable to read file");
27+
metrics.preprocessing_size = pk_bytes.len() + constants_bytes.len() + pil_bytes.len();
28+
1229
let (_, peak_memory) = measure_peak_memory(|| prove(&mut pipeline));
30+
metrics.proving_peak_memory = peak_memory;
31+
metrics.proof_size = pipeline.proof().unwrap().len();
1332

1433
println!(
1534
"Proving peak memory: {} GB",
1635
peak_memory as f32 / (1024.0 * 1024.0 * 1024.0),
1736
);
1837

19-
verify(pipeline);
38+
metrics
2039
}

plonky3-sp1/script/src/bin/measure.rs

Lines changed: 24 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,25 +1,41 @@
11
use sp1_sdk::{ProverClient, SP1Stdin, include_elf};
2-
use utils::bench::measure_peak_memory;
2+
use utils::bench::{SubMetrics, measure_peak_memory, write_json_submetrics};
33

44
/// The ELF (executable and linkable format) file for the Succinct RISC-V zkVM.
55
pub const SHA_ELF: &[u8] = include_elf!("sha-program");
66

77
fn main() {
8+
let json_file = "sha2_plonky3_sp1_submetrics.json";
9+
10+
let input_num_bytes = 2048;
11+
let metrics = benchmark_sha2(input_num_bytes);
12+
13+
write_json_submetrics(json_file, &metrics);
14+
}
15+
16+
fn benchmark_sha2(input_num_bytes: usize) -> SubMetrics {
17+
let mut metrics = SubMetrics::new(input_num_bytes);
18+
19+
// Load the proving key and verifying key from the files.
20+
let pk_bytes = std::fs::read("pk.bin").expect("Unable to read file");
21+
let pk: sp1_sdk::SP1ProvingKey = bincode::deserialize(&pk_bytes).unwrap();
22+
// Load the proof from the file.
23+
let proof_bytes = std::fs::read("proof.bin").expect("Unable to read file");
24+
825
// Setup the prover client.
926
let client = ProverClient::from_env();
1027
let stdin = SP1Stdin::new();
1128

1229
// Setup the program for proving.
1330
let ((_, _), peak_memory) = measure_peak_memory(|| client.setup(SHA_ELF));
1431

32+
metrics.preprocessing_peak_memory = peak_memory;
1533
println!(
1634
"Preprocessing peak memory: {} GB",
1735
peak_memory as f32 / (1024.0 * 1024.0 * 1024.0)
1836
);
1937

20-
// Load the proving key and verifying key from the files.
21-
let pk_bytes = std::fs::read("pk.bin").expect("Unable to read file");
22-
let pk: sp1_sdk::SP1ProvingKey = bincode::deserialize(&pk_bytes).unwrap();
38+
metrics.preprocessing_size = pk_bytes.len() + SHA_ELF.len();
2339

2440
// Generate the proof
2541
let (_, peak_memory) = measure_peak_memory(|| {
@@ -28,9 +44,13 @@ fn main() {
2844
.run()
2945
.expect("failed to generate proof")
3046
});
47+
metrics.proving_peak_memory = peak_memory;
48+
metrics.proof_size = proof_bytes.len();
3149

3250
println!(
3351
"Proving peak memory: {} GB",
3452
peak_memory as f32 / (1024.0 * 1024.0 * 1024.0),
3553
);
54+
55+
metrics
3656
}

utils/src/bench.rs

Lines changed: 35 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,16 @@
11
use human_repr::{HumanCount, HumanDuration};
22
use serde::Serialize;
3-
use serde_with::{serde_as, DurationNanoSeconds};
3+
use serde_with::{DurationNanoSeconds, serde_as};
44
use std::{
55
fmt::Display,
66
sync::{
7-
atomic::{AtomicBool, AtomicUsize, Ordering},
87
Arc,
8+
atomic::{AtomicBool, AtomicUsize, Ordering},
99
},
1010
thread,
1111
time::Duration,
1212
};
13-
use tabled::{settings::Style, Table, Tabled};
13+
use tabled::{Table, Tabled, settings::Style};
1414

1515
fn get_current_memory_usage() -> Result<usize, std::io::Error> {
1616
unsafe {
@@ -129,3 +129,35 @@ pub fn write_csv(out_path: &str, results: &[Metrics]) {
129129
table.with(Style::modern());
130130
println!("{table}");
131131
}
132+
133+
#[serde_as]
134+
#[derive(Serialize, Tabled)]
135+
pub struct SubMetrics {
136+
#[tabled(display_with = "display_bytes")]
137+
pub input_size: usize,
138+
#[tabled(display_with = "display_bytes")]
139+
pub proof_size: usize,
140+
#[tabled(display_with = "display_bytes")]
141+
pub proving_peak_memory: usize,
142+
#[tabled(display_with = "display_bytes")]
143+
pub preprocessing_size: usize,
144+
#[tabled(display_with = "display_bytes")]
145+
pub preprocessing_peak_memory: usize,
146+
}
147+
148+
impl SubMetrics {
149+
pub fn new(size: usize) -> Self {
150+
SubMetrics {
151+
input_size: size,
152+
proof_size: 0,
153+
proving_peak_memory: 0,
154+
preprocessing_size: 0,
155+
preprocessing_peak_memory: 0,
156+
}
157+
}
158+
}
159+
160+
pub fn write_json_submetrics(output_path: &str, metrics: &SubMetrics) {
161+
let json = serde_json::to_string_pretty(metrics).unwrap();
162+
std::fs::write(output_path, json).unwrap();
163+
}

utils/src/bin/sign_ecdsa.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
use k256::{
2-
ecdsa::{signature::Signer, Signature, SigningKey},
2+
ecdsa::{Signature, SigningKey, signature::Signer},
33
elliptic_curve::rand_core::OsRng,
44
};
55
use std::{fs::File, io::Write};

utils/src/lib.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use guests::ecdsa::EcdsaVerifyInput;
2-
use k256::{ecdsa::Signature, elliptic_curve::sec1::EncodedPoint, Secp256k1};
3-
use rand::{rngs::StdRng, RngCore, SeedableRng};
2+
use k256::{Secp256k1, ecdsa::Signature, elliptic_curve::sec1::EncodedPoint};
3+
use rand::{RngCore, SeedableRng, rngs::StdRng};
44
use std::fs;
55
use std::fs::File;
66
use std::io::Write;

0 commit comments

Comments (0)