Skip to content
Merged
4 changes: 3 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

7 changes: 6 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -61,11 +61,15 @@ missing_docs_in_private_items = { level = "allow", priority = 1 }
missing_safety_doc = { level = "deny", priority = 1 }

[profile.release]
debug = true # Generate symbol info for profiling
opt-level = 3
codegen-units = 1
lto = "fat"

# Size-optimized builds for mobile (iOS/Android): cargo build --profile release-mobile
[profile.release-mobile]
inherits = "release"
opt-level = "z"

# Fast release builds for development iteration: cargo build --profile release-fast
[profile.release-fast]
inherits = "release"
Expand Down Expand Up @@ -143,6 +147,7 @@ tracing-subscriber = { version = "0.3.18", features = ["env-filter", "ansi"] }
tracing-tracy = "=0.11.4"
tracy-client = "=0.18.0"
tracy-client-sys = "=0.24.3"
parking_lot = "0.12"
xz2 = "0.1.7"
zerocopy = "0.8.25"
zeroize = "1.8.1"
Expand Down
163 changes: 121 additions & 42 deletions provekit/common/src/file/io/bin.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ use {
HashConfig,
},
anyhow::{ensure, Context as _, Result},
bytes::{Buf, BufMut as _, Bytes, BytesMut},
bytes::{Buf, BufMut as _, Bytes},
serde::{Deserialize, Serialize},
std::{
fs::File,
Expand All @@ -20,69 +20,58 @@ use {
/// MINOR(2) = 20
const HASH_CONFIG_OFFSET: usize = 20;

/// Zstd compression level used for serialization.
const ZSTD_LEVEL: i32 = 3;

/// XZ compression level used for serialization.
const XZ_LEVEL: u32 = 6;

/// Compression algorithm for binary file output.
///
/// The chosen algorithm is NOT recorded in the file header; readers detect it
/// from the compressed payload's leading magic bytes (see `decompress_bytes`).
#[derive(Debug, Clone, Copy)]
pub enum Compression {
    /// Zstandard, compressed at [`ZSTD_LEVEL`].
    Zstd,
    /// XZ (LZMA2), compressed at [`XZ_LEVEL`].
    Xz,
}

/// Compress `data` with the requested algorithm, returning a freshly
/// allocated buffer holding the compressed payload.
fn compress(data: &[u8], compression: Compression) -> Result<Vec<u8>> {
    let compressed = match compression {
        Compression::Zstd => {
            zstd::bulk::compress(data, ZSTD_LEVEL).context("while compressing with zstd")?
        }
        Compression::Xz => {
            let mut output = Vec::new();
            let mut writer = xz2::write::XzEncoder::new(&mut output, XZ_LEVEL);
            writer.write_all(data).context("while compressing with xz")?;
            writer.finish().context("while finishing xz stream")?;
            output
        }
    };
    Ok(compressed)
}

/// Write a compressed binary file.
///
/// The on-disk layout is produced by [`serialize_to_bytes`]: a fixed header
/// (MAGIC(8) + FORMAT(8) + MAJOR(2) + MINOR(2) + HASH_CONFIG(1)) followed by
/// the compressed postcard encoding of `value`.
///
/// # Errors
/// Returns an error if postcard encoding, compression, or any file operation
/// (create, write, sync) fails.
#[instrument(skip(value))]
pub fn write_bin<T: Serialize>(
    value: &T,
    path: &Path,
    format: [u8; 8],
    version: (u16, u16),
    compression: Compression,
    hash_config: Option<HashConfig>,
) -> Result<()> {
    // Build the complete file image in memory so that the bytes written here
    // are byte-for-byte identical to what `serialize_to_bytes` returns.
    let data = serialize_to_bytes(value, format, version, compression, hash_config)?;

    let mut file = File::create(path).context("while creating output file")?;
    file.write_all(&data).context("while writing data")?;
    // Flush to storage before reporting success.
    file.sync_all().context("while syncing output file")?;
    drop(file);

    info!(
        ?path,
        size = data.len(),
        "Wrote {}B to {path:?}",
        human(data.len() as f64)
    );
    Ok(())
}
Expand Down Expand Up @@ -156,6 +145,96 @@ pub fn read_bin<T: for<'a> Deserialize<'a>>(
postcard::from_bytes(&uncompressed).context("while decoding from postcard")
}

/// Serialize a value to bytes in the same format as `write_bin` (header +
/// compressed postcard). The output is byte-for-byte identical to what
/// `write_bin` would write to disk.
pub fn serialize_to_bytes<T: Serialize>(
    value: &T,
    format: [u8; 8],
    (major, minor): (u16, u16),
    compression: Compression,
    hash_config: Option<HashConfig>,
) -> Result<Vec<u8>> {
    let payload = postcard::to_allocvec(value).context("while encoding to postcard")?;
    let compressed = compress(&payload, compression)?;

    // Header layout: MAGIC(8) + FORMAT(8) + MAJOR(2) + MINOR(2) + HASH_CONFIG(1).
    let mut out = Vec::with_capacity(HEADER_SIZE + compressed.len());
    out.extend_from_slice(MAGIC_BYTES);
    out.extend_from_slice(&format);
    out.extend_from_slice(&major.to_le_bytes());
    out.extend_from_slice(&minor.to_le_bytes());
    // 0xff is the sentinel written when no hash configuration is present.
    out.push(hash_config.map_or(0xff, |c| c.to_byte()));
    out.extend_from_slice(&compressed);

    Ok(out)
}

/// Deserialize a value from bytes produced by `serialize_to_bytes` or read
/// from a file written by `write_bin`.
pub fn deserialize_from_bytes<T: for<'a> Deserialize<'a>>(
    data: &[u8],
    format: [u8; 8],
    (major, minor): (u16, u16),
) -> Result<T> {
    // Require the full header plus at least one byte of compressed payload.
    ensure!(
        data.len() > HEADER_SIZE,
        "Data too small ({} bytes, need at least {})",
        data.len(),
        HEADER_SIZE + 1
    );

    let (header_bytes, compressed) = data.split_at(HEADER_SIZE);
    let mut cursor = Bytes::copy_from_slice(header_bytes);

    ensure!(
        cursor.get_bytes::<8>() == MAGIC_BYTES,
        "Invalid magic bytes"
    );
    ensure!(cursor.get_bytes::<8>() == format, "Invalid format");
    ensure!(
        cursor.get_u16_le() == major,
        "Incompatible format major version"
    );
    ensure!(
        cursor.get_u16_le() >= minor,
        "Incompatible format minor version"
    );
    // The hash-configuration byte is recorded in the header but unused here.
    let _ = cursor.get_u8();

    let decoded = decompress_bytes(compressed)?;
    postcard::from_bytes(&decoded).context("while decoding from postcard")
}

/// Detect compression format from bytes and decompress.
///
/// Dispatches on the payload's leading magic bytes: zstd first, then XZ;
/// anything else is rejected.
fn decompress_bytes(data: &[u8]) -> Result<Vec<u8>> {
    ensure!(data.len() >= 6, "Data too small to detect compression");

    if data[..4] == ZSTD_MAGIC {
        let mut decoder = zstd::Decoder::new(data).context("while initializing zstd decoder")?;
        let mut out = Vec::new();
        decoder
            .read_to_end(&mut out)
            .context("while decompressing zstd data")?;
        return Ok(out);
    }

    if data[..6] == XZ_MAGIC {
        let mut decoder = xz2::read::XzDecoder::new(data);
        let mut out = Vec::new();
        decoder
            .read_to_end(&mut out)
            .context("while decompressing XZ data")?;
        return Ok(out);
    }

    anyhow::bail!(
        "Unknown compression format (first bytes: {:02X?})",
        &data[..data.len().min(6)]
    )
}

/// Peek at the first bytes to detect compression format, then
/// stream-decompress.
fn decompress_stream(reader: &mut BufReader<File>) -> Result<Vec<u8>> {
Expand Down
21 changes: 20 additions & 1 deletion provekit/common/src/file/io/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,10 @@ mod json;

use {
self::{
bin::{read_bin, read_hash_config as read_hash_config_bin, write_bin, Compression},
bin::{
deserialize_from_bytes, read_bin, read_hash_config as read_hash_config_bin,
serialize_to_bytes, write_bin, Compression,
},
buf_ext::BufExt,
counting_writer::CountingWriter,
json::{read_json, write_json},
Expand Down Expand Up @@ -134,6 +137,22 @@ pub fn read<T: FileFormat>(path: &Path) -> Result<T> {
}
}

/// Serialize a value to bytes in the same binary format as `write`.
///
/// The output is byte-for-byte identical to what `write` produces on disk
/// (header + compressed postcard); `deserialize` recovers the value.
#[allow(private_bounds)]
pub fn serialize<T: FileFormat + MaybeHashAware>(value: &T) -> Result<Vec<u8>> {
    serialize_to_bytes(
        value,
        T::FORMAT,
        T::VERSION,
        T::COMPRESSION,
        value.maybe_hash_config(),
    )
}

/// Deserialize a value from bytes produced by `serialize` or read from a file
/// written by `write`.
pub fn deserialize<T: FileFormat>(data: &[u8]) -> Result<T> {
    deserialize_from_bytes::<T>(data, T::FORMAT, T::VERSION)
}

/// Read just the hash configuration from a file.
#[instrument()]
pub fn read_hash_config<T: FileFormat>(path: &Path) -> Result<HashConfig> {
Expand Down
8 changes: 8 additions & 0 deletions provekit/common/src/noir_proof_scheme.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,4 +55,12 @@ impl NoirProofScheme {
let r1cs = self.r1cs();
(r1cs.num_constraints(), r1cs.num_witnesses())
}

/// Returns a reference to the program's `noirc_abi::Abi`.
///
/// Delegates to backend-specific storage: the `Noir` variant keeps the ABI
/// inside its witness generator, while `Mavros` stores it directly.
#[must_use]
pub fn abi(&self) -> &noirc_abi::Abi {
    match self {
        NoirProofScheme::Noir(d) => d.witness_generator.abi(),
        NoirProofScheme::Mavros(d) => &d.abi,
    }
}
}
8 changes: 8 additions & 0 deletions provekit/common/src/prover.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ use {
HashConfig, MavrosProver, NoirElement, R1CS,
},
acir::circuit::Program,
noirc_abi::Abi,
serde::{Deserialize, Serialize},
};

Expand Down Expand Up @@ -53,6 +54,13 @@ impl Prover {
}
}

/// Returns a reference to the program's `Abi`.
///
/// Delegates to backend-specific storage: the `Noir` variant keeps the ABI
/// inside its witness generator, while `Mavros` stores it directly.
// `#[must_use]` added for consistency with `NoirProofScheme::abi`.
#[must_use]
pub fn abi(&self) -> &Abi {
    match self {
        Prover::Noir(p) => p.witness_generator.abi(),
        Prover::Mavros(p) => &p.abi,
    }
}

pub fn size(&self) -> (usize, usize) {
match self {
Prover::Noir(p) => (p.r1cs.num_constraints(), p.r1cs.num_witnesses()),
Expand Down
9 changes: 4 additions & 5 deletions tooling/provekit-ffi/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,17 +15,16 @@ crate-type = ["staticlib"]
# Workspace crates
provekit-common.workspace = true
provekit-prover = { workspace = true, features = ["witness-generation", "parallel"] }
provekit-r1cs-compiler = { workspace = true }
provekit-verifier = { workspace = true }

# 3rd party
anyhow.workspace = true
serde_json.workspace = true
parking_lot = "0.12"
noirc_abi.workspace = true
parking_lot.workspace = true

[target.'cfg(unix)'.dependencies]
libc = "0.2"

[lints]
workspace = true

[features]
default = []
Loading
Loading