36 commits
0c62284 Add transcript (recmo, May 20, 2025)
eb2306e Remove TypeId (recmo, May 21, 2025)
2a84c58 Every interaction is hierarchical (recmo, May 21, 2025)
f02da32 Example ZeroCopy (recmo, May 21, 2025)
d6de11b Transcript to domain separator (recmo, May 23, 2025)
b85385d Domain separators and labels (recmo, May 23, 2025)
1600f55 ProverState and prover random (recmo, May 23, 2025)
349ac7b Implement ProverState traits (recmo, May 27, 2025)
1b9fa68 New approach to traits (recmo, May 27, 2025)
b190f70 Fix build (recmo, May 28, 2025)
4393621 Fix tests (recmo, May 28, 2025)
7432a5a Add back prover tests (recmo, May 28, 2025)
21f04ab Bytes traits (recmo, May 29, 2025)
f813524 Add zerocopy (recmo, May 29, 2025)
b25ad1f Add ark_serialize hints (recmo, May 29, 2025)
c2b8192 Add field_bytes (recmo, May 29, 2025)
c8792f9 Add field_bytes (recmo, May 29, 2025)
827f6f0 Challenge byte extraction (recmo, May 29, 2025)
c7a43b8 Test ark_serialize (recmo, May 29, 2025)
e1ccac2 Add Ark Field challenges (recmo, May 30, 2025)
b0c1ef3 Remove module name from zerocopy (recmo, Jun 2, 2025)
16dea90 Remove module name from bytes (recmo, Jun 2, 2025)
a55e6fd Remove module name from unit (recmo, Jun 2, 2025)
5cccc14 Move unit to codecs (recmo, Jun 2, 2025)
f666944 Unit is associated type (recmo, Jun 2, 2025)
02ce966 Explicit error types (recmo, Jun 2, 2025)
1f8699c Update zeropcopy traits (recmo, Jun 2, 2025)
0d83306 Update arkworks::serialize traits (recmo, Jun 2, 2025)
14d88e3 Update arkworks::field traits (recmo, Jun 2, 2025)
abd05ac Update arkworks::field trait names (recmo, Jun 2, 2025)
43abb51 Simplify VerifierError (recmo, Jun 3, 2025)
3a5fd32 Pattern creation is infallible (recmo, Jun 4, 2025)
f9582c8 Interaction errors are bugs and therefore panic (recmo, Jun 4, 2025)
70b8037 Remove interaction errors (recmo, Jun 4, 2025)
9e97ff6 Simplify traits (recmo, Jun 10, 2025)
39ea768 Static lables (recmo, Jun 11, 2025)
Cargo.toml: 3 changes (1 addition, 2 deletions)
@@ -56,9 +56,8 @@ group = "0.13.0"
hex = "0.4.3"
keccak = "0.1.5"
spongefish = { path = "spongefish" }
pallas = "0.32"
pasta_curves = "0.5.1"
rand = "0.8.5"
rand = "0.9"
rayon = "1.10.0"
sha2 = "0.10.7"
sha3 = "0.10.8"
spongefish/Cargo.toml: 18 changes (15 additions, 3 deletions)
@@ -14,21 +14,33 @@ workspace = true

[dependencies]
zeroize = { workspace = true, features = ["zeroize_derive"] }
-rand = { workspace = true, features = ["getrandom"] }
+rand = { workspace = true, features = ["std_rng", "os_rng"] }
digest = { workspace = true }
# used as default hasher for the prover
keccak = { workspace = true }
# optional dependencies
ark-ff = { workspace = true, optional = true }
ark-ec = { workspace = true, optional = true }
ark-serialize = { workspace = true, features = ["std"], optional = true }
ark-std = { workspace = true, optional = true }
group = { workspace = true, optional = true }
hex = { workspace = true }
zerocopy = { version = "0.8", features = ["std", "derive"] }
postcard = { version = "1.1.1", optional = true }
thiserror = "2.0.12"
sha3.workspace = true
hex-literal = "1.0.0"

[features]
-default = []
+default = ["arkworks-algebra", "arkworks-rand"]
arkworks-rand = [
    # Workaround to also support the arkworks version of rand, as re-exported by ark_std
    "dep:ark-std",
]
arkworks-algebra = ["dep:ark-ff", "dep:ark-ec", "dep:ark-serialize"]
zkcrypto-group = ["dep:group"]
ark-serialize = ["dep:ark-serialize"]
serde-postcard = ["dep:postcard"]
asm = ["keccak/asm", "keccak/simd"]

[dev-dependencies]
@@ -43,10 +55,10 @@ ark-curve25519 = { workspace = true }
bls12_381 = { workspace = true }
ark-bls12-381 = { workspace = true, features = ["std"] }
ark-pallas = { workspace = true, features = ["std"] }
pallas = { workspace = true }
pasta_curves = { workspace = true }
ark-vesta = { workspace = true, features = ["std"] }
sha3 = { workspace = true }
anyhow = { version = "1.0.98", features = ["backtrace"] }

[package.metadata.docs.rs]
rustdoc-args = [
spongefish/examples/bulletproof.rs: 13 changes (9 additions, 4 deletions)
@@ -11,13 +11,18 @@
use ark_ec::{AffineRepr, CurveGroup, PrimeGroup, VariableBaseMSM};
use ark_ff::Field;
use ark_std::log2;
-use rand::rngs::OsRng;
+use rand::Rng;
use spongefish::codecs::arkworks_algebra::{
    CommonGroupToUnit, DomainSeparator, FieldDomainSeparator, FieldToUnitDeserialize,
    FieldToUnitSerialize, GroupDomainSeparator, GroupToUnitDeserialize, GroupToUnitSerialize,
    ProofError, ProofResult, ProverState, UnitToField, VerifierState,
};

// arkworks APIs expect the rand version re-exported by ark_std, so seed its StdRng
// from this crate's rand 0.9 thread RNG instead of passing OsRng directly.
fn ark_rng() -> ark_std::rand::rngs::StdRng {
    let seed: [u8; 32] = rand::rng().random();
    <ark_std::rand::rngs::StdRng as ark_std::rand::SeedableRng>::from_seed(seed)
}

/// The domain separator of a bulletproof.
///
/// Defining this as a trait allows us to "attach" the bulletproof IO to
@@ -188,12 +193,12 @@ fn main() {
    let ab = dot_prod(&a, &b);
    // the generators to be used for a, b, and the inner product, respectively
    let g = (0..a.len())
-        .map(|_| GAffine::rand(&mut OsRng))
+        .map(|_| GAffine::rand(&mut ark_rng()))
        .collect::<Vec<_>>();
    let h = (0..b.len())
-        .map(|_| GAffine::rand(&mut OsRng))
+        .map(|_| GAffine::rand(&mut ark_rng()))
        .collect::<Vec<_>>();
-    let u = GAffine::rand(&mut OsRng);
+    let u = GAffine::rand(&mut ark_rng());

    let generators = (&g[..], &h[..], &u);
    let statement = G::msm_unchecked(&g, &a) + G::msm_unchecked(&h, &b) + u * ab;
spongefish/examples/schnorr.rs: 9 changes (7 additions, 2 deletions)
@@ -20,7 +20,7 @@
/// It can be used to verify a proof.
use ark_ec::{CurveGroup, PrimeGroup};
use ark_std::UniformRand;
-use rand::rngs::OsRng;
+use rand::Rng as _;
use spongefish::codecs::arkworks_algebra::{
    CommonGroupToUnit, DomainSeparator, DuplexSpongeInterface, FieldDomainSeparator,
    FieldToUnitDeserialize, FieldToUnitSerialize, GroupDomainSeparator, GroupToUnitDeserialize,
@@ -63,11 +63,16 @@ where
}
}

fn ark_rng() -> ark_std::rand::rngs::StdRng {
    let seed: [u8; 32] = rand::rng().random();
    <ark_std::rand::rngs::StdRng as ark_std::rand::SeedableRng>::from_seed(seed)
}

/// The key generation algorithm outputs
/// a secret key `sk` in $\mathbb{Z}_p$
/// and its respective public key `pk` in $\mathbb{G}$.
fn keygen<G: CurveGroup>() -> (G::ScalarField, G) {
-    let sk = G::ScalarField::rand(&mut OsRng);
+    let sk = G::ScalarField::rand(&mut ark_rng());
    let pk = G::generator() * sk;
    (sk, pk)
}
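// For context (the rest of this example is unchanged and therefore not shown in the diff):
// schnorr.rs implements a standard Schnorr proof of knowledge of sk, made non-interactive
// through the transcript. With generator g and keypair (sk, pk = g * sk), the prover commits
// K = g * k for a random k, derives a challenge c, and responds with r = k + c * sk; the
// verifier accepts iff g * r == K + pk * c.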
spongefish/src/codecs/arkworks/field.rs: 217 changes (217 additions, 0 deletions; new file)
@@ -0,0 +1,217 @@
//! Derive field elements as challenges.
//! For field elements as (public) messages or hints, see [`super::ark_serialize`].
// TODO: When U = F::BasePrimeField we want to use units directly. Unfortunately, this requires
// specialization.

use ark_ff::{Field, PrimeField};

use crate::{
    codecs::bytes,
    transcript::{self, Label, Length},
    VerifierError,
};

pub trait Pattern {
    fn public_ark_field<F: Field>(&mut self, label: Label);
    fn public_ark_field_many<F: Field>(&mut self, label: Label, count: usize);
    fn message_ark_field<F: Field>(&mut self, label: Label);
    fn message_ark_field_many<F: Field>(&mut self, label: Label, count: usize);
    fn challenge_ark_field<F: Field>(&mut self, label: Label);
    fn challenge_ark_field_many<F: Field>(&mut self, label: Label, count: usize);
}

pub trait Common {
    fn public_ark_field<F: Field>(&mut self, label: Label, value: F) -> F;

    fn public_ark_field_slice<F: Field>(&mut self, label: Label, values: &[F]);

    fn challenge_ark_field<F: Field>(&mut self, label: Label) -> F;

    fn challenge_ark_field_out<F: Field>(&mut self, label: Label, out: &mut [F]);

    fn challenge_ark_field_array<F: Field, const N: usize>(&mut self, label: Label) -> [F; N] {
        let mut result = [F::ZERO; N];
        self.challenge_ark_field_out(label, &mut result);
        result
    }

    fn challenge_ark_field_vec<F: Field>(&mut self, label: Label, size: usize) -> Vec<F> {
        let mut result = vec![F::ZERO; size];
        self.challenge_ark_field_out(label, &mut result);
        result
    }
}

pub trait Prover: Common {
    fn message_ark_field<F: Field>(&mut self, label: Label, value: F) -> F;

    fn message_ark_field_slice<F: Field>(&mut self, label: Label, values: &[F]);
}

pub trait Verifier: Common {
    fn message_ark_field<F: Field>(&mut self, label: Label) -> Result<F, VerifierError>;

    fn message_ark_field_out<F: Field>(
        &mut self,
        label: Label,
        out: &mut [F],
    ) -> Result<(), VerifierError>;

    fn message_ark_field_array<F: Field, const N: usize>(
        &mut self,
        label: Label,
    ) -> Result<[F; N], VerifierError> {
        let mut result = [F::ZERO; N];
        self.message_ark_field_out(label, &mut result)?;
        Ok(result)
    }

    fn message_ark_field_vec<F: Field>(
        &mut self,
        label: Label,
        size: usize,
    ) -> Result<Vec<F>, VerifierError> {
        let mut result = vec![F::ZERO; size];
        self.message_ark_field_out(label, &mut result)?;
        Ok(result)
    }
}

impl<P> Pattern for P
where
    P: transcript::Pattern + bytes::Pattern,
{
    fn challenge_ark_field<F: Field>(&mut self, label: Label) {
        self.begin_challenge::<F>(label, Length::Scalar);
        let base_field_size = bytes_uniform_modp::<F::BasePrimeField>();
        let size = F::extension_degree() as usize * base_field_size;
        self.challenge_bytes("ark-field", size);
        self.end_challenge::<F>(label, Length::Scalar);
    }

    fn challenge_ark_field_many<F: Field>(&mut self, label: Label, count: usize) {
        self.begin_challenge::<F>(label, Length::Fixed(count));
        let base_field_size = bytes_uniform_modp::<F::BasePrimeField>();
        let field_size = F::extension_degree() as usize * base_field_size;
        self.challenge_bytes("ark-field", count * field_size);
        self.end_challenge::<F>(label, Length::Fixed(count));
    }

    fn public_ark_field<F: Field>(&mut self, label: Label) {
        todo!()
    }

    fn public_ark_field_many<F: Field>(&mut self, label: Label, count: usize) {
        todo!()
    }

    fn message_ark_field<F: Field>(&mut self, label: Label) {
        todo!()
    }

    fn message_ark_field_many<F: Field>(&mut self, label: Label, count: usize) {
        todo!()
    }
}

impl<P> Common for P
where
    P: transcript::Common + bytes::Common,
{
    fn challenge_ark_field<F: Field>(&mut self, label: Label) -> F {
        self.begin_challenge::<F>(label, Length::Scalar);
        // Squeeze enough uniform bytes for every base-field coefficient, reduce each
        // big-endian chunk modulo the base prime, and assemble the extension element.
        let base_field_size = bytes_uniform_modp::<F::BasePrimeField>();
        let size = F::extension_degree() as usize * base_field_size;
        let bytes = self.challenge_bytes_vec("ark-field", size);
        let result = F::from_base_prime_field_elems(
            bytes
                .chunks_exact(base_field_size)
                .map(F::BasePrimeField::from_be_bytes_mod_order),
        )
        .expect("Number of field elements should match extension degree");
        self.end_challenge::<F>(label, Length::Scalar);
        result
    }

    fn challenge_ark_field_out<F: Field>(&mut self, label: Label, out: &mut [F]) {
        self.begin_challenge::<F>(label, Length::Fixed(out.len()));
        // Same derivation as the scalar case, applied chunk-wise to fill `out`.
        let base_field_size = bytes_uniform_modp::<F::BasePrimeField>();
        let field_size = F::extension_degree() as usize * base_field_size;
        let bytes = self.challenge_bytes_vec("ark-field", out.len() * field_size);
        for (out, chunk) in out.iter_mut().zip(bytes.chunks_exact(field_size)) {
            *out = F::from_base_prime_field_elems(
                chunk
                    .chunks_exact(base_field_size)
                    .map(F::BasePrimeField::from_be_bytes_mod_order),
            )
            .expect("Number of field elements should match extension degree");
        }
        self.end_challenge::<F>(label, Length::Fixed(out.len()));
    }

    fn public_ark_field<F: Field>(&mut self, label: Label, value: F) -> F {
        todo!()
    }

    fn public_ark_field_slice<F: Field>(&mut self, label: Label, values: &[F]) {
        todo!()
    }
}

/// Number of bytes to sample so that a big-endian integer reduced modulo the field
/// characteristic is statistically close to uniform (roughly 128 bits of headroom
/// over `MODULUS_BIT_SIZE`).
const fn bytes_uniform_modp<F>() -> usize
where
    F: PrimeField,
{
    (F::MODULUS_BIT_SIZE as usize + 128) / 8
}
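// Illustrative arithmetic, assuming the 31-bit BabyBear modulus used in the tests below:
// (31 + 128) / 8 = 19 bytes per base-field coefficient, i.e. a 152-bit integer reduced
// into a 31-bit field, which keeps the statistical bias of the reduction below 2^-120.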

#[cfg(test)]
mod tests {
    use anyhow::Result;

    use super::{super::tests::BabyBear, *};
    use crate::{transcript::PatternState, ProverState, VerifierState};

    #[test]
    fn test_all_ops() -> Result<()> {
        let mut pattern: PatternState = PatternState::new();
        pattern.challenge_ark_field::<BabyBear>("1");
        pattern.challenge_ark_field_many::<BabyBear>("2", 3);
        let pattern = pattern.finalize();
        eprintln!("{pattern}");

        let mut prover: ProverState = ProverState::from(&pattern);
        assert_eq!(
            prover.challenge_ark_field::<BabyBear>("1"),
            BabyBear::from(303345864)
        );
        assert_eq!(
            prover.challenge_ark_field_array::<BabyBear, 3>("2"),
            [
                BabyBear::from(1634935281),
                BabyBear::from(928942326),
                BabyBear::from(42987044)
            ]
        );
        let proof = prover.finalize();
        assert_eq!(hex::encode(&proof), "");

        let mut verifier: VerifierState = VerifierState::new(pattern.into(), &proof);
        assert_eq!(
            verifier.challenge_ark_field::<BabyBear>("1"),
            BabyBear::from(303345864)
        );
        assert_eq!(
            verifier.challenge_ark_field_array::<BabyBear, 3>("2"),
            [
                BabyBear::from(1634935281),
                BabyBear::from(928942326),
                BabyBear::from(42987044)
            ]
        );
        verifier.finalize();

        Ok(())
    }
}