commit 10c5010fd9
@@ -6,7 +6,7 @@ homepage = "https://github.com/ebfull/bellman"
 license = "MIT/Apache-2.0"
 name = "bellman"
 repository = "https://github.com/ebfull/bellman"
-version = "0.0.9"
+version = "0.1.0"

 [dependencies]
 rand = "0.4"
@@ -26,9 +26,9 @@ pub use self::verifier::*;

 #[derive(Clone)]
 pub struct Proof<E: Engine> {
-    a: E::G1Affine,
-    b: E::G2Affine,
-    c: E::G1Affine
+    pub a: E::G1Affine,
+    pub b: E::G2Affine,
+    pub c: E::G1Affine
 }

 impl<E: Engine> PartialEq for Proof<E> {
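Note (my sketch, not part of the diff): making these fields public lets code outside the groth16 module assemble or inspect a proof directly, for example when converting from another wire format. A minimal sketch, assuming `a`, `b`, `c` are valid affine points obtained elsewhere and that the `bellman` and `pairing` crates are dependencies:

use bellman::groth16::Proof;
use pairing::Engine;

// Assemble a Proof from points that were deserialized by some other means.
fn assemble_proof<E: Engine>(a: E::G1Affine, b: E::G2Affine, c: E::G1Affine) -> Proof<E> {
    Proof { a, b, c }
}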
@@ -101,28 +101,28 @@ impl<E: Engine> Proof<E> {
 pub struct VerifyingKey<E: Engine> {
     // alpha in g1 for verifying and for creating A/C elements of
     // proof. Never the point at infinity.
-    alpha_g1: E::G1Affine,
+    pub alpha_g1: E::G1Affine,

     // beta in g1 and g2 for verifying and for creating B/C elements
     // of proof. Never the point at infinity.
-    beta_g1: E::G1Affine,
-    beta_g2: E::G2Affine,
+    pub beta_g1: E::G1Affine,
+    pub beta_g2: E::G2Affine,

     // gamma in g2 for verifying. Never the point at infinity.
-    gamma_g2: E::G2Affine,
+    pub gamma_g2: E::G2Affine,

     // delta in g1/g2 for verifying and proving, essentially the magic
     // trapdoor that forces the prover to evaluate the C element of the
     // proof with only components from the CRS. Never the point at
     // infinity.
-    delta_g1: E::G1Affine,
-    delta_g2: E::G2Affine,
+    pub delta_g1: E::G1Affine,
+    pub delta_g2: E::G2Affine,

     // Elements of the form (beta * u_i(tau) + alpha v_i(tau) + w_i(tau)) / gamma
     // for all public inputs. Because all public inputs have a dummy constraint,
     // this is the same size as the number of inputs, and never contains points
     // at infinity.
-    ic: Vec<E::G1Affine>
+    pub ic: Vec<E::G1Affine>
 }

 impl<E: Engine> PartialEq for VerifyingKey<E> {
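For orientation (my note, not part of the diff): these are the standard Groth16 verification-key elements. With public inputs a_0 = 1, a_1, ..., a_l, the verifier checks

    e(A, B) = e(alpha_g1, beta_g2) * e(sum_i a_i * ic[i], gamma_g2) * e(C, delta_g2)

which is why gamma_g2 and delta_g2 must never be the point at infinity and why ic has exactly one entry per input.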
@@ -149,7 +149,7 @@ impl<E: Engine> VerifyingKey<E> {
         writer.write_all(self.gamma_g2.into_uncompressed().as_ref())?;
         writer.write_all(self.delta_g1.into_uncompressed().as_ref())?;
         writer.write_all(self.delta_g2.into_uncompressed().as_ref())?;
-        writer.write_u64::<BigEndian>(self.ic.len() as u64)?;
+        writer.write_u32::<BigEndian>(self.ic.len() as u32)?;
         for ic in &self.ic {
             writer.write_all(ic.into_uncompressed().as_ref())?;
         }
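Note (my sketch, not part of the diff): every vector length prefix in this commit moves from a big-endian u64 to a big-endian u32, written with the byteorder crate; the element encodings themselves are untouched. A minimal standalone illustration of the framing, using plain 96-byte buffers in place of the uncompressed BLS12-381 G1 encodings:

use std::io::{self, Read, Write};
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};

// Write a big-endian u32 element count, then each fixed-size element.
fn write_vec<W: Write>(mut w: W, elems: &[[u8; 96]]) -> io::Result<()> {
    w.write_u32::<BigEndian>(elems.len() as u32)?;
    for e in elems {
        w.write_all(e)?;
    }
    Ok(())
}

// Read the u32 count back, then that many fixed-size elements.
fn read_vec<R: Read>(mut r: R) -> io::Result<Vec<[u8; 96]>> {
    let len = r.read_u32::<BigEndian>()? as usize;
    let mut out = Vec::with_capacity(len);
    for _ in 0..len {
        let mut buf = [0u8; 96];
        r.read_exact(&mut buf)?;
        out.push(buf);
    }
    Ok(out)
}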
@@ -182,7 +182,7 @@ impl<E: Engine> VerifyingKey<E> {
         reader.read_exact(g2_repr.as_mut())?;
         let delta_g2 = g2_repr.into_affine().map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;

-        let ic_len = reader.read_u64::<BigEndian>()? as usize;
+        let ic_len = reader.read_u32::<BigEndian>()? as usize;

         let mut ic = vec![];

@@ -218,23 +218,23 @@ pub struct Parameters<E: Engine> {

     // Elements of the form ((tau^i * t(tau)) / delta) for i between 0 and
     // m-2 inclusive. Never contains points at infinity.
-    h: Arc<Vec<E::G1Affine>>,
+    pub h: Arc<Vec<E::G1Affine>>,

     // Elements of the form (beta * u_i(tau) + alpha v_i(tau) + w_i(tau)) / delta
     // for all auxillary inputs. Variables can never be unconstrained, so this
     // never contains points at infinity.
-    l: Arc<Vec<E::G1Affine>>,
+    pub l: Arc<Vec<E::G1Affine>>,

     // QAP "A" polynomials evaluated at tau in the Lagrange basis. Never contains
     // points at infinity: polynomials that evaluate to zero are omitted from
     // the CRS and the prover can deterministically skip their evaluation.
-    a: Arc<Vec<E::G1Affine>>,
+    pub a: Arc<Vec<E::G1Affine>>,

     // QAP "B" polynomials evaluated at tau in the Lagrange basis. Needed in
     // G1 and G2 for C/B queries, respectively. Never contains points at
     // infinity for the same reason as the "A" polynomials.
-    b_g1: Arc<Vec<E::G1Affine>>,
-    b_g2: Arc<Vec<E::G2Affine>>
+    pub b_g1: Arc<Vec<E::G1Affine>>,
+    pub b_g2: Arc<Vec<E::G2Affine>>
 }

 impl<E: Engine> PartialEq for Parameters<E> {
@@ -256,27 +256,27 @@ impl<E: Engine> Parameters<E> {
     {
         self.vk.write(&mut writer)?;

-        writer.write_u64::<BigEndian>(self.h.len() as u64)?;
+        writer.write_u32::<BigEndian>(self.h.len() as u32)?;
         for g in &self.h[..] {
             writer.write_all(g.into_uncompressed().as_ref())?;
         }

-        writer.write_u64::<BigEndian>(self.l.len() as u64)?;
+        writer.write_u32::<BigEndian>(self.l.len() as u32)?;
         for g in &self.l[..] {
             writer.write_all(g.into_uncompressed().as_ref())?;
         }

-        writer.write_u64::<BigEndian>(self.a.len() as u64)?;
+        writer.write_u32::<BigEndian>(self.a.len() as u32)?;
         for g in &self.a[..] {
             writer.write_all(g.into_uncompressed().as_ref())?;
         }

-        writer.write_u64::<BigEndian>(self.b_g1.len() as u64)?;
+        writer.write_u32::<BigEndian>(self.b_g1.len() as u32)?;
         for g in &self.b_g1[..] {
             writer.write_all(g.into_uncompressed().as_ref())?;
         }

-        writer.write_u64::<BigEndian>(self.b_g2.len() as u64)?;
+        writer.write_u32::<BigEndian>(self.b_g2.len() as u32)?;
         for g in &self.b_g2[..] {
             writer.write_all(g.into_uncompressed().as_ref())?;
         }
@@ -336,35 +336,35 @@ impl<E: Engine> Parameters<E> {
         let mut b_g2 = vec![];

         {
-            let len = reader.read_u64::<BigEndian>()? as usize;
+            let len = reader.read_u32::<BigEndian>()? as usize;
             for _ in 0..len {
                 h.push(read_g1(&mut reader)?);
             }
         }

         {
-            let len = reader.read_u64::<BigEndian>()? as usize;
+            let len = reader.read_u32::<BigEndian>()? as usize;
             for _ in 0..len {
                 l.push(read_g1(&mut reader)?);
             }
         }

         {
-            let len = reader.read_u64::<BigEndian>()? as usize;
+            let len = reader.read_u32::<BigEndian>()? as usize;
             for _ in 0..len {
                 a.push(read_g1(&mut reader)?);
             }
         }

         {
-            let len = reader.read_u64::<BigEndian>()? as usize;
+            let len = reader.read_u32::<BigEndian>()? as usize;
             for _ in 0..len {
                 b_g1.push(read_g1(&mut reader)?);
             }
         }

         {
-            let len = reader.read_u64::<BigEndian>()? as usize;
+            let len = reader.read_u32::<BigEndian>()? as usize;
             for _ in 0..len {
                 b_g2.push(read_g2(&mut reader)?);
             }
@@ -535,7 +535,7 @@ mod test_with_bls12_381 {
         let mut v = vec![];

         params.write(&mut v).unwrap();
-        assert_eq!(v.len(), 2160);
+        assert_eq!(v.len(), 2136);

         let de_params = Parameters::read(&v[..], true).unwrap();
         assert!(params == de_params);
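Note on the new expected length (my arithmetic, not part of the diff): the commit shrinks exactly six length prefixes (ic, h, l, a, b_g1 and b_g2) from 8 bytes to 4 bytes each, so the serialized test parameters lose

    6 prefixes * (8 - 4) bytes = 24 bytes
    2160 - 24 = 2136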
@@ -290,6 +290,12 @@ pub fn create_proof<E, C, P: ParameterSource<E>>(
     let b_g2_inputs = multiexp(&worker, b_g2_inputs_source, b_input_density, input_assignment);
     let b_g2_aux = multiexp(&worker, b_g2_aux_source, b_aux_density, aux_assignment);

+    if vk.delta_g1.is_zero() || vk.delta_g2.is_zero() {
+        // If this element is zero, someone is trying to perform a
+        // subversion-CRS attack.
+        return Err(SynthesisError::UnexpectedIdentity);
+    }
+
     let mut g_a = vk.delta_g1.mul(r);
     g_a.add_assign_mixed(&vk.alpha_g1);
     let mut g_b = vk.delta_g2.mul(s);
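Note (my sketch, not part of the diff): callers can run the same identity check on a freshly deserialized key before proving or verifying, now that the VerifyingKey fields are public. This assumes the pairing crate's CurveAffine trait is in scope for is_zero(), and reuses the error variant the prover returns above:

use bellman::SynthesisError;
use bellman::groth16::VerifyingKey;
use pairing::{CurveAffine, Engine};

// Reject a verifying key whose delta elements are the point at infinity,
// mirroring the prover-side guard added in this commit.
fn check_vk<E: Engine>(vk: &VerifyingKey<E>) -> Result<(), SynthesisError> {
    if vk.delta_g1.is_zero() || vk.delta_g2.is_zero() {
        return Err(SynthesisError::UnexpectedIdentity);
    }
    Ok(())
}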
@@ -7,7 +7,7 @@ extern crate bit_vec;
 extern crate crossbeam;
 extern crate byteorder;

-mod multicore;
+pub mod multicore;
 mod multiexp;
 pub mod domain;
 pub mod groth16;
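Note (my sketch, not part of the diff): exporting multicore lets downstream crates reuse bellman's worker pool. The sketch below assumes the module exposes a Worker type with a new() constructor (the prover passes such a worker to multiexp above); treat the exact API as an assumption rather than something this diff confirms:

use bellman::multicore::Worker;

fn main() {
    // Hypothetical usage: build the thread-pool worker and hand it to
    // code that expects bellman's multicore Worker.
    let worker = Worker::new();
    let _ = &worker;
}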