extern crate powersoftau;
extern crate rand;
extern crate blake2;
extern crate byteorder;
extern crate bellman_ce;

use bellman_ce::domain::{EvaluationDomain, Point};
use bellman_ce::multicore::Worker;
use bellman_ce::pairing::bn256::{Bn256, G1, G2};
use bellman_ce::pairing::{CurveAffine, CurveProjective};

use powersoftau::accumulator::HashWriter;
use powersoftau::batched_accumulator::*;
use powersoftau::bn256::Bn256CeremonyParameters;
use powersoftau::*;

use crate::keypair::*;
use crate::parameters::*;
use crate::utils::*;

use std::fs::{remove_file, OpenOptions};
use std::io::{self, BufWriter, Read, Write};
use std::path::Path;

use memmap::*;
2018-04-04 03:59:39 +03:00
/// Width in bits of the in-memory representation of `T`.
const fn num_bits<T>() -> usize {
    std::mem::size_of::<T>() * 8
}

/// Floor of the base-2 logarithm of `x`.
///
/// # Panics
/// Panics if `x` is zero (the logarithm is undefined there).
fn log_2(x: u64) -> u32 {
    assert!(x > 0);
    // Highest set bit position == bit width minus leading zeros minus one.
    let width = num_bits::<u64>() as u32;
    width - 1 - x.leading_zeros()
}
2018-04-04 03:59:39 +03:00
// Computes the hash of the challenge file for the player,
// given the current state of the accumulator and the last
// response file hash.
fn get_challenge_file_hash (
2019-09-04 13:15:17 +03:00
acc : & mut BachedAccumulator ::< Bn256 , Bn256CeremonyParameters > ,
last_response_file_hash : & [ u8 ; 64 ] ,
is_initial : bool ,
2018-04-04 03:59:39 +03:00
) -> [ u8 ; 64 ]
{
let sink = io ::sink ( ) ;
let mut sink = HashWriter ::new ( sink ) ;
2019-09-04 13:15:17 +03:00
let file_name = " tmp_challenge_file_hash " ;
if Path ::new ( file_name ) . exists ( ) {
remove_file ( file_name ) . unwrap ( ) ;
}
{
let writer = OpenOptions ::new ( )
. read ( true )
. write ( true )
. create_new ( true )
. open ( file_name )
. expect ( " unable to create temporary tmp_challenge_file_hash " ) ;
writer . set_len ( Bn256CeremonyParameters ::ACCUMULATOR_BYTE_SIZE as u64 ) . expect ( " must make output file large enough " ) ;
let mut writable_map = unsafe { MmapOptions ::new ( ) . map_mut ( & writer ) . expect ( " unable to create a memory map for output " ) } ;
( & mut writable_map [ 0 .. ] ) . write ( & last_response_file_hash [ .. ] ) . expect ( " unable to write a default hash to mmap " ) ;
2020-01-08 15:39:29 +03:00
writable_map . flush ( ) . expect ( " unable to write blank hash to challenge file " ) ;
2019-09-04 13:15:17 +03:00
if is_initial {
BachedAccumulator ::< Bn256 , Bn256CeremonyParameters > ::generate_initial ( & mut writable_map , UseCompression ::No ) . expect ( " generation of initial accumulator is successful " ) ;
} else {
acc . serialize (
& mut writable_map ,
UseCompression ::No
) . unwrap ( ) ;
}
writable_map . flush ( ) . expect ( " must flush the memory map " ) ;
}
2018-04-04 03:59:39 +03:00
2019-09-04 13:15:17 +03:00
let mut challenge_reader = OpenOptions ::new ( )
. read ( true )
. open ( file_name ) . expect ( " unable to open temporary tmp_challenge_file_hash " ) ;
let mut contents = vec! [ ] ;
challenge_reader . read_to_end ( & mut contents ) . unwrap ( ) ;
sink . write_all ( & contents )
. unwrap ( ) ;
2018-04-04 03:59:39 +03:00
let mut tmp = [ 0 ; 64 ] ;
tmp . copy_from_slice ( sink . into_hash ( ) . as_slice ( ) ) ;
tmp
}
// Computes the hash of the response file, given the new
// accumulator, the player's public key, and the challenge
// file's hash.
fn get_response_file_hash (
2019-09-04 13:15:17 +03:00
acc : & mut BachedAccumulator ::< Bn256 , Bn256CeremonyParameters > ,
pubkey : & PublicKey ::< Bn256 > ,
2018-04-04 03:59:39 +03:00
last_challenge_file_hash : & [ u8 ; 64 ]
) -> [ u8 ; 64 ]
{
let sink = io ::sink ( ) ;
let mut sink = HashWriter ::new ( sink ) ;
2019-09-04 13:15:17 +03:00
let file_name = " tmp_response_file_hash " ;
if Path ::new ( file_name ) . exists ( ) {
remove_file ( file_name ) . unwrap ( ) ;
}
{
let writer = OpenOptions ::new ( )
. read ( true )
. write ( true )
. create_new ( true )
. open ( file_name )
. expect ( " unable to create temporary tmp_response_file_hash " ) ;
writer . set_len ( Bn256CeremonyParameters ::CONTRIBUTION_BYTE_SIZE as u64 ) . expect ( " must make output file large enough " ) ;
let mut writable_map = unsafe { MmapOptions ::new ( ) . map_mut ( & writer ) . expect ( " unable to create a memory map for output " ) } ;
2018-04-04 03:59:39 +03:00
2019-09-04 13:15:17 +03:00
( & mut writable_map [ 0 .. ] ) . write ( & last_challenge_file_hash [ .. ] ) . expect ( " unable to write a default hash to mmap " ) ;
2020-01-08 15:39:29 +03:00
writable_map . flush ( ) . expect ( " unable to write blank hash to challenge file " ) ;
2019-09-04 13:15:17 +03:00
acc . serialize (
& mut writable_map ,
UseCompression ::Yes
) . unwrap ( ) ;
pubkey . write ::< Bn256CeremonyParameters > ( & mut writable_map , UseCompression ::Yes ) . expect ( " unable to write public key " ) ;
writable_map . flush ( ) . expect ( " must flush the memory map " ) ;
}
let mut challenge_reader = OpenOptions ::new ( )
. read ( true )
. open ( file_name ) . expect ( " unable to open temporary tmp_response_file_hash " ) ;
let mut contents = vec! [ ] ;
challenge_reader . read_to_end ( & mut contents ) . unwrap ( ) ;
sink . write_all ( & contents )
. unwrap ( ) ;
2018-04-04 03:59:39 +03:00
let mut tmp = [ 0 ; 64 ] ;
tmp . copy_from_slice ( sink . into_hash ( ) . as_slice ( ) ) ;
tmp
}
2019-09-04 13:15:17 +03:00
fn new_accumulator_for_verify ( ) -> BachedAccumulator < Bn256 , Bn256CeremonyParameters > {
let file_name = " tmp_initial_challenge " ;
{
if Path ::new ( file_name ) . exists ( ) {
remove_file ( file_name ) . unwrap ( ) ;
}
let file = OpenOptions ::new ( )
. read ( true )
. write ( true )
. create_new ( true )
2020-01-08 15:39:29 +03:00
. open ( file_name )
. expect ( " unable to create `./tmp_initial_challenge` " ) ;
2019-09-04 15:54:48 +03:00
2019-09-04 13:15:17 +03:00
let expected_challenge_length = Bn256CeremonyParameters ::ACCUMULATOR_BYTE_SIZE ;
file . set_len ( expected_challenge_length as u64 ) . expect ( " unable to allocate large enough file " ) ;
let mut writable_map = unsafe { MmapOptions ::new ( ) . map_mut ( & file ) . expect ( " unable to create a memory map " ) } ;
BachedAccumulator ::< Bn256 , Bn256CeremonyParameters > ::generate_initial ( & mut writable_map , UseCompression ::No ) . expect ( " generation of initial accumulator is successful " ) ;
writable_map . flush ( ) . expect ( " unable to flush memmap to disk " ) ;
}
let reader = OpenOptions ::new ( )
. read ( true )
. open ( file_name )
2020-01-08 15:39:29 +03:00
. expect ( " unable open transcript file in this directory " ) ;
2019-09-04 13:15:17 +03:00
let readable_map = unsafe { MmapOptions ::new ( ) . map ( & reader ) . expect ( " unable to create a memory map for input " ) } ;
let initial_accumulator = BachedAccumulator ::deserialize (
& readable_map ,
CheckForCorrectness ::Yes ,
UseCompression ::No ,
) . expect ( " unable to read uncompressed accumulator " ) ;
initial_accumulator
}
2018-04-04 03:59:39 +03:00
fn main ( ) {
2020-01-08 15:39:29 +03:00
let args : Vec < String > = std ::env ::args ( ) . collect ( ) ;
if args . len ( ) ! = 2 {
println! ( " Usage: \n <transcript_file> " ) ;
std ::process ::exit ( exitcode ::USAGE ) ;
}
let transcript_filename = & args [ 1 ] ;
// Try to load transcript file from disk.
2018-04-04 03:59:39 +03:00
let reader = OpenOptions ::new ( )
. read ( true )
2020-01-08 15:39:29 +03:00
. open ( transcript_filename )
. expect ( " unable open transcript file in this directory " ) ;
2019-09-04 13:15:17 +03:00
let transcript_readable_map = unsafe { MmapOptions ::new ( ) . map ( & reader ) . expect ( " unable to create a memory map for input " ) } ;
2018-04-04 03:59:39 +03:00
// Initialize the accumulator
2019-09-04 13:15:17 +03:00
let mut current_accumulator = new_accumulator_for_verify ( ) ;
2018-04-04 03:59:39 +03:00
// The "last response file hash" is just a blank BLAKE2b hash
// at the beginning of the hash chain.
let mut last_response_file_hash = [ 0 ; 64 ] ;
last_response_file_hash . copy_from_slice ( blank_hash ( ) . as_slice ( ) ) ;
// There were 89 rounds.
2019-09-04 13:15:17 +03:00
for i in 0 .. 2 {
2018-04-04 03:59:39 +03:00
// Compute the hash of the challenge file that the player
// should have received.
2019-09-04 13:15:17 +03:00
let file_name = " tmp_response " ;
if Path ::new ( file_name ) . exists ( ) {
remove_file ( file_name ) . unwrap ( ) ;
}
let memory_slice = transcript_readable_map . get ( i * Bn256CeremonyParameters ::CONTRIBUTION_BYTE_SIZE .. ( i + 1 ) * Bn256CeremonyParameters ::CONTRIBUTION_BYTE_SIZE ) . expect ( " must read point data from file " ) ;
let writer = OpenOptions ::new ( )
. read ( true )
. write ( true )
. create_new ( true )
. open ( file_name )
. expect ( " unable to create temporary tmp_response " ) ;
writer . set_len ( Bn256CeremonyParameters ::CONTRIBUTION_BYTE_SIZE as u64 ) . expect ( " must make output file large enough " ) ;
let mut writable_map = unsafe { MmapOptions ::new ( ) . map_mut ( & writer ) . expect ( " unable to create a memory map for output " ) } ;
( & mut writable_map [ 0 .. ] ) . write ( & memory_slice [ .. ] ) . expect ( " unable to write a default hash to mmap " ) ;
writable_map . flush ( ) . expect ( " must flush the memory map " ) ;
let response_readable_map = writable_map . make_read_only ( ) . expect ( " must make a map readonly " ) ;
2018-04-04 03:59:39 +03:00
let last_challenge_file_hash = get_challenge_file_hash (
2019-09-04 13:15:17 +03:00
& mut current_accumulator ,
& last_response_file_hash ,
i = = 0 ,
2018-04-04 03:59:39 +03:00
) ;
// Deserialize the accumulator provided by the player in
// their response file. It's stored in the transcript in
// uncompressed form so that we can more efficiently
// deserialize it.
2019-09-04 13:15:17 +03:00
let mut response_file_accumulator = BachedAccumulator ::deserialize (
& response_readable_map ,
CheckForCorrectness ::Yes ,
UseCompression ::Yes ,
2018-04-04 03:59:39 +03:00
) . expect ( " unable to read uncompressed accumulator " ) ;
2019-09-04 13:15:17 +03:00
let response_file_pubkey = PublicKey ::< Bn256 > ::read ::< Bn256CeremonyParameters > ( & response_readable_map , UseCompression ::Yes ) . unwrap ( ) ;
2018-04-04 03:59:39 +03:00
// Compute the hash of the response file. (we had it in uncompressed
// form in the transcript, but the response file is compressed to save
// participants bandwidth.)
last_response_file_hash = get_response_file_hash (
2019-09-04 13:15:17 +03:00
& mut response_file_accumulator ,
2018-04-04 03:59:39 +03:00
& response_file_pubkey ,
& last_challenge_file_hash
) ;
// Verify the transformation from the previous accumulator to the new
// one. This also verifies the correctness of the accumulators and the
// public keys, with respect to the transcript so far.
if ! verify_transform (
& current_accumulator ,
& response_file_accumulator ,
& response_file_pubkey ,
& last_challenge_file_hash
)
{
println! ( " ... FAILED " ) ;
panic! ( " INVALID RESPONSE FILE! " ) ;
} else {
println! ( " " ) ;
}
current_accumulator = response_file_accumulator ;
}
println! ( " Transcript OK! " ) ;
let worker = & Worker ::new ( ) ;
// Create the parameters for various 2^m circuit depths.
2019-12-05 20:30:09 +03:00
let max_degree = log_2 ( current_accumulator . tau_powers_g2 . len ( ) as u64 ) ;
for m in 0 .. max_degree + 1 {
2018-04-04 03:59:39 +03:00
let paramname = format! ( " phase1radix2m {} " , m ) ;
println! ( " Creating {} " , paramname ) ;
let degree = 1 < < m ;
let mut g1_coeffs = EvaluationDomain ::from_coeffs (
current_accumulator . tau_powers_g1 [ 0 .. degree ] . iter ( )
. map ( | e | Point ( e . into_projective ( ) ) )
. collect ( )
) . unwrap ( ) ;
let mut g2_coeffs = EvaluationDomain ::from_coeffs (
current_accumulator . tau_powers_g2 [ 0 .. degree ] . iter ( )
. map ( | e | Point ( e . into_projective ( ) ) )
. collect ( )
) . unwrap ( ) ;
let mut g1_alpha_coeffs = EvaluationDomain ::from_coeffs (
current_accumulator . alpha_tau_powers_g1 [ 0 .. degree ] . iter ( )
. map ( | e | Point ( e . into_projective ( ) ) )
. collect ( )
) . unwrap ( ) ;
2019-09-04 15:54:48 +03:00
2018-04-04 03:59:39 +03:00
let mut g1_beta_coeffs = EvaluationDomain ::from_coeffs (
current_accumulator . beta_tau_powers_g1 [ 0 .. degree ] . iter ( )
. map ( | e | Point ( e . into_projective ( ) ) )
. collect ( )
) . unwrap ( ) ;
// This converts all of the elements into Lagrange coefficients
// for later construction of interpolation polynomials
g1_coeffs . ifft ( & worker ) ;
g2_coeffs . ifft ( & worker ) ;
g1_alpha_coeffs . ifft ( & worker ) ;
g1_beta_coeffs . ifft ( & worker ) ;
let g1_coeffs = g1_coeffs . into_coeffs ( ) ;
let g2_coeffs = g2_coeffs . into_coeffs ( ) ;
let g1_alpha_coeffs = g1_alpha_coeffs . into_coeffs ( ) ;
let g1_beta_coeffs = g1_beta_coeffs . into_coeffs ( ) ;
assert_eq! ( g1_coeffs . len ( ) , degree ) ;
assert_eq! ( g2_coeffs . len ( ) , degree ) ;
assert_eq! ( g1_alpha_coeffs . len ( ) , degree ) ;
assert_eq! ( g1_beta_coeffs . len ( ) , degree ) ;
// Remove the Point() wrappers
let mut g1_coeffs = g1_coeffs . into_iter ( )
. map ( | e | e . 0 )
. collect ::< Vec < _ > > ( ) ;
let mut g2_coeffs = g2_coeffs . into_iter ( )
. map ( | e | e . 0 )
. collect ::< Vec < _ > > ( ) ;
let mut g1_alpha_coeffs = g1_alpha_coeffs . into_iter ( )
. map ( | e | e . 0 )
. collect ::< Vec < _ > > ( ) ;
let mut g1_beta_coeffs = g1_beta_coeffs . into_iter ( )
. map ( | e | e . 0 )
. collect ::< Vec < _ > > ( ) ;
// Batch normalize
G1 ::batch_normalization ( & mut g1_coeffs ) ;
G2 ::batch_normalization ( & mut g2_coeffs ) ;
G1 ::batch_normalization ( & mut g1_alpha_coeffs ) ;
G1 ::batch_normalization ( & mut g1_beta_coeffs ) ;
// H query of Groth16 needs...
// x^i * (x^m - 1) for i in 0..=(m-2) a.k.a.
// x^(i + m) - x^i for i in 0..=(m-2)
// for radix2 evaluation domains
let mut h = Vec ::with_capacity ( degree - 1 ) ;
for i in 0 .. ( degree - 1 ) {
let mut tmp = current_accumulator . tau_powers_g1 [ i + degree ] . into_projective ( ) ;
let mut tmp2 = current_accumulator . tau_powers_g1 [ i ] . into_projective ( ) ;
tmp2 . negate ( ) ;
tmp . add_assign ( & tmp2 ) ;
h . push ( tmp ) ;
}
// Batch normalize this as well
G1 ::batch_normalization ( & mut h ) ;
// Create the parameter file
let writer = OpenOptions ::new ( )
. read ( false )
. write ( true )
. create_new ( true )
. open ( paramname )
. expect ( " unable to create parameter file in this directory " ) ;
let mut writer = BufWriter ::new ( writer ) ;
// Write alpha (in g1)
// Needed by verifier for e(alpha, beta)
// Needed by prover for A and C elements of proof
writer . write_all (
current_accumulator . alpha_tau_powers_g1 [ 0 ]
. into_uncompressed ( )
. as_ref ( )
) . unwrap ( ) ;
// Write beta (in g1)
// Needed by prover for C element of proof
writer . write_all (
current_accumulator . beta_tau_powers_g1 [ 0 ]
. into_uncompressed ( )
. as_ref ( )
) . unwrap ( ) ;
// Write beta (in g2)
// Needed by verifier for e(alpha, beta)
// Needed by prover for B element of proof
writer . write_all (
current_accumulator . beta_g2
. into_uncompressed ( )
. as_ref ( )
) . unwrap ( ) ;
// Lagrange coefficients in G1 (for constructing
// LC/IC queries and precomputing polynomials for A)
for coeff in g1_coeffs {
// Was normalized earlier in parallel
let coeff = coeff . into_affine ( ) ;
writer . write_all (
coeff . into_uncompressed ( )
. as_ref ( )
) . unwrap ( ) ;
}
// Lagrange coefficients in G2 (for precomputing
// polynomials for B)
for coeff in g2_coeffs {
// Was normalized earlier in parallel
let coeff = coeff . into_affine ( ) ;
writer . write_all (
coeff . into_uncompressed ( )
. as_ref ( )
) . unwrap ( ) ;
}
// Lagrange coefficients in G1 with alpha (for
// LC/IC queries)
for coeff in g1_alpha_coeffs {
// Was normalized earlier in parallel
let coeff = coeff . into_affine ( ) ;
writer . write_all (
coeff . into_uncompressed ( )
. as_ref ( )
) . unwrap ( ) ;
}
// Lagrange coefficients in G1 with beta (for
// LC/IC queries)
for coeff in g1_beta_coeffs {
// Was normalized earlier in parallel
let coeff = coeff . into_affine ( ) ;
writer . write_all (
coeff . into_uncompressed ( )
. as_ref ( )
) . unwrap ( ) ;
}
// Bases for H polynomial computation
for coeff in h {
// Was normalized earlier in parallel
let coeff = coeff . into_affine ( ) ;
writer . write_all (
coeff . into_uncompressed ( )
. as_ref ( )
) . unwrap ( ) ;
}
}
}