contribution generated in wasm

Jordi Baylina 2020-05-15 21:30:37 +02:00
parent 9f72725b3d
commit 4c7a37c274
10 changed files with 880 additions and 333 deletions

cli.js

@@ -116,17 +116,17 @@ const commands = [
action: powersOfTawExportChallange
},
{
cmd: "powersoftaw contribute <challange> [response]",
cmd: "powersoftaw challange contribute <challange> [response]",
description: "Contribute to a challange",
alias: ["ptc"],
alias: ["ptcc"],
options: "-verbose|v -entropy|e",
action: powersOfTawContribute
action: powersOfTawChallangeContribute
},
{
cmd: "powersoftaw import <powersoftaw_old.ptaw> <response> <<powersoftaw_new.ptaw>",
description: "import a response to a ptaw file",
alias: ["pti"],
options: "-verbose|v -nopoints -nocheck",
options: "-verbose|v -nopoints -nocheck -description|d -name|n",
action: powersOfTawImport
},
{
@@ -136,6 +136,20 @@ const commands = [
options: "-verbose|v",
action: powersOfTawVerify
},
{
cmd: "powersoftaw beacon <old_powersoftaw.ptaw> <new_powersoftaw.ptaw> <beaconHash(Hex)> <numIterationsExp>",
description: "adds a beacon",
alias: ["ptb"],
options: "-verbose|v -name|n",
action: powersOfTawBeacon
},
{
cmd: "powersoftaw contribute <powersoftaw.ptaw> <new_powersoftaw.ptaw>",
description: "verifies a powers of tau file",
alias: ["ptc"],
options: "-verbose|v -name|n -entropy|e",
action: powersOfTawContribute
},
];
@@ -513,7 +527,7 @@ async function powersOfTawExportChallange(params, options) {
}
async function powersOfTawContribute(params, options) {
async function powersOfTawChallangeContribute(params, options) {
let challangeName;
let responseName;
@@ -525,7 +539,7 @@ async function powersOfTawContribute(params, options) {
responseName = params[1];
}
return await powersOfTaw.contribute(bn128, challangeName, responseName, options.entropy, options.verbose);
return await powersOfTaw.challangeContribute(bn128, challangeName, responseName, options.entropy, options.verbose);
}
@@ -543,7 +557,7 @@ async function powersOfTawImport(params, options) {
if (options.nopoints) importPoints = false;
if (options.nocheck) doCheck = false;
const res = await powersOfTaw.impoertResponse(oldPtauName, response, newPtauName, importPoints, options.verbose);
const res = await powersOfTaw.impoertResponse(oldPtauName, response, newPtauName, options.name, importPoints, options.verbose);
if (res) return res;
if (!doCheck) return;
@@ -564,6 +578,30 @@ async function powersOfTawVerify(params, options) {
}
}
async function powersOfTawBeacon(params, options) {
let oldPtauName;
let newPtauName;
let beaconHashStr;
let numIterationsExp;
oldPtauName = params[0];
newPtauName = params[1];
beaconHashStr = params[2];
numIterationsExp = params[3];
return await powersOfTaw.beacon(oldPtauName, newPtauName, options.name ,numIterationsExp, beaconHashStr, options.verbose);
}
async function powersOfTawContribute(params, options) {
let oldPtauName;
let newPtauName;
oldPtauName = params[0];
newPtauName = params[1];
return await powersOfTaw.contribute(oldPtauName, newPtauName, options.name , options.entropy, options.verbose);
}
function generateVerifier_original(verificationKey) {
let template = fs.readFileSync(path.join( __dirname, "templates", "verifier_original.sol"), "utf-8");
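For orientation, a minimal usage sketch of the two new flows (not part of the commit; the .ptaw file names are made up, and the aggregator module is the src/powersoftau.js shown at the end of this diff):

const powersOfTaw = require("./src/powersoftau");

async function run() {
    // Direct contribution to a ptau file (the new wasm flow):
    // contribute(oldPtauFilename, newPTauFilename, name, entropy, verbose)
    await powersOfTaw.contribute("pot12_0000.ptaw", "pot12_0001.ptaw",
        "First contribution", "some random entropy", true);

    // Beacon contribution: a hex hash plus 2^numIterationsExp SHA-256 rounds.
    // beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp, beaconHashStr, verbose)
    await powersOfTaw.beacon("pot12_0001.ptaw", "pot12_0002.ptaw", "Final beacon",
        10, "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", true);
}

run().then(() => process.exit(0));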

src/keypair.js

@@ -30,9 +30,7 @@ function getG2sp(persinalization, challange, g1s, g1sx) {
}
function createKeyPair(curve, personalization, challangeHash, rng ) {
const k = {};
k.prvKey= curve.Fr.fromRng(rng);
function calculatePubKey(k, curve, personalization, challangeHash, rng ) {
k.g1_s = curve.G1.affine(curve.G1.fromRng(rng));
k.g1_sx = curve.G1.affine(curve.G1.mulScalar(k.g1_s, k.prvKey));
k.g2_sp = curve.G2.affine(getG2sp(personalization, challangeHash, k.g1_s, k.g1_sx));
@@ -41,10 +39,17 @@ function createKeyPair(curve, personalization, challangeHash, rng ) {
}
function createPTauKey(curve, challangeHash, rng) {
const key = {};
key.tau = createKeyPair(curve, 0, challangeHash, rng);
key.alpha = createKeyPair(curve, 1, challangeHash, rng);
key.beta = createKeyPair(curve, 2, challangeHash, rng);
const key = {
tau: {},
alpha: {},
beta: {}
};
key.tau.prvKey = curve.Fr.fromRng(rng);
key.alpha.prvKey = curve.Fr.fromRng(rng);
key.beta.prvKey = curve.Fr.fromRng(rng);
calculatePubKey(key.tau, curve, 0, challangeHash, rng);
calculatePubKey(key.alpha, curve, 1, challangeHash, rng);
calculatePubKey(key.beta, curve, 2, challangeHash, rng);
return key;
}
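The point of this refactor is the RNG draw order: all three private keys are now taken from the stream before any public-key point, so a deterministically seeded RNG (as in the beacon below) reproduces the exact same key. A standalone sketch of why the order matters, using ffjavascript's ChaCha (arbitrary seed, not from the commit):

const ChaCha = require("ffjavascript").ChaCha;

const a = new ChaCha([1, 2, 3, 4, 5, 6, 7, 8]);
const b = new ChaCha([1, 2, 3, 4, 5, 6, 7, 8]);

console.log(a.nextU32() === b.nextU32()); // true: same seed, same order

// Interleaving an extra draw (as the old createKeyPair did between one
// key's prvKey and the next) shifts every draw that follows:
a.nextU32();
console.log(a.nextU32() === b.nextU32()); // false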

src/mpc_applykey.js (new file)

@@ -0,0 +1,125 @@
const buildTaskManager = require("./taskmanager");
/*
This function creates a new section with id sectionId in the fdTo file.
It multiplies the points in fdFrom by first, first*inc, first*inc^2, ...,
nPoints times.
It also updates newChallangeHasher with the new points.
*/
async function applyKey(params) {
const {
fdFrom,
sections,
curve,
fdTo,
sectionId,
NPoints,
G:Gs,
first,
inc,
newChallangeHasher,
responseHasher,
returnPoints,
sectionName,
verbose
} = params;
const G = curve[Gs];
const MAX_CHUNK_SIZE = 1024;
let res = [];
const sG = G.F.n8*2;
const buffU = new ArrayBuffer(sG);
const buffUv = new Uint8Array(buffU);
const scG = G.F.n8;
const buffC = new ArrayBuffer(scG);
const buffCv = new Uint8Array(buffC);
const taskManager = await buildTaskManager(contributeThread, {
ffjavascript: "ffjavascript"
},{
curve: curve.name
});
fdFrom.pos = sections[sectionId][0].p;
await fdTo.writeULE32(sectionId); // Section id
const pSection = fdTo.pos;
await fdTo.writeULE64(0); // Temporarily set to 0 length
let t = first;
let writePointer = fdTo.pos;
let beginWritePointer = fdTo.pos;
for (let i=0; i< NPoints; i+=MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
const n = Math.min(NPoints - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG);
await taskManager.addTask({
cmd: "MUL",
G: Gs,
first: t,
inc: inc.toString(),
buff: buff.slice(),
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
writePointer += n*sG;
}
await taskManager.finish();
const sSize = fdTo.pos - pSection -8;
const lastPos = fdTo.pos;
await fdTo.writeULE64(sSize, pSection);
fdTo.pos = lastPos;
fdTo.pos = beginWritePointer;
for (let i=0; i<NPoints; i++) {
const buff = await fdTo.read(sG);
const P = G.fromRprLEM(buff, 0);
G.toRprBE(buffU, 0, P);
newChallangeHasher.update(buffUv);
G.toRprCompressed(buffC, 0, P);
responseHasher.update(buffCv);
const idx = returnPoints.indexOf(i);
if (idx>=0) res[idx] = P;
}
return res;
}
function contributeThread(ctx, task) {
if (task.cmd == "INIT") {
ctx.assert = ctx.modules.assert;
if (task.curve == "bn128") {
ctx.curve = ctx.modules.ffjavascript.bn128;
} else {
ctx.assert(false, "curve not defined");
}
return {};
} else if (task.cmd == "MUL") {
const G = ctx.curve[task.G];
const sG = G.F.n64*8*2;
const buffDest = new ArrayBuffer(sG*task.n);
let t = ctx.curve.Fr.e(task.first);
let inc = ctx.curve.Fr.e(task.inc);
for (let i=0; i<task.n; i++) {
const P = G.fromRprLEM(task.buff, i*sG);
const R = G.mulScalar(P, t);
G.toRprLEM(buffDest, i*sG, R); // Main thread will convert it to Montgomery
t = ctx.curve.Fr.mul(t, inc);
}
return {
buff: buffDest,
writePos: task.writePos
};
} else {
ctx.assert(false, "Op not implemented");
}
}
module.exports = applyKey;
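In applyKey, point i ends up multiplied by first·inc^i, with the running factor advanced a chunk at a time via t = t·inc^n. A self-contained check of that bookkeeping over bn128 scalars only (a sketch, no curve points involved):

const bn128 = require("ffjavascript").bn128;
const Fr = bn128.Fr;

const first = Fr.e(5);
const inc = Fr.e(3);
const NPoints = 10, CHUNK = 4;

let t = first;
const factors = [];
for (let i = 0; i < NPoints; i += CHUNK) {
    const n = Math.min(NPoints - i, CHUNK);
    let ti = t;
    for (let j = 0; j < n; j++) {       // what each worker applies per point
        factors.push(ti);
        ti = Fr.mul(ti, inc);
    }
    t = Fr.mul(t, Fr.pow(inc, n));      // the per-chunk advance from the code above
}

// factors[k] == first * inc^k for every k:
console.log(Fr.eq(factors[7], Fr.mul(first, Fr.pow(inc, 7)))); // true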

src/powersoftau_beacon.js (new file)

@@ -0,0 +1,167 @@
const Blake2b = require("blake2b-wasm");
const utils = require("./powersoftau_utils");
const applyKey = require("./mpc_applykey");
function hex2ByteArray(s) {
return new Uint8Array(s.match(/[\da-f]{2}/gi).map(function (h) {
return parseInt(h, 16);
}));
}
async function beacon(oldPtauFilename, newPTauFilename, name, numIterationsExp, beaconHashStr, verbose) {
const beaconHash = hex2ByteArray(beaconHashStr);
if ( (beaconHash.byteLength == 0)
|| (beaconHash.byteLength*2 !=beaconHashStr.length))
{
console.log("Invalid Beacon Hash. (It must be a valid hexadecimal sequence)");
return false;
}
if (beaconHash.length>=256) {
console.log("Maximum lenght of beacon hash is 255 bytes");
return false;
}
numIterationsExp = parseInt(numIterationsExp);
if ((numIterationsExp<10)||(numIterationsExp>63)) {
console.log("Invalid numIterationsExp. (Must be between 10 and 63)");
return false;
}
await Blake2b.ready();
const {fd: fdOld, sections} = await utils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
const contributions = await utils.readContributions(fdOld, curve, sections);
const currentContribution = {
name: name,
type: 1, // Beacon
numIterationsExp: numIterationsExp,
beaconHash: beaconHash
};
let lastChallangeHash;
if (contributions.length>0) {
lastChallangeHash = contributions[contributions.length-1].nextChallange;
} else {
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power);
}
currentContribution.key = utils.keyFromBeacon(curve, lastChallangeHash, beaconHash, numIterationsExp);
const fdNew = await utils.createBinFile(newPTauFilename, "ptau", 1, 7);
await utils.writePTauHeader(fdNew, curve, power);
const newChallangeHasher = new Blake2b(64);
newChallangeHasher.update(lastChallangeHash);
const responseHasher = new Blake2b(64);
responseHasher.update(lastChallangeHash);
currentContribution.tauG1 = (await applyKey({
fdFrom: fdOld,
sections,
curve,
fdTo: fdNew,
sectionId: 2,
NPoints: (1 << power) * 2 -1,
G: "G1",
first: curve.Fr.one,
inc: currentContribution.key.tau.prvKey,
newChallangeHasher,
responseHasher,
returnPoints: [1],
sectionName: "tauG1",
verbose
}))[0];
currentContribution.tauG2 = (await applyKey({
fdFrom: fdOld,
sections,
curve,
fdTo: fdNew,
sectionId: 3,
NPoints: 1 << power,
G: "G2",
first: curve.Fr.one,
inc: currentContribution.key.tau.prvKey,
newChallangeHasher,
responseHasher,
returnPoints: [1],
sectionName: "tauG2",
verbose
}))[0];
currentContribution.alphaG1 = (await applyKey({
fdFrom: fdOld,
sections,
curve,
fdTo: fdNew,
sectionId: 4,
NPoints: 1 << power,
G: "G1",
first: currentContribution.key.alpha.prvKey,
inc: currentContribution.key.tau.prvKey,
newChallangeHasher,
responseHasher,
returnPoints: [0],
sectionName: "alphaTauG1",
verbose
}))[0];
currentContribution.betaG1 = (await applyKey({
fdFrom: fdOld,
sections,
curve,
fdTo: fdNew,
sectionId: 5,
NPoints: 1 << power,
G: "G1",
first: currentContribution.key.beta.prvKey,
inc: currentContribution.key.tau.prvKey,
newChallangeHasher,
responseHasher,
returnPoints: [0],
sectionName: "betaTauG1",
verbose
}))[0];
currentContribution.betaG2 = (await applyKey({
fdFrom: fdOld,
sections,
curve,
fdTo: fdNew,
sectionId: 6,
NPoints: 1,
G: "G2",
first: currentContribution.key.beta.prvKey,
inc: currentContribution.key.tau.prvKey,
newChallangeHasher,
responseHasher,
returnPoints: [0],
sectionName: "betaG2",
verbose
}))[0];
currentContribution.nextChallange = newChallangeHasher.digest();
currentContribution.partialHash = responseHasher.getPartialHash();
const buffKey = new ArrayBuffer(curve.F1.n8*2*6+curve.F2.n8*2*3);
utils.toPtauPubKeyRpr(buffKey, 0, curve, currentContribution.key, false);
responseHasher.update(new Uint8Array(buffKey));
const hashResponse = responseHasher.digest();
console.log("Contribution Response Hash imported: ");
console.log(utils.formatHash(hashResponse));
contributions.push(currentContribution);
await utils.writeContributions(fdNew, curve, contributions);
await fdOld.close();
await fdNew.close();
}
module.exports = beacon;
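The hash guard at the top works because hex2ByteArray silently skips anything that is not a hex pair, so a malformed string yields byteLength*2 != s.length. A standalone illustration:

function hex2ByteArray(s) {
    return new Uint8Array(s.match(/[\da-f]{2}/gi).map((h) => parseInt(h, 16)));
}

console.log(hex2ByteArray("0a1b2c"));                          // Uint8Array [ 10, 27, 44 ]
const bad = "0a1g2c";                                          // "1g" is not hex
console.log(hex2ByteArray(bad).byteLength * 2 === bad.length); // false => rejected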

src/powersoftau_challangecontribute.js (new file)

@@ -0,0 +1,287 @@
// Format of the output
// Hash of the last contribution 64 Bytes
// 2^N*2-1 TauG1 Points (compressed)
// 2^N TauG2 Points (compressed)
// 2^N AlphaTauG1 Points (compressed)
// 2^N BetaTauG1 Points (compressed)
// Public Key
// BetaG2 (compressed)
// G1*s (compressed)
// G1*s*tau (compressed)
// G1*t (compressed)
// G1*t*alpha (compressed)
// G1*u (compressed)
// G1*u*beta (compressed)
// G2*sp*tau (compressed)
// G2*tp*alpha (compressed)
// G2*up*beta (compressed)
const fastFile = require("fastfile");
const assert = require("assert");
const blake2b = require("blake2b-wasm");
const readline = require("readline");
const crypto = require("crypto");
const ChaCha = require("ffjavascript").ChaCha;
const fs = require("fs");
const utils = require("./powersoftau_utils");
const buildTaskManager = require("./taskmanager");
const keyPair = require("./keypair");
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout
});
function askEntropy() {
return new Promise((resolve) => {
rl.question("Enter a random text. (Entropy): ", (input) => resolve(input) );
});
}
async function challangeContribute(curve, challangeFilename, responesFileName, entropy, verbose) {
await blake2b.ready();
const MAX_CHUNK_SIZE = 1024;
let stats = await fs.promises.stat(challangeFilename);
const sG1 = curve.F1.n64*8*2;
const scG1 = curve.F1.n64*8; // Compressed size
const sG2 = curve.F2.n64*8*2;
const scG2 = curve.F2.n64*8; // Compressed size
const domainSize = (stats.size + sG1 - 64 - sG2) / (4*sG1 + sG2);
let e = domainSize;
let power = 0;
while (e>1) {
e = e /2;
power += 1;
}
assert(1<<power == domainSize, "Invalid file size");
const fdFrom = await fastFile.readExisting(challangeFilename);
const fdTo = await fastFile.createOverride(responesFileName);
let writePointer = 0;
while (!entropy) {
entropy = await askEntropy();
}
// Calculate the hash
console.log("Hashing challange");
const challangeHasher = blake2b(64);
for (let i=0; i<stats.size; i+= fdFrom.pageSize) {
const s = Math.min(stats.size - i, fdFrom.pageSize);
const buff = await fdFrom.read(s);
challangeHasher.update(new Uint8Array(buff));
}
const challangeHash = challangeHasher.digest();
console.log("Challange Hash: ");
console.log(utils.formatHash(challangeHash));
const claimedHash = new Uint8Array( await fdFrom.read(64, 0));
console.log("Claimed Hash: ");
console.log(utils.formatHash(claimedHash));
const hasher = blake2b(64);
hasher.update(crypto.randomBytes(64));
const enc = new TextEncoder(); // always utf-8
hasher.update(enc.encode(entropy));
const hash = Buffer.from(hasher.digest());
const seed = [];
for (let i=0;i<8;i++) {
seed[i] = hash.readUInt32BE(i*4);
}
const rng = new ChaCha(seed);
const key = keyPair.createPTauKey(curve, challangeHash, rng);
if (verbose) {
["tau", "alpha", "beta"].forEach( (k) => {
console.log(k, ".g1_s_x: " + key[k].g1_s[0].toString(16));
console.log(k, ".g1_s_y: " + key[k].g1_s[1].toString(16));
console.log(k, ".g1_sx_x: " + key[k].g1_sx[0].toString(16));
console.log(k, ".g1_sx_y: " + key[k].g1_sx[1].toString(16));
console.log(k, ".g2_sp_x_c0: " + key[k].g2_sp[0][0].toString(16));
console.log(k, ".g2_sp_x_c1: " + key[k].g2_sp[0][1].toString(16));
console.log(k, ".g2_sp_y_c0: " + key[k].g2_sp[1][0].toString(16));
console.log(k, ".g2_sp_y_c1: " + key[k].g2_sp[1][1].toString(16));
console.log(k, ".g2_spx_x_c0: " + key[k].g2_spx[0][0].toString(16));
console.log(k, ".g2_spx_x_c1: " + key[k].g2_spx[0][1].toString(16));
console.log(k, ".g2_spx_y_c0: " + key[k].g2_spx[1][0].toString(16));
console.log(k, ".g2_spx_y_c1: " + key[k].g2_spx[1][1].toString(16));
console.log("");
});
}
await fdTo.write(challangeHash);
writePointer += 64;
const taskManager = await buildTaskManager(contributeThread, {
ffjavascript: "ffjavascript"
},{
curve: curve.name
});
// TauG1
let t = curve.Fr.e(1);
for (let i=0; i<domainSize*2-1; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("TauG1: " + i);
const n = Math.min(domainSize*2-1 - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG1);
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG1;
}
// TauG2
t = curve.Fr.e(1);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("TauG2: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG2);
await taskManager.addTask({
cmd: "MULG2",
first: t,
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG2;
}
// AlphaTauG1
t = curve.Fr.e(key.alpha.prvKey);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("AlfaTauG1: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG1);
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG1;
}
// BetaTauG1
t = curve.Fr.e(key.beta.prvKey);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("BetaTauG1: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG1);
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG1;
}
// BetaG2
const buffOldBeta = await fdFrom.read(sG2);
const oldBeta = curve.G2.fromRprBE(buffOldBeta);
const newBeta = curve.G2.mulScalar(oldBeta, key.beta.prvKey);
const buffNewBeta = new ArrayBuffer(curve.F2.n8*2);
curve.G2.toRprCompressed(buffNewBeta, 0, newBeta);
await fdTo.write(buffNewBeta, writePointer);
writePointer += scG2;
await taskManager.finish();
//Write Key
fdTo.pos = writePointer;
await utils.writePtauPubKey(fdTo, curve, key);
await fdTo.close();
await fdFrom.close();
}
function contributeThread(ctx, task) {
if (task.cmd == "INIT") {
ctx.assert = ctx.modules.assert;
if (task.curve == "bn128") {
ctx.curve = ctx.modules.ffjavascript.bn128;
} else {
ctx.assert(false, "curve not defined");
}
return {};
} else if (task.cmd == "MULG1") {
const sG1 = ctx.curve.F1.n64*8*2;
const scG1 = ctx.curve.F1.n64*8; // Compressed size
const buffDest = new ArrayBuffer(scG1*task.n);
let t = ctx.curve.Fr.e(task.first);
let inc = ctx.curve.Fr.e(task.inc);
for (let i=0; i<task.n; i++) {
const P = ctx.curve.G1.fromRprBE(task.buff, i*sG1);
const R = ctx.curve.G1.mulScalar(P, t);
ctx.curve.G1.toRprCompressed(buffDest, i*scG1, R);
t = ctx.curve.Fr.mul(t, inc);
}
return {
buff: buffDest,
writePos: task.writePos
};
} else if (task.cmd == "MULG2") {
const sG2 = ctx.curve.F2.n64*8*2;
const scG2 = ctx.curve.F2.n64*8; // Compressed size
const buffDest = new ArrayBuffer(scG2*task.n);
let t = ctx.curve.Fr.e(task.first);
let inc = ctx.curve.Fr.e(task.inc);
for (let i=0; i<task.n; i++) {
const P = ctx.curve.G2.fromRprBE(task.buff, i*sG2);
const R = ctx.curve.G2.mulScalar(P, t);
ctx.curve.G2.toRprCompressed(buffDest, i*scG2, R);
t = ctx.curve.Fr.mul(t, inc);
}
return {
buff: buffDest,
writePos: task.writePos
};
} else {
ctx.assert(false, "Op not implemented");
}
}
module.exports = challangeContribute;
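The power is recovered purely from the challenge file size: 64 bytes of hash, 2·2^power − 1 TauG1 points, 2^power each of TauG2, AlphaTauG1 and BetaTauG1, and one BetaG2, all uncompressed. That layout rearranges to the domainSize formula above; a standalone check with bn128 sizes (sketch):

const sG1 = 64, sG2 = 128;   // uncompressed bn128 point sizes

function domainSizeFromFileSize(size) {
    return (size + sG1 - 64 - sG2) / (4 * sG1 + sG2);
}

// Rebuild the expected size for power = 12 and invert it:
const power = 12, d = 1 << power;
const size = 64 + (2 * d - 1) * sG1 + d * sG2 + d * sG1 + d * sG1 + sG2;
console.log(domainSizeFromFileSize(size) === d); // true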

src/powersoftau_contribute.js

@@ -1,33 +1,10 @@
// Format of the output
// Hash of the last contribution 64 Bytes
// 2^N*2-1 TauG1 Points (compressed)
// 2^N TauG2 Points (compressed)
// 2^N AlphaTauG1 Points (compressed)
// 2^N BetaTauG1 Points (compressed)
// Public Key
// BetaG2 (compressed)
// G1*s (compressed)
// G1*s*tau (compressed)
// G1*t (compressed)
// G1*t*alpha (compressed)
// G1*u (compressed)
// G1*u*beta (compressed)
// G2*sp*tau (compressed)
// G2*tp*alpha (compressed)
// G2*up*beta (compressed)
const fastFile = require("fastfile");
const assert = require("assert");
const blake2b = require("blake2b-wasm");
const readline = require("readline");
const crypto = require("crypto");
const ChaCha = require("ffjavascript").ChaCha;
const fs = require("fs");
const Blake2b = require("blake2b-wasm");
const utils = require("./powersoftau_utils");
const buildTaskManager = require("./taskmanager");
const wasmSnark = require("wasmsnark");
const ChaCha = require("ffjavascript").ChaCha;
const crypto = require("crypto");
const keyPair = require("./keypair");
const readline = require("readline");
const rl = readline.createInterface({
@@ -41,245 +18,125 @@ function askEntropy() {
});
}
async function contribute(oldPtauFilename, newPTauFilename, name, entropy, verbose) {
await Blake2b.ready();
async function contribute(curve, challangeFilename, responesFileName, entropy, verbose) {
await blake2b.ready();
const {fd: fdOld, sections} = await utils.readBinFile(oldPtauFilename, "ptau", 1);
const {curve, power} = await utils.readPTauHeader(fdOld, sections);
if (curve.name == "bn128") {
wasmCurve = await wasmSnark.buildBn128();
} else {
throw new Error("Curve not supported");
}
const contributions = await utils.readContributions(fdOld, curve, sections);
const curContribution = {
name: name,
type: 0, // Contribution
};
const MAX_CHUNK_SIZE = 1024;
let lastChallangeHash;
let stats = await fs.promises.stat(challangeFilename);
const sG1 = curve.F1.n64*8*2;
const scG1 = curve.F1.n64*8; // Compressed size
const sG2 = curve.F2.n64*8*2;
const scG2 = curve.F2.n64*8; // Compressed size
const domainSize = (stats.size + sG1 - 64 - sG2) / (4*sG1 + sG2);
let e = domainSize;
let power = 0;
while (e>1) {
e = e /2;
power += 1;
if (contributions.length>0) {
lastChallangeHash = contributions[contributions.length-1].nextChallange;
} else {
lastChallangeHash = utils.calculateFirstChallangeHash(curve, power);
}
assert(1<<power == domainSize, "Invalid file size");
const fdFrom = await fastFile.readExisting(challangeFilename);
const fdTo = await fastFile.createOverride(responesFileName);
let writePointer = 0;
// Generate a random key
while (!entropy) {
entropy = await askEntropy();
}
// Calculate the hash
console.log("Hashing challange");
const challangeHasher = blake2b(64);
for (let i=0; i<stats.size; i+= fdFrom.pageSize) {
const s = Math.min(stats.size - i, fdFrom.pageSize);
const buff = await fdFrom.read(s);
challangeHasher.update(new Uint8Array(buff));
}
const challangeHash = challangeHasher.digest();
console.log("Challange Hash: ");
console.log(utils.formatHash(challangeHash));
const claimedHash = new Uint8Array( await fdFrom.read(64, 0));
console.log("Claimed Hash: ");
console.log(utils.formatHash(claimedHash));
const hasher = blake2b(64);
const hasher = Blake2b(64);
hasher.update(crypto.randomBytes(64));
const enc = new TextEncoder(); // always utf-8
hasher.update(enc.encode(entropy));
const hash = Buffer.from(hasher.digest());
const seed = [];
for (let i=0;i<8;i++) {
seed[i] = hash.readUInt32BE(i*4);
}
const rng = new ChaCha(seed);
const key = keyPair.createPTauKey(curve, challangeHash, rng);
if (verbose) {
["tau", "alpha", "beta"].forEach( (k) => {
console.log(k, ".g1_s_x: " + key[k].g1_s[0].toString(16));
console.log(k, ".g1_s_y: " + key[k].g1_s[1].toString(16));
console.log(k, ".g1_sx_x: " + key[k].g1_sx[0].toString(16));
console.log(k, ".g1_sx_y: " + key[k].g1_sx[1].toString(16));
console.log(k, ".g2_sp_x_c0: " + key[k].g2_sp[0][0].toString(16));
console.log(k, ".g2_sp_x_c1: " + key[k].g2_sp[0][1].toString(16));
console.log(k, ".g2_sp_y_c0: " + key[k].g2_sp[1][0].toString(16));
console.log(k, ".g2_sp_y_c1: " + key[k].g2_sp[1][1].toString(16));
console.log(k, ".g2_spx_x_c0: " + key[k].g2_spx[0][0].toString(16));
console.log(k, ".g2_spx_x_c1: " + key[k].g2_spx[0][1].toString(16));
console.log(k, ".g2_spx_y_c0: " + key[k].g2_spx[1][0].toString(16));
console.log(k, ".g2_spx_y_c1: " + key[k].g2_spx[1][1].toString(16));
console.log("");
});
}
// const rng = new ChaCha(seed);
const rng = new ChaCha();
curContribution.key = keyPair.createPTauKey(curve, lastChallangeHash, rng);
await fdTo.write(challangeHash);
writePointer += 64;
const newChallangeHasher = new Blake2b(64);
newChallangeHasher.update(lastChallangeHash);
const taskManager = await buildTaskManager(contributeThread, {
ffjavascript: "ffjavascript"
},{
curve: curve.name
});
const responseHasher = new Blake2b(64);
responseHasher.update(lastChallangeHash);
// TauG1
let t = curve.Fr.e(1);
for (let i=0; i<domainSize*2-1; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("TauG1: " + i);
const n = Math.min(domainSize*2-1 - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG1);
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG1;
}
const fdNew = await utils.createBinFile(newPTauFilename, "ptau", 1, 7);
await utils.writePTauHeader(fdNew, curve, power);
// TauG2
t = curve.Fr.e(1);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("TauG2: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG2);
await taskManager.addTask({
cmd: "MULG2",
first: t,
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG2;
}
let firstPoints;
firstPoints = await processSection(2, "G1", (1<<power) * 2 -1, curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG1" );
curContribution.tauG1 = firstPoints[1];
firstPoints = await processSection(3, "G2", (1<<power) , curve.Fr.e(1), curContribution.key.tau.prvKey, "tauG2" );
curContribution.tauG2 = firstPoints[1];
firstPoints = await processSection(4, "G1", (1<<power) , curContribution.key.alpha.prvKey, curContribution.key.tau.prvKey, "alphaTauG1" );
curContribution.alphaG1 = firstPoints[0];
firstPoints = await processSection(5, "G1", (1<<power) , curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG1" );
curContribution.betaG1 = firstPoints[0];
firstPoints = await processSection(6, "G2", 1, curContribution.key.beta.prvKey, curContribution.key.tau.prvKey, "betaTauG2" );
curContribution.betaG2 = firstPoints[0];
// AlphaTauG1
t = curve.Fr.e(key.alpha.prvKey);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("AlfaTauG1: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG1);
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG1;
}
curContribution.nextChallange = newChallangeHasher.digest();
curContribution.partialHash = responseHasher.getPartialHash();
// BetaTauG1
t = curve.Fr.e(key.beta.prvKey);
for (let i=0; i<domainSize; i += MAX_CHUNK_SIZE) {
if ((verbose)&&i) console.log("BetaTauG1: " + i);
const n = Math.min(domainSize - i, MAX_CHUNK_SIZE);
const buff = await fdFrom.read(n*sG1);
await taskManager.addTask({
cmd: "MULG1",
first: t,
inc: key.tau.prvKey.toString(),
buff: buff,
n: n,
writePos: writePointer
}, async function(r) {
return await fdTo.write(r.buff, r.writePos);
});
t = curve.Fr.mul(t, curve.Fr.pow(key.tau.prvKey, n));
writePointer += n*scG1;
}
const buffKey = new ArrayBuffer(curve.F1.n8*2*6+curve.F2.n8*2*3);
// BetaG2
const buffOldBeta = await fdFrom.read(sG2);
const oldBeta = curve.G2.fromRprBE(buffOldBeta);
const newBeta = curve.G2.mulScalar(oldBeta, key.beta.prvKey);
const buffNewBeta = new ArrayBuffer(curve.F2.n8*2);
curve.G2.toRprCompressed(buffNewBeta, 0, newBeta);
await fdTo.write(buffNewBeta, writePointer);
writePointer += scG2;
utils.toPtauPubKeyRpr(buffKey, 0, curve, curContribution.key, false);
await taskManager.finish();
responseHasher.update(new Uint8Array(buffKey));
const hashResponse = responseHasher.digest();
//Write Key
fdTo.pos = writePointer;
await utils.writePtauPubKey(fdTo, curve, key);
console.log("Contribution Response Hash imported: ");
console.log(utils.formatHash(hashResponse));
contributions.push(curContribution);
await fdTo.close();
await fdFrom.close();
await utils.writeContributions(fdNew, curve, contributions);
}
await fdOld.close();
await fdNew.close();
function contributeThread(ctx, task) {
if (task.cmd == "INIT") {
ctx.assert = ctx.modules.assert;
if (task.curve == "bn128") {
ctx.curve = ctx.modules.ffjavascript.bn128;
} else {
ctx.assert(false, "curve not defined");
return;
async function processSection(sectionId, Gstr, NPoints, first, inc, sectionName) {
const res = [];
fdOld.pos = sections[sectionId][0].p;
await fdNew.writeULE32(sectionId); // Section id
const pSection = fdNew.pos;
await fdNew.writeULE64(0); // Temporarily set to 0 length
const G = curve[Gstr];
const sG = G.F.n8*2;
const chunkSize = (1<<27) / sG; // 128Mb chunks
let t = first;
for (let i=0 ; i<NPoints ; i+= chunkSize) {
if ((verbose)&&i) console.log(`${sectionName}: ` + i);
const n= Math.min(NPoints-i, chunkSize );
const buffIn = await fdOld.read(n * sG);
const buffOutLEM = await G.batchApplyKey(buffIn, t, inc);
const promiseWrite = fdNew.write(buffOutLEM.buffer);
const buffOutU = await G.batchLEMtoU(buffOutLEM);
const buffOutC = await G.batchLEMtoC(buffOutLEM);
newChallangeHasher.update(buffOutU);
responseHasher.update(buffOutC);
await promiseWrite;
if (i==0) // Return the 2 first points.
for (let j=0; j<Math.min(2, NPoints); j++)
res.push(G.fromRprLEM(buffOutLEM.buffer, j*sG));
t = curve.Fr.mul(t, curve.Fr.pow(inc, n));
}
return {};
} else if (task.cmd == "MULG1") {
const sG1 = ctx.curve.F1.n64*8*2;
const scG1 = ctx.curve.F1.n64*8; // Compressed size
const buffDest = new ArrayBuffer(scG1*task.n);
let t = ctx.curve.Fr.e(task.first);
let inc = ctx.curve.Fr.e(task.inc);
for (let i=0; i<task.n; i++) {
const P = ctx.curve.G1.fromRprBE(task.buff, i*sG1);
const R = ctx.curve.G1.mulScalar(P, t);
ctx.curve.G1.toRprCompressed(buffDest, i*scG1, R);
t = ctx.curve.Fr.mul(t, inc);
}
return {
buff: buffDest,
writePos: task.writePos
};
} else if (task.cmd == "MULG2") {
const sG2 = ctx.curve.F2.n64*8*2;
const scG2 = ctx.curve.F2.n64*8; // Compressed size
const buffDest = new ArrayBuffer(scG2*task.n);
let t = ctx.curve.Fr.e(task.first);
let inc = ctx.curve.Fr.e(task.inc);
for (let i=0; i<task.n; i++) {
const P = ctx.curve.G2.fromRprBE(task.buff, i*sG2);
const R = ctx.curve.G2.mulScalar(P, t);
ctx.curve.G2.toRprCompressed(buffDest, i*scG2, R);
t = ctx.curve.Fr.mul(t, inc);
}
return {
buff: buffDest,
writePos: task.writePos
};
} else {
ctx.assert(false, "Op not implemented");
const sSize = fdNew.pos - pSection -8;
const lastPos = fdNew.pos;
await fdNew.writeULE64(sSize, pSection);
fdNew.pos = lastPos;
return res;
}
}
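processSection writes a zero placeholder for the 64-bit section length, streams the points, then back-patches the real length and restores the write position. The same dance against a minimal in-memory stand-in for the file handle (a sketch; only the pos/write/writeULE64 behaviour seen above is assumed of the real API):

function makeFd() {
    const bytes = [];
    return {
        pos: 0,
        write(n) {                       // stand-in: append n payload bytes
            for (let i = 0; i < n; i++) bytes[this.pos++] = 0xaa;
        },
        writeULE64(v, pos) {             // little-endian u64, optionally at pos
            if (pos !== undefined) this.pos = pos;
            for (let i = 0; i < 8; i++) {
                bytes[this.pos + i] = Number((BigInt(v) >> BigInt(8 * i)) & 0xffn);
            }
            this.pos += 8;
        },
        bytes
    };
}

const fd = makeFd();
const pSection = fd.pos;
fd.writeULE64(0);                 // temporarily 0, as above
fd.write(100);                    // ... stream the section data ...
const sSize = fd.pos - pSection - 8;
const lastPos = fd.pos;
fd.writeULE64(sSize, pSection);   // back-patch the real length
fd.pos = lastPos;                 // restore, exactly as processSection does
console.log(sSize, fd.bytes.length); // 100 108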

src/powersoftau_import.js

@@ -4,7 +4,7 @@ const Blake2b = require("blake2b-wasm");
const fs = require("fs");
const utils = require("./powersoftau_utils");
async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, importPoints, verbose) {
async function importResponse(oldPtauFilename, contributionFilename, newPTauFilename, name, importPoints, verbose) {
await Blake2b.ready();
@@ -81,6 +81,7 @@ async function importResponse(oldPtauFilename, contributionFilename, newPTauFile
await fdResponse.close();
await fdNew.close();
await fdOld.close();
async function processSection(fdFrom, fdTo, sectionId, n, G, name, contributionId) {

src/powersoftau_utils.js

@@ -3,6 +3,9 @@ const assert = require("assert");
const Scalar = require("ffjavascript").Scalar;
const bn128 = require("ffjavascript").bn128;
const Blake2b = require("blake2b-wasm");
const ChaCha = require("ffjavascript").ChaCha;
const keyPair = require("./keypair");
const crypto = require("crypto");
async function readBinFile(fileName, type, maxVersion) {
@@ -103,43 +106,6 @@ async function readPtauPubKey(fd, curve, montgomery) {
const buff = await fd.read(curve.F1.n8*2*6 + curve.F2.n8*2*3);
return fromPtauPubKeyRpr(buff, 0, curve, montgomery);
/*
const key = {
tau: {},
alpha: {},
beta: {}
};
key.tau.g1_s = await readG1();
key.tau.g1_sx = await readG1();
key.alpha.g1_s = await readG1();
key.alpha.g1_sx = await readG1();
key.beta.g1_s = await readG1();
key.beta.g1_sx = await readG1();
key.tau.g2_spx = await readG2();
key.alpha.g2_spx = await readG2();
key.beta.g2_spx = await readG2();
return key;
async function readG1() {
const pBuff = await fd.read(curve.F1.n8*2);
if (montgomery) {
return curve.G1.fromRprLEM( pBuff );
} else {
return curve.G1.fromRprBE( pBuff );
}
}
async function readG2() {
const pBuff = await fd.read(curve.F2.n8*2);
if (montgomery) {
return curve.G2.fromRprLEM( pBuff );
} else {
return curve.G2.fromRprBE( pBuff );
}
}
*/
}
function fromPtauPubKeyRpr(buff, pos, curve, montgomery) {
@@ -222,38 +188,6 @@ async function writePtauPubKey(fd, curve, key, montgomery) {
const buff = new ArrayBuffer(curve.F1.n8*2*6 + curve.F2.n8*2*3);
toPtauPubKeyRpr(buff, 0, curve, key, montgomery);
await fd.write(buff);
/*
const buffG1 = new ArrayBuffer(curve.F1.n8*2);
const buffG2 = new ArrayBuffer(curve.F2.n8*2);
await writeG1(key.tau.g1_s);
await writeG1(key.tau.g1_sx);
await writeG1(key.alpha.g1_s);
await writeG1(key.alpha.g1_sx);
await writeG1(key.beta.g1_s);
await writeG1(key.beta.g1_sx);
await writeG2(key.tau.g2_spx);
await writeG2(key.alpha.g2_spx);
await writeG2(key.beta.g2_spx);
async function writeG1(p) {
if (montgomery) {
curve.G1.toRprLEM(buffG1, 0, p);
} else {
curve.G1.toRprBE(buffG1, 0, p);
}
await fd.write(buffG1);
}
async function writeG2(p) {
if (montgomery) {
curve.G2.toRprLEM(buffG2, 0, p);
} else {
curve.G2.toRprBE(buffG2, 0, p);
}
await fd.write(buffG2);
}
*/
}
async function readContribution(fd, curve) {
@@ -267,6 +201,32 @@ async function readContribution(fd, curve) {
c.key = await readPtauPubKey(fd, curve, true);
c.partialHash = new Uint8Array(await fd.read(216));
c.nextChallange = new Uint8Array(await fd.read(64));
c.type = await fd.readULE32();
const paramLength = await fd.readULE32();
const curPos = fd.pos;
let lastType =0;
while (fd.pos-curPos < paramLength) {
const buffType = await readDV(1);
if (buffType[0]<= lastType) throw new Error("Parameters in the contribution must be sorted");
lastType = buffType[0];
if (buffType[0]==1) { // Name
const buffLen = await readDV(1);
const buffStr = await readDV(buffLen[0]);
c.name = new TextDecoder().decode(buffStr);
} else if (buffType[0]==2) {
const buffExp = await readDV(1);
c.numIterationsExp = buffExp[0];
} else if (buffType[0]==3) {
const buffLen = await readDV(1);
c.beaconHash = await readDV(buffLen[0]);
} else {
throw new Error("Parameter not recognized");
}
}
if (fd.pos != curPos + paramLength) {
throw new Error("Parametes do not match");
}
return c;
@@ -279,6 +239,11 @@ async function readContribution(fd, curve) {
const pBuff = await fd.read(curve.F2.n8*2);
return curve.G2.fromRprLEM( pBuff );
}
async function readDV(n) {
const b = await fd.read(n);
return new Uint8Array(b);
}
}
async function readContributions(fd, curve, sections) {
@@ -311,6 +276,30 @@ async function writeContribution(fd, curve, contribution) {
await writePtauPubKey(fd, curve, contribution.key, true);
await fd.write(contribution.partialHash);
await fd.write(contribution.nextChallange);
await fd.writeULE32(contribution.type || 0);
const params = [];
if (contribution.name) {
params.push(1); // Param Name
const nameData = new TextEncoder("utf-8").encode(contribution.name.substring(0,64));
params.push(nameData.byteLength);
for (let i=0; i<nameData.byteLength; i++) params.push(nameData[i]);
}
if (contribution.type == 1) {
params.push(2); // Param numIterationsExp
params.push(contribution.numIterationsExp);
params.push(3); // Beacon Hash
params.push(contribution.beaconHash.byteLength);
for (let i=0; i<contribution.beaconHash.byteLength; i++) params.push(contribution.beaconHash[i]);
}
if (params.length>0) {
const paramsBuff = new Uint8Array(params);
await fd.writeULE32(paramsBuff.byteLength);
await fd.write(paramsBuff);
} else {
await fd.writeULE32(0);
}
async function writeG1(p) {
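The contribution parameters form a sorted, type-tagged byte sequence: 0x01 name (length-prefixed, truncated to 64 bytes), 0x02 numIterationsExp (one byte), 0x03 beaconHash (length-prefixed). A standalone round-trip of that encoding, mirroring the read and write logic above (a sketch, not the committed code):

function encodeParams(c) {
    const params = [];
    if (c.name) {
        const nameData = new TextEncoder().encode(c.name.substring(0, 64));
        params.push(1, nameData.byteLength, ...nameData);
    }
    if (c.type == 1) {
        params.push(2, c.numIterationsExp);
        params.push(3, c.beaconHash.byteLength, ...c.beaconHash);
    }
    return new Uint8Array(params);
}

function decodeParams(buff) {
    const c = {};
    let p = 0, lastType = 0;
    while (p < buff.length) {
        const type = buff[p++];
        if (type <= lastType) throw new Error("Parameters in the contribution must be sorted");
        lastType = type;
        if (type == 1) { const l = buff[p++]; c.name = new TextDecoder().decode(buff.slice(p, p + l)); p += l; }
        else if (type == 2) { c.numIterationsExp = buff[p++]; }
        else if (type == 3) { const l = buff[p++]; c.beaconHash = buff.slice(p, p + l); p += l; }
        else throw new Error("Parameter not recognized");
    }
    return c;
}

const enc = encodeParams({ name: "demo", type: 1, numIterationsExp: 10, beaconHash: new Uint8Array([1, 2, 3]) });
console.log(decodeParams(enc)); // { name: 'demo', numIterationsExp: 10, beaconHash: Uint8Array [ 1, 2, 3 ] }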
@@ -352,7 +341,7 @@ function formatHash(b) {
S += "\t\t";
for (let j=0; j<4; j++) {
if (j>0) S += " ";
S += a.getUint32(i*4+j).toString(16).padStart(8, "0");
S += a.getUint32(i*16+j*4).toString(16).padStart(8, "0");
}
}
return S;
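The formatHash change is a byte-offset fix: DataView.getUint32 takes a byte offset, so row i, word j of the 64-byte digest lives at byte i*16 + j*4, while the old i*4 + j read overlapping words. A quick check:

const a = new DataView(Uint8Array.from({ length: 64 }, (_, i) => i).buffer);
console.log(a.getUint32(1 * 4 + 1).toString(16));      // "5060708" (old indexing, overlaps row 0)
console.log(a.getUint32(1 * 16 + 1 * 4).toString(16)); // "14151617" (fixed indexing)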
@@ -394,6 +383,38 @@ function calculateFirstChallangeHash(curve, power) {
return hasher.digest();
}
function keyFromBeacon(curve, challangeHash, beaconHash, numIterationsExp) {
let nIterationsInner;
let nIterationsOuter;
if (numIterationsExp<32) {
nIterationsInner = (1 << numIterationsExp) >>> 0;
nIterationsOuter = 1;
} else {
nIterationsInner = 0x100000000;
nIterationsOuter = (1 << (numIterationsExp-32)) >>> 0;
}
let curHash = beaconHash;
for (let i=0; i<nIterationsOuter; i++) {
for (let j=0; j<nIterationsInner; j++) {
curHash = crypto.createHash("sha256").update(curHash).digest();
}
}
const curHashV = new DataView(curHash.buffer);
const seed = [];
for (let i=0; i<8; i++) {
seed[i] = curHashV.getUint32(i*4, false);
}
const rng = new ChaCha(seed);
const key = keyPair.createPTauKey(curve, challangeHash, rng);
return key;
}
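keyFromBeacon splits its 2^numIterationsExp SHA-256 rounds into an outer and an inner loop because a 32-bit JS shift cannot express counts of 2^32 and above: for exponents below 32 everything runs in the inner loop, otherwise the inner count is pinned at 2^32 and the outer loop carries the rest. The split in isolation:

function iterationSplit(numIterationsExp) {
    let inner, outer;
    if (numIterationsExp < 32) {
        inner = (1 << numIterationsExp) >>> 0;
        outer = 1;
    } else {
        inner = 0x100000000;                          // 2^32
        outer = (1 << (numIterationsExp - 32)) >>> 0;
    }
    return { inner, outer };                          // inner*outer == 2^numIterationsExp
}

console.log(iterationSplit(10)); // { inner: 1024, outer: 1 }
console.log(iterationSplit(34)); // { inner: 4294967296, outer: 4 }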
module.exports.readBinFile = readBinFile;
module.exports.createBinFile = createBinFile;
module.exports.readPTauHeader = readPTauHeader;
@@ -407,4 +428,5 @@ module.exports.hashIsEqual = hashIsEqual;
module.exports.calculateFirstChallangeHash = calculateFirstChallangeHash;
module.exports.toPtauPubKeyRpr = toPtauPubKeyRpr;
module.exports.fromPtauPubKeyRpr = fromPtauPubKeyRpr;
module.exports.keyFromBeacon = keyFromBeacon;

src/powersoftau_verify.js

@@ -10,7 +10,49 @@ function sameRatio(curve, g1s, g1sx, g2s, g2sx) {
}
function verifyContribution(curve, cur, prev) {
// TODO
if (cur.type == 1) { // Verify the beacon.
const beaconKey = utils.keyFromBeacon(curve, prev.nextChallange, cur.beaconHash, cur.numIterationsExp);
if (!curve.G1.eq(cur.key.tau.g1_s, beaconKey.tau.g1_s)) {
console.log(`BEACON key (tauG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.tau.g1_sx, beaconKey.tau.g1_sx)) {
console.log(`BEACON key (tauG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.tau.g2_spx, beaconKey.tau.g2_spx)) {
console.log(`BEACON key (tauG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.alpha.g1_s, beaconKey.alpha.g1_s)) {
console.log(`BEACON key (alphaG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.alpha.g1_sx, beaconKey.alpha.g1_sx)) {
console.log(`BEACON key (alphaG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.alpha.g2_spx, beaconKey.alpha.g2_spx)) {
console.log(`BEACON key (alphaG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.beta.g1_s, beaconKey.beta.g1_s)) {
console.log(`BEACON key (betaG1_s) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G1.eq(cur.key.beta.g1_sx, beaconKey.beta.g1_sx)) {
console.log(`BEACON key (betaG1_sx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
if (!curve.G2.eq(cur.key.beta.g2_spx, beaconKey.beta.g2_spx)) {
console.log(`BEACON key (betaG2_spx) is not generated correctly in challange #${cur.id} ${cur.name || ""}` );
return false;
}
}
cur.key.tau.g2_sp = keyPair.getG2sp(0, prev.nextChallange, cur.key.tau.g1_s, cur.key.tau.g1_sx);
cur.key.alpha.g2_sp = keyPair.getG2sp(1, prev.nextChallange, cur.key.alpha.g1_s, cur.key.alpha.g1_sx);
@@ -32,22 +74,22 @@ function verifyContribution(curve, cur, prev) {
}
if (!sameRatio(curve, prev.tauG1, cur.tauG1, cur.key.tau.g2_sp, cur.key.tau.g2_spx)) {
console.log("INVALID tau*G1. challange #"+cur.id+"It does not follow the previous contribution");
console.log("INVALID tau*G1. challange #"+cur.id+" It does not follow the previous contribution");
return false;
}
if (!sameRatio(curve, cur.key.tau.g1_s, cur.key.tau.g1_sx, prev.tauG2, cur.tauG2,)) {
console.log("INVALID tau*G2. challange #"+cur.id+"It does not follow the previous contribution");
console.log("INVALID tau*G2. challange #"+cur.id+" It does not follow the previous contribution");
return false;
}
if (!sameRatio(curve, prev.alphaG1, cur.alphaG1, cur.key.alpha.g2_sp, cur.key.alpha.g2_spx)) {
console.log("INVALID alpha*G1. challange #"+cur.id+"It does not follow the previous contribution");
console.log("INVALID alpha*G1. challange #"+cur.id+" It does not follow the previous contribution");
return false;
}
if (!sameRatio(curve, prev.betaG1, cur.betaG1, cur.key.beta.g2_sp, cur.key.beta.g2_spx)) {
console.log("INVALID beta*G1. challange #"+cur.id+"It does not follow the previous contribution");
console.log("INVALID beta*G1. challange #"+cur.id+" It does not follow the previous contribution");
return false;
}
@@ -200,7 +242,7 @@ async function verify(tauFilename, verbose) {
function printContribution(curContr, prevContr) {
console.log("-----------------------------------------------------");
console.log(`Contribution #${curContr.id}:`);
console.log(`Contribution #${curContr.id}: ${curContr.name ||""}`);
console.log("\tNext Challange");
console.log(utils.formatHash(curContr.nextChallange));
@@ -405,13 +447,13 @@ function verifyThread(ctx, task) {
function loadPage(p) {
seed[0] = p;
const rng = new ctx.modules.ffjavascript.ChaCha(seed);
for (let i=0; i<16; i++) {
pages[nextLoad][i] = rng.nextU32();
}
const c = nextLoad;
nextLoad = (nextLoad+1) % nPages;
pageId[nextLoad] = p;
const rng = new ctx.modules.ffjavascript.ChaCha(seed);
for (let i=0; i<16; i++) {
pages[c][i] = rng.nextU32();
}
pageId[c] = p;
return c;
}
@@ -419,7 +461,7 @@ function verifyThread(ctx, task) {
const page = n>>4;
let idx = pageId.indexOf(page);
if (idx < 0) idx = loadPage(page);
return pages[page][n & 0xF] % (NSet-1);
return pages[idx][n & 0xF] % (NSet-1);
};
}
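The verifyThread change fixes the page cache: nextLoad was advanced before the slot was recorded, and the lookup then indexed pages[] by page number instead of by cache slot. The corrected round-robin cache in isolation (a sketch with placeholder page data instead of the ChaCha-filled pages):

const nPages = 4;
const pages = new Array(nPages).fill(null);
const pageId = new Array(nPages).fill(-1);
let nextLoad = 0;

function loadPage(p) {
    const c = nextLoad;                 // capture the slot first
    nextLoad = (nextLoad + 1) % nPages;
    pages[c] = "data-for-page-" + p;
    pageId[c] = p;
    return c;
}

function getPage(p) {
    let idx = pageId.indexOf(p);
    if (idx < 0) idx = loadPage(p);
    return pages[idx];                  // index by slot, not by page number
}

console.log(getPage(7)); // loads into slot 0
console.log(getPage(9)); // loads into slot 1
console.log(getPage(7)); // cache hit in slot 0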

src/powersoftau.js

@@ -1,6 +1,9 @@
module.exports.newAccumulator = require("./powersoftau_new");
module.exports.exportChallange = require("./powersoftau_export");
module.exports.contribute = require("./powersoftau_contribute");
module.exports.challangeContribute = require("./powersoftau_challangecontribute");
module.exports.impoertResponse = require("./powersoftau_import");
module.exports.verify = require("./powersoftau_verify");
module.exports.challangeContribute = require("./powersoftau_challangecontribute");
module.exports.beacon = require("./powersoftau_beacon");
module.exports.contribute = require("./powersoftau_contribute");