From 8ecb2cfdf378ce67a7efc7e9d24aa90d3dd499f5 Mon Sep 17 00:00:00 2001
From: Jordi Baylina
Date: Sun, 10 May 2020 10:10:42 +0200
Subject: [PATCH] split poweroftau in different sources

---
 src/powersoftau_contribute.js | 301 ++++++++++++++++++
 src/powersoftau_export.js     | 163 ++++++++++
 src/powersoftau_import.js     | 106 +++++++
 src/powersoftau_new.js        | 141 +++++++++
 src/powersoftau_utils.js      |  44 +++
 src/powersoftau_verify.js     |   0
 src/powersoftaw.js            | 575 +---------------------------------
 7 files changed, 758 insertions(+), 572 deletions(-)
 create mode 100644 src/powersoftau_contribute.js
 create mode 100644 src/powersoftau_export.js
 create mode 100644 src/powersoftau_import.js
 create mode 100644 src/powersoftau_new.js
 create mode 100644 src/powersoftau_utils.js
 create mode 100644 src/powersoftau_verify.js

diff --git a/src/powersoftau_contribute.js b/src/powersoftau_contribute.js
new file mode 100644
index 0000000..74dcfbc
--- /dev/null
+++ b/src/powersoftau_contribute.js
@@ -0,0 +1,301 @@
+// Format of the output
+// Hash of the last contribution 64 Bytes
+// 2^N*2-1 TauG1 Points (compressed)
+// 2^N TauG2 Points (compressed)
+// 2^N AlphaTauG1 Points (compressed)
+// 2^N BetaTauG1 Points (compressed)
+// Public Key
+// BetaG2 (compressed)
+// G1*s (compressed)
+// G1*s*tau (compressed)
+// G1*t (compressed)
+// G1*t*alpha (compressed)
+// G1*u (compressed)
+// G1*u*beta (compressed)
+// G2*sp*tau (compressed)
+// G2*tp*alpha (compressed)
+// G2*up*beta (compressed)
+
+const fastFile = require("fastfile");
+const assert = require("assert");
+const blake2b = require("blake2b");
+const readline = require("readline");
+const crypto = require("crypto");
+const ChaCha = require("ffjavascript").ChaCha;
+const fs = require("fs");
+
+const buildTaskManager = require("./taskmanager");
+const keyPair = require("./keypair");
+
+const rl = readline.createInterface({
+    input: process.stdin,
+    output: process.stdout
+});
+
+function askEntropy() {
+    return new Promise((resolve) => {
+        rl.question("Enter a random text. \n(Entropy): ", (input) => resolve(input) );
+    });
+}
+
+async function contribute(curve, challangeFilename, responesFileName, entropy, verbose) {
+    const MAX_CHUNK_SIZE = 1024;
+
+    let stats = await fs.promises.stat(challangeFilename);
+
+    const sG1 = curve.F1.n64*8*2;
+    const scG1 = curve.F1.n64*8; // Compressed size
+    const sG2 = curve.F2.n64*8*2;
+    const scG2 = curve.F2.n64*8; // Compressed size
+    const domainSize = (stats.size + sG1 - 64 - sG2) / (4*sG1 + sG2);
+    let e = domainSize;
+    let power = 0;
+    while (e>1) {
+        e = e /2;
+        power += 1;
+    }
+
+    assert(1<1) assert(false, "Version not supported");
+
+    const nSections = await fdFrom.readULE32();
+
+    // Scan sections
+    let sections = [];
+    for (let i=0; i1) assert(false, "File has more than one header");
+
+    fdFrom.pos = sections[1][0].p;
+    const n8 = await fdFrom.readULE32();
+    const qBuff = await fdFrom.read(n8);
+    const q = Scalar.fromRprLE(qBuff);
+    let curve;
+    if (Scalar.eq(q, bn128.q)) {
+        curve = bn128;
+    } else {
+        assert(false, "Curve not supported");
+    }
+    assert(curve.F1.n64*8 == n8, "Invalid size");
+
+    const power = await fdFrom.readULE32();
+    const nContributions = await fdFrom.readULE32();
+
+    let challangeHash;
+    if (nContributions == 0) {
+        challangeHash = Buffer.from(blake2b(64).digest());
+    } else {
+        assert(false, "Not implemented");
+    }
+
+    const fdTo = await fastFile.createOverride(challangeFilename);
+
+    const toHash = blake2b(64);
+    fdTo.write(challangeHash);
+    toHash.update(challangeHash);
+
+    // Process tauG1
+    if (!sections[2]) assert(false, "File has no tauG1 section");
+    if (sections[2].length>1) assert(false, "File has more than one tauG1 section");
+    fdFrom.pos = sections[2][0].p;
+    const nTauG1 = (1 << power) * 2 -1;
+    for (let i=0; i< nTauG1; i++) {
+        const p = await readG1();
+        await writeG1(p);
+        if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG1: " + i);
+    }
+    if (fdFrom.pos != sections[2][0].p + sections[2][0].size) assert(false, "Invalid tauG1 section size");
+
+    // Process tauG2
+    if (!sections[3]) assert(false, "File has no tauG2 section");
+    if (sections[3].length>1) assert(false, "File has more than one tauG2 section");
+    fdFrom.pos = sections[3][0].p;
+    const nTauG2 = 1 << power ;
+    for (let i=0; i< nTauG2; i++) {
+        const p = await readG2();
+        await writeG2(p);
+        if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG2: " + i);
+    }
+    if (fdFrom.pos != sections[3][0].p + sections[3][0].size) assert(false, "Invalid tauG2 section size");
+
+    // Process alphaTauG1
+    if (!sections[4]) assert(false, "File has no alphaTauG1 section");
+    if (sections[4].length>1) assert(false, "File has more than one alphaTauG1 section");
+    fdFrom.pos = sections[4][0].p;
+    const nAlphaTauG1 = 1 << power ;
+    for (let i=0; i< nAlphaTauG1; i++) {
+        const p = await readG1();
+        await writeG1(p);
+        if ((verbose)&&((i%100000) == 0)&&i) console.log("alphaTauG1: " + i);
+    }
+    if (fdFrom.pos != sections[4][0].p + sections[4][0].size) assert(false, "Invalid alphaTauG1 section size");
+
+    // Process betaTauG1
+    if (!sections[5]) assert(false, "File has no betaTauG1 section");
+    if (sections[5].length>1) assert(false, "File has more than one betaTauG1 section");
+    fdFrom.pos = sections[5][0].p;
+    const nBetaTauG1 = 1 << power ;
+    for (let i=0; i< nBetaTauG1; i++) {
+        const p = await readG1();
+        await writeG1(p);
+        if ((verbose)&&((i%100000) == 0)&&i) console.log("betaTauG1: " + i);
+    }
+    if (fdFrom.pos != sections[5][0].p + sections[5][0].size) assert(false, "Invalid betaTauG1 section size");
+
+    // Process betaG2
+    if (!sections[6]) assert(false, "File has no betaG2 section");
+    if (sections[6].length>1) assert(false, "File has more than one betaG2 section");
+    fdFrom.pos = sections[6][0].p;
+    const betaG2 = await readG2();
+    await writeG2(betaG2);
+    if (fdFrom.pos != sections[6][0].p + sections[6][0].size) assert(false, "Invalid betaG2 section size");
+
+    await fdFrom.close();
+    await fdTo.close();
+
+    const newChallangeHash = toHash.digest("hex");
+
+    console.log("Challange Hash: " +newChallangeHash);
+
+    async function readG1() {
+        const pBuff = await fdFrom.read(curve.F1.n64*8*2);
+        return curve.G1.fromRprLEM( pBuff );
+    }
+
+    async function readG2() {
+        const pBuff = await fdFrom.read(curve.F1.n64*8*2*2);
+        return curve.G2.fromRprLEM( pBuff );
+    }
+
+    async function writeG1(p) {
+        const rpr = curve.G1.toRprBE(p);
+        await fdTo.write(rpr);
+        toHash.update(rpr);
+    }
+
+    async function writeG2(p) {
+        const rpr = curve.G2.toRprBE(p);
+        await fdTo.write(rpr);
+        toHash.update(rpr);
+    }
+
+}
+
+module.exports = exportChallange;
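Both the export code above and the contribute code later in this patch size the challenge file the same way: a 64-byte hash of the previous contribution, then 2^power*2-1 tauG1, 2^power tauG2, 2^power alphaTauG1 and 2^power betaTauG1 points written uncompressed, and a single betaG2 point. The snippet below is not part of the patch; it is a minimal sketch (the challengeSize helper is illustrative, and bn128 sizes are assumed: sG1 = 64 and sG2 = 128 bytes uncompressed) showing why domainSize = (stats.size + sG1 - 64 - sG2) / (4*sG1 + sG2) recovers 2^power:

    // Illustration only: challenge-file size for a given power, and the inverse
    // computation used by contribute() (bn128 point sizes assumed).
    const sG1 = 64;    // uncompressed G1 point: curve.F1.n64*8*2
    const sG2 = 128;   // uncompressed G2 point: curve.F2.n64*8*2

    function challengeSize(power) {
        const n = 1 << power;      // domain size
        return 64                  // hash of the previous contribution
            + (2*n - 1) * sG1      // tauG1
            + n * sG2              // tauG2
            + n * sG1              // alphaTauG1
            + n * sG1              // betaTauG1
            + sG2;                 // betaG2
    }

    // size = 64 + n*(4*sG1 + sG2) - sG1 + sG2, so the division yields n exactly.
    const size = challengeSize(10);
    console.log((size + sG1 - 64 - sG2) / (4*sG1 + sG2));   // 1024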
diff --git a/src/powersoftau_import.js b/src/powersoftau_import.js
new file mode 100644
index 0000000..4be0e3a
--- /dev/null
+++ b/src/powersoftau_import.js
@@ -0,0 +1,106 @@
+
+const assert = require("assert");
+const fastFile = require("fastfile");
+const Scalar = require("Scalar");
+const bn128 = require("ffjavascript").bn128;
+const Blake2 = require("blake2");
+const fs = require("fs");
+
+async function importResponse(oldPtauFilename, contributionFilename, newPotFilename, verbose) {
+    const fdOld = await fastFile.readExisting(oldPtauFilename);
+
+    const b = await fdOld.read(4);
+
+    if (b.toString() != "ptau") assert(false, "Old ptau file: invalid format.");
+
+    let v = await fdOld.readULE32();
+
+    if (v>1) assert(false, "Old ptau file: Version not supported");
+
+    const nSections = await fdOld.readULE32();
+
+    // Scan sections
+    let sections = [];
+    for (let i=0; i1) assert(false, "Old ptau file: File has more than one header");
+
+    fdOld.pos = sections[1][0].p;
+    const n8 = await fdOld.readULE32();
+    const qBuff = await fdOld.read(n8);
+    const q = Scalar.fromRprLE(qBuff);
+    let curve;
+    if (Scalar.eq(q, bn128.q)) {
+        curve = bn128;
+    } else {
+        assert(false, "Old ptau file: Curve not supported");
+    }
+    assert(curve.F1.n64*8 == n8, "Old ptau file: Invalid size");
+
+    const power = await fdOld.readULE32();
+    const nContributions = await fdOld.readULE32();
+    const sG1 = curve.F1.n64*8*2;
+    const scG1 = curve.F1.n64*8; // Compressed size
+    const sG2 = curve.F2.n64*8*2;
+    const scG2 = curve.F2.n64*8; // Compressed size
+
+    let stats = await fs.promises.stat(contributionFilename);
+    assert.equal(stats.size,
+        64 + // Old Hash
+        ((1<maxVersion) assert(false, "Version not supported");
+
+    const nSections = await fd.readULE32();
+
+    // Scan sections
+    let sections = [];
+    for (let i=0; i1) assert(false, "Version not supported");
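The new importResponse checks the contribution file against the layout documented in the "Format of the output" comment at the top of powersoftau_contribute.js: a 64-byte hash of the last contribution, four blocks of compressed points, and the compressed public key (betaG2, six G1 values and three G2 values). As a rough sketch, not part of the patch and assuming bn128's compressed sizes scG1 = 32 and scG2 = 64 bytes (curve.F1.n64*8 and curve.F2.n64*8), the expected response size for a given power works out to:

    // Illustration only: response-file size implied by the "Format of the output"
    // comment (compressed bn128 points assumed; responseSize is an illustrative helper).
    const scG1 = 32;   // compressed G1 point
    const scG2 = 64;   // compressed G2 point

    function responseSize(power) {
        const n = 1 << power;
        return 64                  // hash of the last contribution
            + (2*n - 1) * scG1     // tauG1 (compressed)
            + n * scG2             // tauG2 (compressed)
            + n * scG1             // alphaTauG1 (compressed)
            + n * scG1             // betaTauG1 (compressed)
            + scG2                 // public key: betaG2
            + 6 * scG1             // public key: G1*s, G1*s*tau, G1*t, G1*t*alpha, G1*u, G1*u*beta
            + 3 * scG2;            // public key: G2*sp*tau, G2*tp*alpha, G2*up*beta
    }

    console.log(responseSize(10)); // 197088 bytes for power 10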
-
-    const nSections = await fdFrom.readULE32();
-
-    // Scan sections
-    let sections = [];
-    for (let i=0; i1) assert(false, "File has more than one header");
-
-    fdFrom.pos = sections[1][0].p;
-    const n8 = await fdFrom.readULE32();
-    const qBuff = await fdFrom.read(n8);
-    const q = Scalar.fromRprLE(qBuff);
-    let curve;
-    if (Scalar.eq(q, bn128.q)) {
-        curve = bn128;
-    } else {
-        assert(false, "Curve not supported");
-    }
-    assert(curve.F1.n64*8 == n8, "Invalid size");
-
-    const power = await fdFrom.readULE32();
-    const nContributions = await fdFrom.readULE32();
-
-    let challangeHash;
-    if (nContributions == 0) {
-        challangeHash = Buffer.from(blake2b(64).digest());
-    } else {
-        assert(false, "Not implemented");
-    }
-
-    const fdTo = await fastFile.createOverride(challangeFilename);
-
-    const toHash = blake2b(64);
-    fdTo.write(challangeHash);
-    toHash.update(challangeHash);
-
-    // Process tauG1
-    if (!sections[2]) assert(false, "File has no tauG1 section");
-    if (sections[2].length>1) assert(false, "File has more than one tauG1 section");
-    fdFrom.pos = sections[2][0].p;
-    const nTauG1 = (1 << power) * 2 -1;
-    for (let i=0; i< nTauG1; i++) {
-        const p = await readG1();
-        await writeG1(p);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG1: " + i);
-    }
-    if (fdFrom.pos != sections[2][0].p + sections[2][0].size) assert(false, "Invalid tauG1 section size");
-
-    // Process tauG2
-    if (!sections[3]) assert(false, "File has no tauG2 section");
-    if (sections[3].length>1) assert(false, "File has more than one tauG2 section");
-    fdFrom.pos = sections[3][0].p;
-    const nTauG2 = 1 << power ;
-    for (let i=0; i< nTauG2; i++) {
-        const p = await readG2();
-        await writeG2(p);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("tauG2: " + i);
-    }
-    if (fdFrom.pos != sections[3][0].p + sections[3][0].size) assert(false, "Invalid tauG2 section size");
-
-    // Process alphaTauG1
-    if (!sections[4]) assert(false, "File has no alphaTauG1 section");
-    if (sections[4].length>1) assert(false, "File has more than one alphaTauG1 section");
-    fdFrom.pos = sections[4][0].p;
-    const nAlphaTauG1 = 1 << power ;
-    for (let i=0; i< nAlphaTauG1; i++) {
-        const p = await readG1();
-        await writeG1(p);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("alphaTauG1: " + i);
-    }
-    if (fdFrom.pos != sections[4][0].p + sections[4][0].size) assert(false, "Invalid alphaTauG1 section size");
-
-    // Process betaTauG1
-    if (!sections[5]) assert(false, "File has no betaTauG1 section");
-    if (sections[5].length>1) assert(false, "File has more than one betaTauG1 section");
-    fdFrom.pos = sections[5][0].p;
-    const nBetaTauG1 = 1 << power ;
-    for (let i=0; i< nBetaTauG1; i++) {
-        const p = await readG1();
-        await writeG1(p);
-        if ((verbose)&&((i%100000) == 0)&&i) console.log("betaTauG1: " + i);
-    }
-    if (fdFrom.pos != sections[5][0].p + sections[5][0].size) assert(false, "Invalid betaTauG1 section size");
-
-    // Process betaG2
-    if (!sections[6]) assert(false, "File has no betaG2 section");
-    if (sections[6].length>1) assert(false, "File has more than one betaG2 section");
-    fdFrom.pos = sections[6][0].p;
-    const betaG2 = await readG2();
-    await writeG2(betaG2);
-    if (fdFrom.pos != sections[6][0].p + sections[6][0].size) assert(false, "Invalid betaG2 section size");
-
-    await fdFrom.close();
-    await fdTo.close();
-
-    const newChallangeHash = toHash.digest("hex");
-
-    console.log("Challange Hash: " +newChallangeHash);
-
-    async function readG1() {
-        const pBuff = await fdFrom.read(curve.F1.n64*8*2);
-        return curve.G1.fromRprLEM( pBuff );
-    }
-
-    async function readG2() {
-        const pBuff = await fdFrom.read(curve.F1.n64*8*2*2);
-        return curve.G2.fromRprLEM( pBuff );
-    }
-
-    async function writeG1(p) {
-        const rpr = curve.G1.toRprBE(p);
-        await fdTo.write(rpr);
-        toHash.update(rpr);
-    }
-
-    async function writeG2(p) {
-        const rpr = curve.G2.toRprBE(p);
-        await fdTo.write(rpr);
-        toHash.update(rpr);
-    }
-
-}
-
-const rl = readline.createInterface({
-    input: process.stdin,
-    output: process.stdout
-});
-
-function askEntropy() {
-    return new Promise((resolve, reject) => {
-        rl.question("Enter a random text. \n(Entropy): ", (input) => resolve(input) );
-    });
-}
-
-
-async function contribute(curve, challangeFilename, responesFileName, entropy, verbose) {
-    const MAX_CHUNK_SIZE = 1024;
-
-    let stats = await fs.promises.stat(challangeFilename);
-
-    const sG1 = curve.F1.n64*8*2;
-    const scG1 = curve.F1.n64*8; // Compressed size
-    const sG2 = curve.F2.n64*8*2;
-    const scG2 = curve.F2.n64*8; // Compressed size
-    const domainSize = (stats.size + sG1 - 64 - sG2) / (4*sG1 + sG2);
-    let e = domainSize;
-    let power = 0;
-    while (e>1) {
-        e = e /2;
-        power += 1;
-    }
-
-    assert(1<