split zkey in functions and log Hs
This commit is contained in:
parent 4e03641027
commit ec9fbfe713
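The diff below refactors `newZKey`, the routine that builds an initial Groth16 `.zkey` from an R1CS file and a prepared Powers of Tau file: the inline "Write Hs" block becomes a `writeHs()` helper and gains progress logging. For orientation, here is a minimal usage sketch; the file names and the logger object are illustrative, and the `zKey.newZKey` entry point is assumed from the snarkjs public API rather than shown in this commit.

// Hedged usage sketch (not part of this commit): calling the zkey "new" step.
// Any object with debug/info/error methods works as the optional logger and
// will receive the "spliting buffer" progress and "Circuit hash" messages.
import * as snarkjs from "snarkjs";

const logger = {
    debug: (m) => console.debug(m),
    info:  (m) => console.info(m),
    warn:  (m) => console.warn(m),
    error: (m) => console.error(m),
};

// Signature matches newZKey(r1csName, ptauName, zkeyName, logger) from the diff;
// per the diff, the function resolves to the circuit hash.
const csHash = await snarkjs.zKey.newZKey(
    "circuit.r1cs",       // compiled circuit (placeholder name)
    "pot_final.ptau",     // prepared phase-1 file (placeholder name)
    "circuit_0000.zkey",  // output zkey (placeholder name)
    logger
);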
@@ -4016,6 +4016,56 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
const C = new BigArray$1(r1cs.nVars- nPublic -1);
const IC = new Array(nPublic+1);

await processConstraints();

await composeAndWritePoints(3, "G1", IC, "IC");

await writeHs();

await hashHPoints();

await composeAndWritePoints(8, "G1", C, "C");
await composeAndWritePoints(5, "G1", A, "A");
await composeAndWritePoints(6, "G1", B1, "B1");
await composeAndWritePoints(7, "G2", B2, "B2");

const csHash = csHasher.digest();
// Contributions section
await startWriteSection(fdZKey, 10);
await fdZKey.write(csHash);
await fdZKey.writeULE32(0);
await endWriteSection(fdZKey);

if (logger) logger.info(formatHash(csHash, "Circuit hash: "));

await fdZKey.close();
await fdPTau.close();
await fdR1cs.close();

return csHash;

async function writeHs() {
    await startWriteSection(fdZKey, 9);
    const buffOut = new ffjavascript.BigBuffer(domainSize*sG1);
    if (cirPower < curve.Fr.s) {
        let sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize*2-1)*sG1, domainSize*2*sG1);
        for (let i=0; i< domainSize; i++) {
            if ((logger)&&(i%10000 == 0)) logger.debug(`spliting buffer: ${i}/${domainSize}`);
            const buff = sTauG1.slice( (i*2+1)*sG1, (i*2+1)*sG1 + sG1 );
            buffOut.set(buff, i*sG1);
        }
    } else if (cirPower == curve.Fr.s) {
        const o = sectionsPTau[12][0].p + ((2 ** (cirPower+1)) -1)*sG1;
        await fdPTau.readToBuffer(buffOut, 0, domainSize*sG1, o + domainSize*sG1);
    } else {
        if (logger) logger.error("Circuit too big");
        throw new Error("Circuit too big for this curve");
    }
    await fdZKey.write(buffOut);
    await endWriteSection(fdZKey);
}

async function processConstraints() {
    const buffCoeff = new Uint8Array(12 + curve.Fr.n8);
    const buffCoeffV = new DataView(buffCoeff.buffer);

@@ -4128,50 +4178,6 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {

await endWriteSection(fdZKey);

await composeAndWritePoints(3, "G1", IC, "IC");

// Write Hs
await startWriteSection(fdZKey, 9);

const buffOut = new ffjavascript.BigBuffer(domainSize*sG1);
if (cirPower < curve.Fr.s) {
    let sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize*2-1)*sG1, domainSize*2*sG1);
    for (let i=0; i< domainSize; i++) {
        const buff = sTauG1.slice( (i*2+1)*sG1, (i*2+1)*sG1 + sG1 );
        buffOut.set(buff, i*sG1);
    }
} else if (cirPower == curve.Fr.s) {
    const o = sectionsPTau[12][0].p + ((2 ** (cirPower+1)) -1)*sG1;
    await fdPTau.readToBuffer(buffOut, 0, domainSize*sG1, o + domainSize*sG1);
} else {
    if (logger) logger.error("Circuit too big");
    throw new Error("Circuit too big for this curve");
}
await fdZKey.write(buffOut);
await endWriteSection(fdZKey);
await hashHPoints();

await composeAndWritePoints(8, "G1", C, "C");
await composeAndWritePoints(5, "G1", A, "A");
await composeAndWritePoints(6, "G1", B1, "B1");
await composeAndWritePoints(7, "G2", B2, "B2");

const csHash = csHasher.digest();
// Contributions section
await startWriteSection(fdZKey, 10);
await fdZKey.write(csHash);
await fdZKey.writeULE32(0);
await endWriteSection(fdZKey);

if (logger) logger.info(formatHash(csHash, "Circuit hash: "));

await fdZKey.close();
await fdPTau.close();
await fdR1cs.close();

return csHash;

async function writeCoef(a, c, s, coef) {
    const n = curve.Fr.fromRprLE(coef, 0);

@@ -4183,6 +4189,8 @@ async function newZKey(r1csName, ptauName, zkeyName, logger) {
    await fdZKey.write(buffCoeff);
}

}

async function composeAndWritePoints(idSection, groupName, arr, sectionName) {
    const CHUNK_SIZE= 1<<13;
    const G = curve[groupName];
build/main.cjs (3984 lines changed): file diff suppressed because it is too large.
build/snarkjs.min.js (vendored, 2 lines changed): file diff suppressed because one or more lines are too long.
@@ -8,8 +8,6 @@ import {
    writeBigInt,
    startWriteSection,
    endWriteSection,
    startReadUniqueSection,
    endReadSection
} from "@iden3/binfileutils";
import { log2, formatHash } from "./misc.js";
import { Scalar, BigBuffer } from "ffjavascript";

@@ -120,6 +118,56 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
const C = new BigArray(r1cs.nVars- nPublic -1);
const IC = new Array(nPublic+1);

await processConstraints();

await composeAndWritePoints(3, "G1", IC, "IC");

await writeHs();

await hashHPoints();

await composeAndWritePoints(8, "G1", C, "C");
await composeAndWritePoints(5, "G1", A, "A");
await composeAndWritePoints(6, "G1", B1, "B1");
await composeAndWritePoints(7, "G2", B2, "B2");

const csHash = csHasher.digest();
// Contributions section
await startWriteSection(fdZKey, 10);
await fdZKey.write(csHash);
await fdZKey.writeULE32(0);
await endWriteSection(fdZKey);

if (logger) logger.info(formatHash(csHash, "Circuit hash: "));

await fdZKey.close();
await fdPTau.close();
await fdR1cs.close();

return csHash;

async function writeHs() {
    await startWriteSection(fdZKey, 9);
    const buffOut = new BigBuffer(domainSize*sG1);
    if (cirPower < curve.Fr.s) {
        let sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize*2-1)*sG1, domainSize*2*sG1);
        for (let i=0; i< domainSize; i++) {
            if ((logger)&&(i%10000 == 0)) logger.debug(`spliting buffer: ${i}/${domainSize}`);
            const buff = sTauG1.slice( (i*2+1)*sG1, (i*2+1)*sG1 + sG1 );
            buffOut.set(buff, i*sG1);
        }
    } else if (cirPower == curve.Fr.s) {
        const o = sectionsPTau[12][0].p + ((2 ** (cirPower+1)) -1)*sG1;
        await fdPTau.readToBuffer(buffOut, 0, domainSize*sG1, o + domainSize*sG1);
    } else {
        if (logger) logger.error("Circuit too big");
        throw new Error("Circuit too big for this curve");
    }
    await fdZKey.write(buffOut);
    await endWriteSection(fdZKey);
}

async function processConstraints() {
    const buffCoeff = new Uint8Array(12 + curve.Fr.n8);
    const buffCoeffV = new DataView(buffCoeff.buffer);

@@ -232,50 +280,6 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {

await endWriteSection(fdZKey);

await composeAndWritePoints(3, "G1", IC, "IC");

// Write Hs
await startWriteSection(fdZKey, 9);

const buffOut = new BigBuffer(domainSize*sG1);
if (cirPower < curve.Fr.s) {
    let sTauG1 = await readSection(fdPTau, sectionsPTau, 12, (domainSize*2-1)*sG1, domainSize*2*sG1);
    for (let i=0; i< domainSize; i++) {
        const buff = sTauG1.slice( (i*2+1)*sG1, (i*2+1)*sG1 + sG1 );
        buffOut.set(buff, i*sG1);
    }
} else if (cirPower == curve.Fr.s) {
    const o = sectionsPTau[12][0].p + ((2 ** (cirPower+1)) -1)*sG1;
    await fdPTau.readToBuffer(buffOut, 0, domainSize*sG1, o + domainSize*sG1);
} else {
    if (logger) logger.error("Circuit too big");
    throw new Error("Circuit too big for this curve");
}
await fdZKey.write(buffOut);
await endWriteSection(fdZKey);
await hashHPoints();

await composeAndWritePoints(8, "G1", C, "C");
await composeAndWritePoints(5, "G1", A, "A");
await composeAndWritePoints(6, "G1", B1, "B1");
await composeAndWritePoints(7, "G2", B2, "B2");

const csHash = csHasher.digest();
// Contributions section
await startWriteSection(fdZKey, 10);
await fdZKey.write(csHash);
await fdZKey.writeULE32(0);
await endWriteSection(fdZKey);

if (logger) logger.info(formatHash(csHash, "Circuit hash: "));

await fdZKey.close();
await fdPTau.close();
await fdR1cs.close();

return csHash;

async function writeCoef(a, c, s, coef) {
    const n = curve.Fr.fromRprLE(coef, 0);

@@ -287,6 +291,8 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
    await fdZKey.write(buffCoeff);
}

}

async function composeAndWritePoints(idSection, groupName, arr, sectionName) {
    const CHUNK_SIZE= 1<<13;
    const G = curve[groupName];
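The new `writeHs()` helper shown in both diffs copies, for each `i`, the point at offset `(i*2+1)*sG1` out of the buffer read from ptau section 12 into a contiguous output buffer, and, when a logger is passed, reports progress every 10,000 iterations. A stripped-down sketch of that copy pattern, using plain `Uint8Array`s and made-up sizes instead of `BigBuffer` and real curve parameters, might look like this:

// Hedged sketch of the writeHs copy loop: extract every record at index 2*i+1
// from an interleaved buffer of fixed-size records. Sizes are illustrative only.
const sG1 = 64;           // bytes per encoded G1 point (curve dependent)
const domainSize = 8;     // number of H points to extract

const sTauG1 = new Uint8Array(2 * domainSize * sG1);   // stands in for the section-12 read
const buffOut = new Uint8Array(domainSize * sG1);      // contiguous output, as in writeHs

for (let i = 0; i < domainSize; i++) {
    // The real code throttles debug output to one line per 10,000 points.
    if (i % 10000 === 0) console.debug(`splitting buffer: ${i}/${domainSize}`);
    const buff = sTauG1.slice((i * 2 + 1) * sG1, (i * 2 + 1) * sG1 + sG1);
    buffOut.set(buff, i * sG1);
}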