Fix bls12-381

This commit is contained in:
Jordi Baylina 2020-09-25 07:38:22 +02:00
parent 4a00e93f41
commit 661f4a4b6e
No known key found for this signature in database
GPG Key ID: 7480C80C1BE43112
9 changed files with 1710 additions and 4205 deletions

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

64
package-lock.json generated
View File

@@ -30,6 +30,11 @@
"js-tokens": "^4.0.0"
}
},
"@iden3/bigarray": {
"version": "0.0.2",
"resolved": "https://registry.npmjs.org/@iden3/bigarray/-/bigarray-0.0.2.tgz",
"integrity": "sha512-Xzdyxqm1bOFF6pdIsiHLLl3HkSLjbhqJHVyqaTxXt3RqXBEnmsUmEW47H7VOi/ak7TdkRpNkxjyK5Zbkm+y52g=="
},
"@types/color-name": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz",
@@ -667,29 +672,18 @@
"dev": true
},
"fastfile": {
"version": "0.0.15",
"resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.15.tgz",
"integrity": "sha512-LwvI75nCVs83EsuKpT+cL7M7T3VNdY4O50nLi4jnVZcxvqbWWqX43dM2GDrNHeD5uaX+ug0Di5RcvnTTI+hEAQ=="
"version": "0.0.18",
"resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.18.tgz",
"integrity": "sha512-q03PTKc+wptis4WmuFOwPNQx2p5myFUrl/dMgRlW9mymc1Egyc14JPHgiGnWK+sJ0+dBl2Vwtfh5GfSQltYOpw=="
},
"ffjavascript": {
"version": "0.2.21",
"resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.21.tgz",
"integrity": "sha512-7HyYo3j7lohPolvMMaRihMQUR5MLBFsqFv1Gy3182mPQqYd0TEq8wdd6sWV7Y+mv25yjU9MDJgOR2T2nkCRd9g==",
"version": "0.2.22",
"resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.22.tgz",
"integrity": "sha512-EsVqap2Txm17bKW0z/jXCX3M7rQ++nQUAJY8alWDpyhjRj90xjl6GLeVSKZQ8rOFDQ/SFFXcEB8w9X8Boxid+w==",
"requires": {
"big-integer": "^1.6.48",
"wasmcurves": "0.0.11",
"wasmcurves": "0.0.12",
"worker-threads": "^1.0.0"
},
"dependencies": {
"wasmcurves": {
"version": "0.0.11",
"resolved": "https://registry.npmjs.org/wasmcurves/-/wasmcurves-0.0.11.tgz",
"integrity": "sha512-iRuX0slhizCSYGScgQH0P7j2GS5qgfnpYVPtKoj+wrlLGPZQZiviGj8AwJdeg7fI68yVw4Wquvyp0ZCPfcb0wQ==",
"requires": {
"big-integer": "^1.6.42",
"blakejs": "^1.1.0"
}
}
}
},
"figures": {
@@ -1525,29 +1519,13 @@
"dev": true
},
"r1csfile": {
"version": "0.0.12",
"resolved": "https://registry.npmjs.org/r1csfile/-/r1csfile-0.0.12.tgz",
"integrity": "sha512-PcxL8dlJJ3i6g2wwcsXUnETIUQ0pg8RstZlTn00iqC79EYYBc/jsYC9Y8tt1+NTZ8pW5A+y0SefNsqVR16I3JQ==",
"version": "0.0.16",
"resolved": "https://registry.npmjs.org/r1csfile/-/r1csfile-0.0.16.tgz",
"integrity": "sha512-A2jRVWzGgmXeG2lVAc0H4suJmzt50it5UvBnycJgBCpMXM3tH/M6RguP7nvs6suY/yYnkN6jX6iTScSiDUF3FA==",
"requires": {
"fastfile": "0.0.6",
"ffjavascript": "0.2.4"
},
"dependencies": {
"fastfile": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/fastfile/-/fastfile-0.0.6.tgz",
"integrity": "sha512-6cOUdePcue0DAssqGKPhmcSgdLTaB2IzxNgg2WAADOuta00Os88+ShpDItSkQ/eLCiAeYjsPasdBLYozVz+4Ug=="
},
"ffjavascript": {
"version": "0.2.4",
"resolved": "https://registry.npmjs.org/ffjavascript/-/ffjavascript-0.2.4.tgz",
"integrity": "sha512-XFeWcjUDFPavN+DDOxhE8p5MOhZQJc9oO1Sj4ml1pyjqNhS1ujEamcjFyK0cctdnat61i7lvpTYzdtS3RYDC8w==",
"requires": {
"big-integer": "^1.6.48",
"wasmcurves": "0.0.4",
"worker-threads": "^1.0.0"
}
}
"@iden3/bigarray": "0.0.2",
"fastfile": "0.0.18",
"ffjavascript": "0.2.22"
}
},
"randombytes": {
@@ -2012,9 +1990,9 @@
"dev": true
},
"wasmcurves": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/wasmcurves/-/wasmcurves-0.0.4.tgz",
"integrity": "sha512-c/Tob+F/7jJhep1b2qtj54r4nkGaRifNbQ1OJx8cBBFH1RlHbWIbISHWONClOxiVwy/JZOpbN4SgvSX/4lF80A==",
"version": "0.0.12",
"resolved": "https://registry.npmjs.org/wasmcurves/-/wasmcurves-0.0.12.tgz",
"integrity": "sha512-1Jl9mkatyHSNj80ILjf85SZUNuZQBCkTjJlhzqHnZQXUmIimCIWkugaVaYNjozLs1Gun4h/keZe1MBeBN0sRpg==",
"requires": {
"big-integer": "^1.6.42",
"blakejs": "^1.1.0"

View File

@@ -40,10 +40,10 @@
"dependencies": {
"blake2b-wasm": "https://github.com/jbaylina/blake2b-wasm.git",
"circom_runtime": "0.1.5",
"fastfile": "0.0.15",
"ffjavascript": "0.2.21",
"fastfile": "0.0.18",
"ffjavascript": "0.2.22",
"logplease": "^1.2.15",
"r1csfile": "0.0.12"
"r1csfile": "0.0.16"
},
"devDependencies": {
"chai": "^4.2.0",

View File

@@ -2,9 +2,9 @@
import { Scalar } from "ffjavascript";
import * as fastFile from "fastfile";
export async function readBinFile(fileName, type, maxVersion) {
export async function readBinFile(fileName, type, maxVersion, cacheSize, pageSize) {
const fd = await fastFile.readExisting(fileName);
const fd = await fastFile.readExisting(fileName, cacheSize, pageSize);
const b = await fd.read(4);
let readedType = "";
@@ -34,9 +34,9 @@ export async function readBinFile(fileName, type, maxVersion) {
return {fd, sections};
}
export async function createBinFile(fileName, type, version, nSections) {
export async function createBinFile(fileName, type, version, nSections, cacheSize, pageSize) {
const fd = await fastFile.createOverride(fileName, null);
const fd = await fastFile.createOverride(fileName, cacheSize, pageSize);
const buff = new Uint8Array(4);
for (let i=0; i<4; i++) buff[i] = type.charCodeAt(i);

View File

@@ -42,17 +42,17 @@ export default async function groth16Prove(zkeyFileName, witnessFileName, logger
const inc = power == Fr.s ? curve.Fr.shift : curve.Fr.w[power+1];
const buffA = await Fr.ifft(buffA_T);
const buffA = await Fr.ifft(buffA_T, "", "", logger, "FFT_A");
const buffAodd = await Fr.batchApplyKey(buffA, Fr.e(1), inc);
const buffAodd_T = await Fr.fft(buffAodd);
const buffAodd_T = await Fr.fft(buffAodd, "", "", logger, "IFFT_A");
const buffB = await Fr.ifft(buffB_T);
const buffB = await Fr.ifft(buffB_T, "", "", logger, "FFT_B");
const buffBodd = await Fr.batchApplyKey(buffB, Fr.e(1), inc);
const buffBodd_T = await Fr.fft(buffBodd);
const buffBodd_T = await Fr.fft(buffBodd, "", "", logger, "IFFT_B");
const buffC = await Fr.ifft(buffC_T);
const buffC = await Fr.ifft(buffC_T, "", "", logger, "FFT_C");
const buffCodd = await Fr.batchApplyKey(buffC, Fr.e(1), inc);
const buffCodd_T = await Fr.fft(buffCodd);
const buffCodd_T = await Fr.fft(buffCodd, "", "", logger, "IFFT_C");
const buffPodd_T = await joinABC(curve, zkey, buffAodd_T, buffBodd_T, buffCodd_T);

View File

@@ -272,48 +272,51 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
}
async function composeAndWritePoints(idSection, groupName, arr, sectionName) {
const CHUNK_SIZE= 1<<16;
const CHUNK_SIZE= 1<<13;
const G = curve[groupName];
hashU32(arr.length);
await binFileUtils.startWriteSection(fdZKey, idSection);
for (let i=0; i<arr.length; i+= CHUNK_SIZE) {
if (logger) logger.debug(`Writing points ${sectionName}: ${i}/${arr.length}`);
const n = Math.min(arr.length -i, CHUNK_SIZE);
const subArr = arr.slice(i, i + n);
await composeAndWritePointsChunk(groupName, subArr);
let opPromises = [];
let i=0;
while (i<arr.length) {
let t=0;
while ((i<arr.length)&&(t<curve.tm.concurrency)) {
if (logger) logger.debug(`Writing points start ${sectionName}: ${i}/${arr.length}`);
let n = 1;
let nP = (arr[i] ? arr[i].length : 0);
while ((i + n < arr.length) && (nP + (arr[i+n] ? arr[i+n].length : 0) < CHUNK_SIZE)) {
nP += (arr[i+n] ? arr[i+n].length : 0);
n ++;
}
const subArr = arr.slice(i, i + n);
const _i = i;
opPromises.push(composeAndWritePointsThread(groupName, subArr, logger, sectionName).then( (r) => {
if (logger) logger.debug(`Writing points end ${sectionName}: ${_i}/${arr.length}`);
return r;
}));
i += n;
t++;
}
const result = await Promise.all(opPromises);
for (let k=0; k<result.length; k++) {
await fdZKey.write(result[k][0]);
const buff = await G.batchLEMtoU(result[k][0]);
csHasher.update(buff);
}
opPromises = [];
}
await binFileUtils.endWriteSection(fdZKey);
}
async function composeAndWritePointsChunk(groupName, arr) {
const concurrency= curve.tm.concurrency;
const nElementsPerThread = Math.floor(arr.length / concurrency);
const opPromises = [];
const G = curve[groupName];
for (let i=0; i<concurrency; i++) {
let n;
if (i< concurrency-1) {
n = nElementsPerThread;
} else {
n = arr.length - i*nElementsPerThread;
}
if (n==0) continue;
const subArr = arr.slice(i*nElementsPerThread, i*nElementsPerThread + n);
opPromises.push(composeAndWritePointsThread(groupName, subArr));
}
const result = await Promise.all(opPromises);
for (let i=0; i<result.length; i++) {
await fdZKey.write(result[i][0]);
const buff = await G.batchLEMtoU(result[i][0]);
csHasher.update(buff);
}
}
async function composeAndWritePointsThread(groupName, arr) {
async function composeAndWritePointsThread(groupName, arr, logger, sectionName) {
const G = curve[groupName];
const sGin = G.F.n8*2;
const sGmid = G.F.n8*3;
@@ -334,65 +337,96 @@ export default async function newZKey(r1csName, ptauName, zkeyName, logger) {
}
let acc =0;
for (let i=0; i<arr.length; i++) acc += arr[i] ? arr[i].length : 0;
const bBases = new Uint8Array(acc*sGin);
const bScalars = new Uint8Array(acc*curve.Fr.n8);
let bBases, bScalars;
if (acc> 2<<14) {
bBases = new BigBuffer(acc*sGin);
bScalars = new BigBuffer(acc*curve.Fr.n8);
} else {
bBases = new Uint8Array(acc*sGin);
bScalars = new Uint8Array(acc*curve.Fr.n8);
}
let pB =0;
let pS =0;
let readOps = [];
let scalars = [];
let offset = 0;
for (let i=0; i<arr.length; i++) {
if (!arr[i]) continue;
for (let j=0; j<arr[i].length; j++) {
const bBase = await fdPTau.read(sGin, arr[i][j][0]);
bBases.set(bBase, pB);
pB += sGin;
bScalars.set(arr[i][j][1], pS);
pS += curve.Fr.n8;
if (readOps.length > 2<<14) {
logger.debug(`${sectionName}: Long MExp Load ${j}/${arr[i].length}`);
const points = await Promise.all(readOps);
for (let k=0; k<points.length; k++) {
bBases.set(points[k], (offset+k)*sGin);
bScalars.set(scalars[k], (offset+k)*curve.Fr.n8);
}
offset += readOps.length;
readOps = [];
scalars = [];
}
scalars.push(arr[i][j][1]);
readOps.push(fdPTau.read(sGin, arr[i][j][0]));
}
}
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: bBases});
task.push({cmd: "ALLOCSET", var: 1, buff: bScalars});
task.push({cmd: "ALLOC", var: 2, len: arr.length*sGmid});
pB = 0;
pS = 0;
let pD =0;
for (let i=0; i<arr.length; i++) {
if (!arr[i]) {
task.push({cmd: "CALL", fnName: fnZero, params: [
{var: 2, offset: pD}
]});
const points = await Promise.all(readOps);
for (let i=0; i<points.length; i++) {
bBases.set(points[i], (offset+i)*sGin);
bScalars.set(scalars[i], (offset+i)*curve.Fr.n8);
}
if (arr.length>1) {
const task = [];
task.push({cmd: "ALLOCSET", var: 0, buff: bBases});
task.push({cmd: "ALLOCSET", var: 1, buff: bScalars});
task.push({cmd: "ALLOC", var: 2, len: arr.length*sGmid});
pB = 0;
pS = 0;
let pD =0;
for (let i=0; i<arr.length; i++) {
if (!arr[i]) {
task.push({cmd: "CALL", fnName: fnZero, params: [
{var: 2, offset: pD}
]});
pD += sGmid;
continue;
}
if (arr[i].length == 1) {
task.push({cmd: "CALL", fnName: fnExp, params: [
{var: 0, offset: pB},
{var: 1, offset: pS},
{val: curve.Fr.n8},
{var: 2, offset: pD}
]});
} else {
task.push({cmd: "CALL", fnName: fnMultiExp, params: [
{var: 0, offset: pB},
{var: 1, offset: pS},
{val: curve.Fr.n8},
{val: arr[i].length},
{var: 2, offset: pD}
]});
}
pB += sGin*arr[i].length;
pS += curve.Fr.n8*arr[i].length;
pD += sGmid;
continue;
}
if (arr[i].length == 1) {
task.push({cmd: "CALL", fnName: fnExp, params: [
{var: 0, offset: pB},
{var: 1, offset: pS},
{val: curve.Fr.n8},
{var: 2, offset: pD}
]});
} else {
task.push({cmd: "CALL", fnName: fnMultiExp, params: [
{var: 0, offset: pB},
{var: 1, offset: pS},
{val: curve.Fr.n8},
{val: arr[i].length},
{var: 2, offset: pD}
]});
}
pB += sGin*arr[i].length;
pS += curve.Fr.n8*arr[i].length;
pD += sGmid;
task.push({cmd: "CALL", fnName: fnBatchToAffine, params: [
{var: 2},
{val: arr.length},
{var: 2},
]});
task.push({cmd: "GET", out: 0, var: 2, len: arr.length*sGout});
const res = await curve.tm.queueAction(task);
return res;
} else {
let res = await G.multiExpAffine(bBases, bScalars, logger, sectionName);
res = [ G.toAffine(res) ];
return res;
}
task.push({cmd: "CALL", fnName: fnBatchToAffine, params: [
{var: 2},
{val: arr.length},
{var: 2},
]});
task.push({cmd: "GET", out: 0, var: 2, len: arr.length*sGout});
const res = await curve.tm.queueAction(task);
return res;
}