Skip to content

Commit

Permalink
Chunked zkey
Browse files Browse the repository at this point in the history
  • Loading branch information
nalinbhardwaj committed Jan 22, 2022
1 parent b78994d commit d1c10a6
Show file tree
Hide file tree
Showing 13 changed files with 812 additions and 741 deletions.
506 changes: 273 additions & 233 deletions build/cli.cjs

Large diffs are not rendered by default.

498 changes: 266 additions & 232 deletions build/main.cjs

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@
"blake2b-wasm": "^2.4.0",
"circom_runtime": "0.1.17",
"ejs": "^3.1.6",
"fastfile": "0.0.19",
"fastfile": "^0.0.19",
"ffjavascript": "0.2.48",
"js-sha3": "^0.8.0",
"logplease": "^1.2.15",
Expand Down
105 changes: 105 additions & 0 deletions src/chunk_utils.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
import fs from "fs";
import * as binFileUtils from "@iden3/binfileutils";
import * as fastFile from "fastfile";
import { BigBuffer } from "ffjavascript";

// Each section is its own file.

// Map a numeric section id to its one-letter file suffix: 0 -> "a", 1 -> "b", ...
// Each zkey section is stored in its own file named `<zkeyName><letter>`.
function sectionName(sectionId) {
    const A_CODE = "a".charCodeAt(0);
    return String.fromCharCode(A_CODE + sectionId);
}

/**
 * Create the chunk file for one zkey section and begin writing it.
 *
 * The file gets type tag "zky" + section letter, a single section, and an
 * 8-byte size placeholder that endWriteSectionFile patches once the payload
 * length is known.
 *
 * @param {string} zkeyName  base path; the section letter is appended to it
 * @param {number} sectionId numeric id of the section being written
 * @returns {Promise<object>} open fd positioned after the size placeholder
 */
export async function startWriteSectionFile(zkeyName, sectionId) {
    const suffix = sectionName(sectionId);
    const fileType = "zky" + suffix;
    const fd = await binFileUtils.createBinFile(zkeyName + suffix, fileType, 1, 1, 1 << 22, 1 << 24);

    // Remember where the size placeholder lives so it can be patched later.
    fd.writingSection = { pSectionSize: fd.pos };
    await fd.writeULE64(0); // placeholder; real size written by endWriteSectionFile
    return fd;
}

/**
 * Finish writing a section chunk file started with startWriteSectionFile:
 * back-patch the 8-byte size placeholder with the payload length, then close.
 *
 * @param {object} fd open fd returned by startWriteSectionFile
 * @throws {Error} if fd is not currently writing a section
 */
export async function endWriteSectionFile(fd) {
    if (fd.writingSection === undefined) throw new Error("Not writing a section");

    const { pSectionSize } = fd.writingSection;
    const endPos = fd.pos;
    // Payload length excludes the 8-byte size field itself.
    const sectionSize = endPos - pSectionSize - 8;

    fd.pos = pSectionSize;
    await fd.writeULE64(sectionSize);
    fd.pos = endPos;

    await fd.close();
    delete fd.writingSection;
}

/**
 * Open the chunk file holding section `sectionId` of `zkeyName`, validate its
 * header (type tag, version, single-section layout) and leave the cursor at
 * the start of the section payload.
 *
 * @param {string} zkeyName   base path; the section letter is appended
 * @param {number} sectionId  numeric id of the section to read
 * @param {number} maxVersion highest file version this reader accepts
 * @returns {Promise<object>} fd with fd.readingSection = {size, p}
 * @throws {Error} on bad type tag, unsupported version, or section count != 1
 */
export async function startReadSectionFile(zkeyName, sectionId, maxVersion) {
    const suffix = sectionName(sectionId);
    const fileName = zkeyName + suffix;
    const expectedType = "zky" + suffix;
    const fd = await fastFile.readExisting(fileName);

    // First 4 bytes are the ASCII type tag.
    const magic = await fd.read(4);
    let fileType = "";
    for (const byte of magic) fileType += String.fromCharCode(byte);
    if (fileType !== expectedType) throw new Error(fileName + ": Invalid File format");

    const version = await fd.readULE32();
    if (version > maxVersion) throw new Error("Version not supported");

    const nSections = await fd.readULE32();
    if (nSections !== 1) throw new Error("More than one section found");

    const size = await fd.readULE64();
    fd.readingSection = { size, p: fd.pos };
    return fd;
}

/**
 * Finish reading a section chunk file opened with startReadSectionFile and
 * close it.
 *
 * @param {object}  fd      fd returned by startReadSectionFile
 * @param {boolean} noCheck skip verifying that the cursor consumed exactly
 *                          the declared section size
 * @throws {Error} if fd is not reading a section, or the size check fails
 */
export async function endReadSectionFile(fd, noCheck) {
    if (fd.readingSection === undefined) throw new Error("Not reading a section");
    const { size, p } = fd.readingSection;
    if (!noCheck && fd.pos - p !== size) throw new Error("Invalid section size reading");
    await fd.close();
    delete fd.readingSection;
}

/**
 * Read the entire payload of a section chunk file into memory.
 *
 * Payloads under 1 GiB go into a plain Uint8Array; larger ones need a
 * BigBuffer because typed arrays cannot span that much memory.
 *
 * @param {string} zkeyName   base path of the chunked zkey
 * @param {number} sectionId  numeric id of the section to load
 * @param {number} maxVersion highest file version accepted
 * @returns {Promise<Uint8Array|BigBuffer>} the section payload
 */
export async function readSectionFile(zkeyName, sectionId, maxVersion) {
    const fd = await startReadSectionFile(zkeyName, sectionId, maxVersion);
    const { size } = fd.readingSection;

    const ONE_GIB = 1 << 30;
    const buff = size < ONE_GIB ? new Uint8Array(size) : new BigBuffer(size);

    await fd.readToBuffer(buff, 0, size, fd.pos);
    await endReadSectionFile(fd);
    return buff;
}

/**
 * Copy one section chunk file from an old chunked zkey to a new one.
 *
 * Bug fix: the original awaited `fs.copyFile` with a callback. The callback
 * form returns undefined, so the `await` never waited for the copy to finish,
 * and an error thrown inside the callback could not propagate to the caller.
 * Using the promise-based API makes completion and errors observable.
 *
 * @param {string} zkeyNameOld base path of the source chunked zkey
 * @param {string} zkeyNameNew base path of the destination chunked zkey
 * @param {number} sectionId   numeric id of the section to copy
 * @throws {Error} if the underlying file copy fails
 */
export async function copySectionFile(zkeyNameOld, zkeyNameNew, sectionId) {
    const suffix = sectionName(sectionId);
    await fs.promises.copyFile(zkeyNameOld + suffix, zkeyNameNew + suffix);
}

/**
 * Compare the payloads of the same section in two chunked zkeys.
 *
 * Bug fix: the original returned `false` directly on a size or byte mismatch,
 * leaking both open file descriptors. This version records the result and
 * always closes both files before returning. `endReadSectionFile` is called
 * with noCheck=true because on a mismatch the cursors have not consumed the
 * whole section, which would otherwise trip the size check.
 *
 * @param {string} zkeyNameOld base path of the first chunked zkey
 * @param {string} zkeyNameNew base path of the second chunked zkey
 * @param {number} sectionId   numeric id of the section to compare
 * @param {number} maxVersion  highest file version accepted
 * @returns {Promise<boolean>} true iff both sections are byte-identical
 */
export async function sectionFileIsEqual(zkeyNameOld, zkeyNameNew, sectionId, maxVersion) {
    const fdOld = await startReadSectionFile(zkeyNameOld, sectionId, maxVersion);
    const fdNew = await startReadSectionFile(zkeyNameNew, sectionId, maxVersion);

    let equal = fdOld.readingSection.size === fdNew.readingSection.size;
    if (equal) {
        const MAX_BUFF_SIZE = fdOld.pageSize * 16;
        const totalBytes = fdOld.readingSection.size;
        outer:
        for (let i = 0; i < totalBytes; i += MAX_BUFF_SIZE) {
            const n = Math.min(totalBytes - i, MAX_BUFF_SIZE);
            const buff1 = await fdOld.read(n);
            const buff2 = await fdNew.read(n);
            for (let j = 0; j < n; j++) {
                if (buff1[j] !== buff2[j]) { equal = false; break outer; }
            }
        }
    }

    // Always release both descriptors, even when the comparison bailed early.
    await endReadSectionFile(fdOld, true);
    await endReadSectionFile(fdNew, true);
    return equal;
}
18 changes: 9 additions & 9 deletions src/groth16_prove.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
*/

import * as binFileUtils from "@iden3/binfileutils";
import * as chunkFileUtils from "./chunk_utils.js";
import * as zkeyUtils from "./zkey_utils.js";
import * as wtnsUtils from "./wtns_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
Expand All @@ -30,9 +31,9 @@ export default async function groth16Prove(zkeyFileName, witnessFileName, logger

const wtns = await wtnsUtils.readHeader(fdWtns, sectionsWtns);

const {fd: fdZKey, sections: sectionsZKey} = await binFileUtils.readBinFile(zkeyFileName, "zkey", 2, 1<<25, 1<<23);
const maxZKeyVersion = 2;

const zkey = await zkeyUtils.readHeader(fdZKey, sectionsZKey);
const zkey = await zkeyUtils.readHeader(zkeyFileName, maxZKeyVersion);

if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
Expand All @@ -56,7 +57,7 @@ export default async function groth16Prove(zkeyFileName, witnessFileName, logger
if (logger) logger.debug("Reading Wtns");
const buffWitness = await binFileUtils.readSection(fdWtns, sectionsWtns, 2);
if (logger) logger.debug("Reading Coeffs");
const buffCoeffs = await binFileUtils.readSection(fdZKey, sectionsZKey, 4);
const buffCoeffs = await chunkFileUtils.readSectionFile(zkeyFileName, 4, maxZKeyVersion);

if (logger) logger.debug("Building ABC");
const [buffA_T, buffB_T, buffC_T] = await buildABC1(curve, zkey, buffWitness, buffCoeffs, logger);
Expand All @@ -81,23 +82,23 @@ export default async function groth16Prove(zkeyFileName, witnessFileName, logger
let proof = {};

if (logger) logger.debug("Reading A Points");
const buffBasesA = await binFileUtils.readSection(fdZKey, sectionsZKey, 5);
const buffBasesA = await chunkFileUtils.readSectionFile(zkeyFileName, 5, maxZKeyVersion);
proof.pi_a = await curve.G1.multiExpAffine(buffBasesA, buffWitness, logger, "multiexp A");

if (logger) logger.debug("Reading B1 Points");
const buffBasesB1 = await binFileUtils.readSection(fdZKey, sectionsZKey, 6);
const buffBasesB1 = await chunkFileUtils.readSectionFile(zkeyFileName, 6, maxZKeyVersion);
let pib1 = await curve.G1.multiExpAffine(buffBasesB1, buffWitness, logger, "multiexp B1");

if (logger) logger.debug("Reading B2 Points");
const buffBasesB2 = await binFileUtils.readSection(fdZKey, sectionsZKey, 7);
const buffBasesB2 = await chunkFileUtils.readSectionFile(zkeyFileName, 7, maxZKeyVersion);
proof.pi_b = await curve.G2.multiExpAffine(buffBasesB2, buffWitness, logger, "multiexp B2");

if (logger) logger.debug("Reading C Points");
const buffBasesC = await binFileUtils.readSection(fdZKey, sectionsZKey, 8);
const buffBasesC = await chunkFileUtils.readSectionFile(zkeyFileName, 8, maxZKeyVersion);
proof.pi_c = await curve.G1.multiExpAffine(buffBasesC, buffWitness.slice((zkey.nPublic+1)*curve.Fr.n8), logger, "multiexp C");

if (logger) logger.debug("Reading H Points");
const buffBasesH = await binFileUtils.readSection(fdZKey, sectionsZKey, 9);
const buffBasesH = await chunkFileUtils.readSectionFile(zkeyFileName, 9, maxZKeyVersion);
const resH = await curve.G1.multiExpAffine(buffBasesH, buffPodd_T, logger, "multiexp H");

const r = curve.Fr.random();
Expand Down Expand Up @@ -134,7 +135,6 @@ export default async function groth16Prove(zkeyFileName, witnessFileName, logger
proof.protocol = "groth16";
proof.curve = curve.name;

await fdZKey.close();
await fdWtns.close();

proof = stringifyBigInts(proof);
Expand Down
13 changes: 7 additions & 6 deletions src/mpc_applykey.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
*/

import * as binFileUtils from "@iden3/binfileutils";
import * as chunkFileUtils from "./chunk_utils.js";

/*
This function creates a new section in the fdTo file with id idSection.
Expand All @@ -26,15 +27,15 @@ import * as binFileUtils from "@iden3/binfileutils";
It also updates the newChallengeHasher with the new points
*/

export async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve, groupName, first, inc, sectionName, logger) {
export async function applyKeyToSection(zkeyFileNameOld, maxZKeyVersion, zkeyFileNameNew, idSection, curve, groupName, first, inc, sectionName, logger) {
const MAX_CHUNK_SIZE = 1 << 16;
const G = curve[groupName];
const sG = G.F.n8*2;
const nPoints = sections[idSection][0].size / sG;

await binFileUtils.startReadUniqueSection(fdOld, sections,idSection );
await binFileUtils.startWriteSection(fdNew, idSection);
const fdOld = await chunkFileUtils.startReadSectionFile(zkeyFileNameOld, idSection, maxZKeyVersion);
const fdNew = await chunkFileUtils.startWriteSectionFile(zkeyFileNameNew, idSection);

const nPoints = fdOld.readingSection.size / sG;
let t = first;
for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
if (logger) logger.debug(`Applying key: ${sectionName}: ${i}/${nPoints}`);
Expand All @@ -46,8 +47,8 @@ export async function applyKeyToSection(fdOld, sections, fdNew, idSection, curve
t = curve.Fr.mul(t, curve.Fr.exp(inc, n));
}

await binFileUtils.endWriteSection(fdNew);
await binFileUtils.endReadSection(fdOld);
await chunkFileUtils.endWriteSectionFile(fdNew);
await chunkFileUtils.endReadSectionFile(fdOld);
}


Expand Down
28 changes: 13 additions & 15 deletions src/zkey_beacon.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
*/

import * as binFileUtils from "@iden3/binfileutils";
import * as chunkFileUtils from "./chunk_utils.js";
import * as zkeyUtils from "./zkey_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import * as misc from "./misc.js";
Expand Down Expand Up @@ -49,8 +50,8 @@ export default async function beacon(zkeyNameOld, zkeyNameNew, name, beaconHashS
}


const {fd: fdOld, sections: sections} = await binFileUtils.readBinFile(zkeyNameOld, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fdOld, sections);
const maxZKeyVersion = 2;
const zkey = await zkeyUtils.readHeader(zkeyNameOld, maxZKeyVersion);

if (zkey.protocol != "groth16") {
throw new Error("zkey file is not groth16");
Expand All @@ -59,7 +60,7 @@ export default async function beacon(zkeyNameOld, zkeyNameNew, name, beaconHashS

const curve = await getCurve(zkey.q);

const mpcParams = await zkeyUtils.readMPCParams(fdOld, curve, sections);
const mpcParams = await zkeyUtils.readMPCParams(zkeyNameOld, maxZKeyVersion, curve);

const fdNew = await binFileUtils.createBinFile(zkeyNameNew, "zkey", 1, 10);

Expand Down Expand Up @@ -95,31 +96,28 @@ export default async function beacon(zkeyNameOld, zkeyNameNew, name, beaconHashS

mpcParams.contributions.push(curContribution);

await zkeyUtils.writeHeader(fdNew, zkey);
await zkeyUtils.writeHeader(zkeyNameNew, zkey);

// IC
await binFileUtils.copySection(fdOld, sections, fdNew, 3);
await chunkFileUtils.copySectionFile(zkeyNameOld, zkeyNameNew, 3);

// Coeffs (Keep original)
await binFileUtils.copySection(fdOld, sections, fdNew, 4);
await chunkFileUtils.copySectionFile(zkeyNameOld, zkeyNameNew, 4);

// A Section
await binFileUtils.copySection(fdOld, sections, fdNew, 5);
await chunkFileUtils.copySectionFile(zkeyNameOld, zkeyNameNew, 5);

// B1 Section
await binFileUtils.copySection(fdOld, sections, fdNew, 6);
await chunkFileUtils.copySectionFile(zkeyNameOld, zkeyNameNew, 6);

// B2 Section
await binFileUtils.copySection(fdOld, sections, fdNew, 7);
await chunkFileUtils.copySectionFile(zkeyNameOld, zkeyNameNew, 7);

const invDelta = curve.Fr.inv(curContribution.delta.prvKey);
await applyKeyToSection(fdOld, sections, fdNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
await applyKeyToSection(fdOld, sections, fdNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);
await applyKeyToSection(zkeyNameOld, maxZKeyVersion, zkeyNameNew, 8, curve, "G1", invDelta, curve.Fr.e(1), "L Section", logger);
await applyKeyToSection(zkeyNameOld, maxZKeyVersion, zkeyNameNew, 9, curve, "G1", invDelta, curve.Fr.e(1), "H Section", logger);

await zkeyUtils.writeMPCParams(fdNew, curve, mpcParams);

await fdOld.close();
await fdNew.close();
await zkeyUtils.writeMPCParams(zkeyNameNew, curve, mpcParams);

const contributionHasher = Blake2b(64);
utils.hashPubKey(contributionHasher, curve, curContribution);
Expand Down
17 changes: 8 additions & 9 deletions src/zkey_export_verificationkey.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,32 +18,31 @@
*/

import * as binFileUtils from "@iden3/binfileutils";
import * as chunkFileUtils from "./chunk_utils.js";
import * as zkeyUtils from "./zkey_utils.js";
import { getCurveFromQ as getCurve } from "./curves.js";
import { utils } from "ffjavascript";
const {stringifyBigInts} = utils;

export default async function zkeyExportVerificationKey(zkeyName, /* logger */ ) {
const maxZKeyVersion = 2;

const {fd, sections} = await binFileUtils.readBinFile(zkeyName, "zkey", 2);
const zkey = await zkeyUtils.readHeader(fd, sections);
const zkey = await zkeyUtils.readHeader(zkeyName, maxZKeyVersion);

let res;
if (zkey.protocol == "groth16") {
res = await groth16Vk(zkey, fd, sections);
res = await groth16Vk(zkey, zkeyName, maxZKeyVersion);
} else if (zkey.protocol == "plonk") {
res = await plonkVk(zkey);
} else {
throw new Error("zkey file is not groth16");
}

await fd.close();

return res;
}


async function groth16Vk(zkey, fd, sections) {
async function groth16Vk(zkey, zkeyFileName, maxZKeyVersion) {
const curve = await getCurve(zkey.q);
const sG1 = curve.G1.F.n8*2;

Expand All @@ -65,14 +64,14 @@ async function groth16Vk(zkey, fd, sections) {

// Read IC Section
///////////
await binFileUtils.startReadUniqueSection(fd, sections, 3);
const fd3 = await chunkFileUtils.startReadSectionFile(zkeyFileName, 3, maxZKeyVersion);
vKey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
const buff = await fd.read(sG1);
const buff = await fd3.read(sG1);
const P = curve.G1.toObject(buff);
vKey.IC.push(P);
}
await binFileUtils.endReadSection(fd);
await chunkFileUtils.endReadSectionFile(fd3);

vKey = stringifyBigInts(vKey);

Expand Down
Loading

0 comments on commit d1c10a6

Please sign in to comment.