diff --git a/build/cli.cjs b/build/cli.cjs
index 147be84b..fbe6c915 100755
--- a/build/cli.cjs
+++ b/build/cli.cjs
@@ -2023,6 +2023,107 @@ async function verify(tauFilename, logger) {
}
}
+// Each section is its own file.
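+// A section file is the base zkey name plus a letter derived from the section
+// id (0 -> "a", 1 -> "b", ...). Each file carries a one-section bin header:
+// a 4-byte magic ("zky" + the letter), a ULE32 version, a ULE32 section count
+// (always 1), then a ULE64 payload size followed by the payload itself.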
+
+function sectionName(sectionId) {
+ return String.fromCharCode('a'.charCodeAt(0) + sectionId);
+}
+
+async function startWriteSectionFile(zkeyName, sectionId) {
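+    // createBinFile writes the magic, version (1) and section count (1); the
+    // last two arguments are presumably the cache and page size hints.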
+ const fd = await binFileUtils__namespace.createBinFile(zkeyName + sectionName(sectionId), "zky" + sectionName(sectionId), 1, 1, 1<<22, 1<<24);
+
+ fd.writingSection = {
+ pSectionSize: fd.pos
+ };
+    await fd.writeULE64(0); // Temporarily set to 0 length
+ return fd;
+}
+
+async function endWriteSectionFile(fd) {
+ if (typeof fd.writingSection === "undefined") throw new Error("Not writing a section");
+
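+    // Compute the payload length, seek back to the 8-byte placeholder written
+    // by startWriteSectionFile, patch it in, then restore the position and close.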
+ const sectionSize = fd.pos - fd.writingSection.pSectionSize - 8;
+ const oldPos = fd.pos;
+ fd.pos = fd.writingSection.pSectionSize;
+ await fd.writeULE64(sectionSize);
+ fd.pos = oldPos;
+ await fd.close();
+ delete fd.writingSection;
+}
+
+async function startReadSectionFile(zkeyName, sectionId, maxVersion) {
+ const fileName = zkeyName + sectionName(sectionId);
+ const type = "zky" + sectionName(sectionId);
+ const fd = await fastFile__namespace.readExisting(fileName);
+
+ const b = await fd.read(4);
+ let readedType = "";
+ for (let i=0; i<4; i++) readedType += String.fromCharCode(b[i]);
+
+ if (readedType != type) throw new Error(fileName + ": Invalid File format");
+
+ let v = await fd.readULE32();
+
+ if (v>maxVersion) throw new Error("Version not supported");
+
+ let nSections = await fd.readULE32();
+ if (nSections != 1) throw new Error("More than one section found");
+
+ let size = await fd.readULE64();
+ fd.readingSection = {
+ size: size,
+ p: fd.pos,
+ };
+ return fd;
+}
+
+async function endReadSectionFile(fd, noCheck) {
+ if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
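+    // Unless noCheck is set, verify the caller consumed exactly the advertised payload.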
+ if (!noCheck) {
+ if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading");
+ }
+ await fd.close();
+ delete fd.readingSection;
+}
+
+async function readSectionFile(zkeyName, sectionId, maxVersion) {
+ const fd = await startReadSectionFile(zkeyName, sectionId, maxVersion);
+ let buff;
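+    // Payloads under 1 GiB fit in a plain Uint8Array; larger ones use ffjavascript's BigBuffer.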
+ if (fd.readingSection.size < (1 << 30) ) {
+ buff = new Uint8Array(fd.readingSection.size);
+ } else {
+ buff = new ffjavascript.BigBuffer(fd.readingSection.size);
+ }
+
+ await fd.readToBuffer(buff, 0, fd.readingSection.size, fd.pos);
+ await endReadSectionFile(fd);
+ return buff;
+}
+
+async function copySectionFile(zkeyNameOld, zkeyNameNew, sectionId) {
+ await fs__default["default"].copyFile(zkeyNameOld + sectionName(sectionId), zkeyNameNew + sectionName(sectionId), (err) => {
+ if (err) throw err;
+ });
+}
+
+async function sectionFileIsEqual(zkeyNameOld, zkeyNameNew, sectionId, maxVersion) {
+ const fdOld = await startReadSectionFile(zkeyNameOld, sectionId, maxVersion);
+ const fdNew = await startReadSectionFile(zkeyNameNew, sectionId, maxVersion);
+
+ const MAX_BUFF_SIZE = fdOld.pageSize * 16;
+ if (fdOld.readingSection.size != fdNew.readingSection.size) return false;
+ const totalBytes=fdOld.readingSection.size;
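+    // Compare the two payloads chunk by chunk, up to 16 pages at a time.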
+    for (let i=0; i<totalBytes; i += MAX_BUFF_SIZE) {
    along with snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
-async function writeHeader(fd, zkey) {
+async function writeHeader(zkeyName, zkey) {
// Write the header
///////////
- await binFileUtils__namespace.startWriteSection(fd, 1);
- await fd.writeULE32(1); // Groth
- await binFileUtils__namespace.endWriteSection(fd);
+ const fdSection1 = await startWriteSectionFile(zkeyName, 1);
+ await fdSection1.writeULE32(1); // Groth
+ await endWriteSectionFile(fdSection1);
// Write the Groth header section
///////////
const curve = await getCurveFromQ(zkey.q);
- await binFileUtils__namespace.startWriteSection(fd, 2);
+ const fdSection2 = await startWriteSectionFile(zkeyName, 2);
const primeQ = curve.q;
const n8q = (Math.floor( (ffjavascript.Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
const primeR = curve.r;
const n8r = (Math.floor( (ffjavascript.Scalar.bitLength(primeR) - 1) / 64) +1)*8;
- await fd.writeULE32(n8q);
- await binFileUtils__namespace.writeBigInt(fd, primeQ, n8q);
- await fd.writeULE32(n8r);
- await binFileUtils__namespace.writeBigInt(fd, primeR, n8r);
- await fd.writeULE32(zkey.nVars); // Total number of bars
- await fd.writeULE32(zkey.nPublic); // Total number of public vars (not including ONE)
- await fd.writeULE32(zkey.domainSize); // domainSize
- await writeG1(fd, curve, zkey.vk_alpha_1);
- await writeG1(fd, curve, zkey.vk_beta_1);
- await writeG2(fd, curve, zkey.vk_beta_2);
- await writeG2(fd, curve, zkey.vk_gamma_2);
- await writeG1(fd, curve, zkey.vk_delta_1);
- await writeG2(fd, curve, zkey.vk_delta_2);
-
- await binFileUtils__namespace.endWriteSection(fd);
-
+ await fdSection2.writeULE32(n8q);
+ await binFileUtils__namespace.writeBigInt(fdSection2, primeQ, n8q);
+ await fdSection2.writeULE32(n8r);
+ await binFileUtils__namespace.writeBigInt(fdSection2, primeR, n8r);
+    await fdSection2.writeULE32(zkey.nVars); // Total number of vars
+ await fdSection2.writeULE32(zkey.nPublic); // Total number of public vars (not including ONE)
+ await fdSection2.writeULE32(zkey.domainSize); // domainSize
+ await writeG1(fdSection2, curve, zkey.vk_alpha_1);
+ await writeG1(fdSection2, curve, zkey.vk_beta_1);
+ await writeG2(fdSection2, curve, zkey.vk_beta_2);
+ await writeG2(fdSection2, curve, zkey.vk_gamma_2);
+ await writeG1(fdSection2, curve, zkey.vk_delta_1);
+ await writeG2(fdSection2, curve, zkey.vk_delta_2);
+ await endWriteSectionFile(fdSection2);
}
async function writeG1(fd, curve, p) {
@@ -3736,17 +3830,15 @@ async function readG2(fd, curve, toObject) {
}
-async function readHeader$1(fd, sections, toObject) {
+async function readHeader$1(zkeyFileName, maxZKeyVersion) {
// Read Header
/////////////////////
- await binFileUtils__namespace.startReadUniqueSection(fd, sections, 1);
+ const fd = await startReadSectionFile(zkeyFileName, 1, maxZKeyVersion);
const protocolId = await fd.readULE32();
- await binFileUtils__namespace.endReadSection(fd);
+ await endReadSectionFile(fd);
if (protocolId == 1) {
- return await readHeaderGroth16(fd, sections, toObject);
- } else if (protocolId == 2) {
- return await readHeaderPlonk(fd, sections);
+ return await readHeaderGroth16(zkeyFileName, maxZKeyVersion);
} else {
throw new Error("Protocol not supported: ");
}
@@ -3755,14 +3847,14 @@ async function readHeader$1(fd, sections, toObject) {
-async function readHeaderGroth16(fd, sections, toObject) {
+async function readHeaderGroth16(zkeyFileName, maxZKeyVersion, toObject) {
const zkey = {};
zkey.protocol = "groth16";
// Read Groth Header
/////////////////////
- await binFileUtils__namespace.startReadUniqueSection(fd, sections, 2);
+ const fd = await startReadSectionFile(zkeyFileName, 2, maxZKeyVersion);
const n8q = await fd.readULE32();
zkey.n8q = n8q;
zkey.q = await binFileUtils__namespace.readBigInt(fd, n8q);
@@ -3783,61 +3875,25 @@ async function readHeaderGroth16(fd, sections, toObject) {
zkey.vk_gamma_2 = await readG2(fd, curve, toObject);
zkey.vk_delta_1 = await readG1(fd, curve, toObject);
zkey.vk_delta_2 = await readG2(fd, curve, toObject);
- await binFileUtils__namespace.endReadSection(fd);
+ await endReadSectionFile(fd);
return zkey;
-
}
+async function readZKey(fileName, toObject) {
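+    // Highest per-section zkey format version this reader accepts.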
+ const maxZKeyVersion = 2;
+ // const {fd, sections} = await binFileUtils.readBinFile(fileName, "zkey", 1);
+    const zkey = await readHeader$1(fileName, maxZKeyVersion);
+ if (zkey.protocol != "groth16") {
+ throw new Error("zkey file is not groth16");
+ }
-async function readHeaderPlonk(fd, sections, protocol, toObject) {
- const zkey = {};
-
- zkey.protocol = "plonk";
-
- // Read Plonk Header
- /////////////////////
- await binFileUtils__namespace.startReadUniqueSection(fd, sections, 2);
- const n8q = await fd.readULE32();
- zkey.n8q = n8q;
- zkey.q = await binFileUtils__namespace.readBigInt(fd, n8q);
-
- const n8r = await fd.readULE32();
- zkey.n8r = n8r;
- zkey.r = await binFileUtils__namespace.readBigInt(fd, n8r);
-
- let curve = await getCurveFromQ(zkey.q);
-
- zkey.nVars = await fd.readULE32();
- zkey.nPublic = await fd.readULE32();
- zkey.domainSize = await fd.readULE32();
- zkey.power = log2(zkey.domainSize);
- zkey.nAdditions = await fd.readULE32();
- zkey.nConstrains = await fd.readULE32();
- zkey.k1 = await fd.read(n8r);
- zkey.k2 = await fd.read(n8r);
-
- zkey.Qm = await readG1(fd, curve, toObject);
- zkey.Ql = await readG1(fd, curve, toObject);
- zkey.Qr = await readG1(fd, curve, toObject);
- zkey.Qo = await readG1(fd, curve, toObject);
- zkey.Qc = await readG1(fd, curve, toObject);
- zkey.S1 = await readG1(fd, curve, toObject);
- zkey.S2 = await readG1(fd, curve, toObject);
- zkey.S3 = await readG1(fd, curve, toObject);
- zkey.X_2 = await readG2(fd, curve, toObject);
-
- await binFileUtils__namespace.endReadSection(fd);
-
- return zkey;
-}
-
-async function readZKey(fileName, toObject) {
- const {fd, sections} = await binFileUtils__namespace.readBinFile(fileName, "zkey", 1);
+ // const fd = await chunkFileUtils.startReadSectionFile(zkeyFileName, 1, maxZKeyVersion);
+ // const protocolId = await fd.readULE32();
+ // await chunkFileUtils.endReadSectionFile(fd);
- const zkey = await readHeader$1(fd, sections, "groth16");
const Fr = new ffjavascript.F1Field(zkey.r);
const Rr = ffjavascript.Scalar.mod(ffjavascript.Scalar.shl(1, zkey.n8r*8), zkey.r);
@@ -3848,25 +3904,24 @@ async function readZKey(fileName, toObject) {
// Read IC Section
///////////
- await binFileUtils__namespace.startReadUniqueSection(fd, sections, 3);
+ const fd3 = await startReadSectionFile(zkeyFileName, 3, maxZKeyVersion);
zkey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
- const P = await readG1(fd, curve, toObject);
+ const P = await readG1(fd3, curve, toObject);
zkey.IC.push(P);
}
- await binFileUtils__namespace.endReadSection(fd);
-
+ await endReadSectionFile(fd3);
// Read Coefs
///////////
- await binFileUtils__namespace.startReadUniqueSection(fd, sections, 4);
- const nCCoefs = await fd.readULE32();
+ const fd4 = await startReadSectionFile(zkeyFileName, 4, maxZKeyVersion);
+ const nCCoefs = await fd4.readULE32();
zkey.ccoefs = [];
    for (let i=0; i<nCCoefs; i++) {
    for (let i=mpcParams.contributions.length-1; i>=0; i--) {
const c = mpcParams.contributions[i];
if (logger) logger.info("-------------------------");
@@ -4634,23 +4672,23 @@ async function phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, lo
return true;
- async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
+ async function sectionHasSameRatio(groupName, initFileName, zkeyFileName, maxZKeyVersion, idSection, g2sp, g2spx, sectionName) {
const MAX_CHUNK_SIZE = 1<<20;
const G = curve[groupName];
const sG = G.F.n8*2;
- await binFileUtils__namespace.startReadUniqueSection(fd1, sections1, idSection);
- await binFileUtils__namespace.startReadUniqueSection(fd2, sections2, idSection);
+ const fdOld = await startReadSectionFile(initFileName, idSection, maxZKeyVersion);
+ const fdNew = await startReadSectionFile(zkeyFileName, idSection, maxZKeyVersion);
let R1 = G.zero;
let R2 = G.zero;
- const nPoints = sections1[idSection][0].size / sG;
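+        // Each chunk file holds exactly one section, so its payload size gives the point count directly.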
+ const nPoints = fdOld.readingSection.size / sG;
        for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
+    if (v>maxVersion) throw new Error("Version not supported");
+
+ let nSections = await fd.readULE32();
+ if (nSections != 1) throw new Error("More than one section found");
+
+ let size = await fd.readULE64();
+ fd.readingSection = {
+ size: size,
+ p: fd.pos,
+ };
+ return fd;
+}
+
+async function endReadSectionFile(fd, noCheck) {
+ if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
+ if (!noCheck) {
+ if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading");
+ }
+ await fd.close();
+ delete fd.readingSection;
+}
+
+async function readSectionFile(zkeyName, sectionId, maxVersion) {
+ const fd = await startReadSectionFile(zkeyName, sectionId, maxVersion);
+ let buff;
+ if (fd.readingSection.size < (1 << 30) ) {
+ buff = new Uint8Array(fd.readingSection.size);
+ } else {
+ buff = new ffjavascript.BigBuffer(fd.readingSection.size);
+ }
+
+ await fd.readToBuffer(buff, 0, fd.readingSection.size, fd.pos);
+ await endReadSectionFile(fd);
+ return buff;
+}
+
+async function copySectionFile(zkeyNameOld, zkeyNameNew, sectionId) {
+ await fs__default["default"].copyFile(zkeyNameOld + sectionName(sectionId), zkeyNameNew + sectionName(sectionId), (err) => {
+ if (err) throw err;
+ });
+}
+
+async function sectionFileIsEqual(zkeyNameOld, zkeyNameNew, sectionId, maxVersion) {
+ const fdOld = await startReadSectionFile(zkeyNameOld, sectionId, maxVersion);
+ const fdNew = await startReadSectionFile(zkeyNameNew, sectionId, maxVersion);
+
+ const MAX_BUFF_SIZE = fdOld.pageSize * 16;
+ if (fdOld.readingSection.size != fdNew.readingSection.size) return false;
+ const totalBytes=fdOld.readingSection.size;
+    for (let i=0; i<totalBytes; i += MAX_BUFF_SIZE) {
    along with snarkjs. If not, see <https://www.gnu.org/licenses/>.
*/
-async function writeHeader(fd, zkey) {
+async function writeHeader(zkeyName, zkey) {
// Write the header
///////////
- await binFileUtils__namespace.startWriteSection(fd, 1);
- await fd.writeULE32(1); // Groth
- await binFileUtils__namespace.endWriteSection(fd);
+ const fdSection1 = await startWriteSectionFile(zkeyName, 1);
+ await fdSection1.writeULE32(1); // Groth
+ await endWriteSectionFile(fdSection1);
// Write the Groth header section
///////////
const curve = await getCurveFromQ(zkey.q);
- await binFileUtils__namespace.startWriteSection(fd, 2);
+ const fdSection2 = await startWriteSectionFile(zkeyName, 2);
const primeQ = curve.q;
const n8q = (Math.floor( (ffjavascript.Scalar.bitLength(primeQ) - 1) / 64) +1)*8;
const primeR = curve.r;
const n8r = (Math.floor( (ffjavascript.Scalar.bitLength(primeR) - 1) / 64) +1)*8;
- await fd.writeULE32(n8q);
- await binFileUtils__namespace.writeBigInt(fd, primeQ, n8q);
- await fd.writeULE32(n8r);
- await binFileUtils__namespace.writeBigInt(fd, primeR, n8r);
- await fd.writeULE32(zkey.nVars); // Total number of bars
- await fd.writeULE32(zkey.nPublic); // Total number of public vars (not including ONE)
- await fd.writeULE32(zkey.domainSize); // domainSize
- await writeG1(fd, curve, zkey.vk_alpha_1);
- await writeG1(fd, curve, zkey.vk_beta_1);
- await writeG2(fd, curve, zkey.vk_beta_2);
- await writeG2(fd, curve, zkey.vk_gamma_2);
- await writeG1(fd, curve, zkey.vk_delta_1);
- await writeG2(fd, curve, zkey.vk_delta_2);
-
- await binFileUtils__namespace.endWriteSection(fd);
-
-
+ await fdSection2.writeULE32(n8q);
+ await binFileUtils__namespace.writeBigInt(fdSection2, primeQ, n8q);
+ await fdSection2.writeULE32(n8r);
+ await binFileUtils__namespace.writeBigInt(fdSection2, primeR, n8r);
+    await fdSection2.writeULE32(zkey.nVars); // Total number of vars
+ await fdSection2.writeULE32(zkey.nPublic); // Total number of public vars (not including ONE)
+ await fdSection2.writeULE32(zkey.domainSize); // domainSize
+ await writeG1(fdSection2, curve, zkey.vk_alpha_1);
+ await writeG1(fdSection2, curve, zkey.vk_beta_1);
+ await writeG2(fdSection2, curve, zkey.vk_beta_2);
+ await writeG2(fdSection2, curve, zkey.vk_gamma_2);
+ await writeG1(fdSection2, curve, zkey.vk_delta_1);
+ await writeG2(fdSection2, curve, zkey.vk_delta_2);
+
+ await endWriteSectionFile(fdSection2);
}
async function writeG1(fd, curve, p) {
@@ -323,17 +424,15 @@ async function readG2(fd, curve, toObject) {
}
-async function readHeader$1(fd, sections, toObject) {
+async function readHeader$1(zkeyFileName, maxZKeyVersion) {
// Read Header
/////////////////////
- await binFileUtils__namespace.startReadUniqueSection(fd, sections, 1);
+ const fd = await startReadSectionFile(zkeyFileName, 1, maxZKeyVersion);
const protocolId = await fd.readULE32();
- await binFileUtils__namespace.endReadSection(fd);
+ await endReadSectionFile(fd);
if (protocolId == 1) {
- return await readHeaderGroth16(fd, sections, toObject);
- } else if (protocolId == 2) {
- return await readHeaderPlonk(fd, sections);
+ return await readHeaderGroth16(zkeyFileName, maxZKeyVersion);
} else {
throw new Error("Protocol not supported: ");
}
@@ -342,14 +441,14 @@ async function readHeader$1(fd, sections, toObject) {
-async function readHeaderGroth16(fd, sections, toObject) {
+async function readHeaderGroth16(zkeyFileName, maxZKeyVersion, toObject) {
const zkey = {};
zkey.protocol = "groth16";
// Read Groth Header
/////////////////////
- await binFileUtils__namespace.startReadUniqueSection(fd, sections, 2);
+ const fd = await startReadSectionFile(zkeyFileName, 2, maxZKeyVersion);
const n8q = await fd.readULE32();
zkey.n8q = n8q;
zkey.q = await binFileUtils__namespace.readBigInt(fd, n8q);
@@ -370,61 +469,25 @@ async function readHeaderGroth16(fd, sections, toObject) {
zkey.vk_gamma_2 = await readG2(fd, curve, toObject);
zkey.vk_delta_1 = await readG1(fd, curve, toObject);
zkey.vk_delta_2 = await readG2(fd, curve, toObject);
- await binFileUtils__namespace.endReadSection(fd);
+ await endReadSectionFile(fd);
return zkey;
-
}
+async function readZKey(fileName, toObject) {
+ const maxZKeyVersion = 2;
+ // const {fd, sections} = await binFileUtils.readBinFile(fileName, "zkey", 1);
+    const zkey = await readHeader$1(fileName, maxZKeyVersion);
+ if (zkey.protocol != "groth16") {
+ throw new Error("zkey file is not groth16");
+ }
-async function readHeaderPlonk(fd, sections, protocol, toObject) {
- const zkey = {};
-
- zkey.protocol = "plonk";
-
- // Read Plonk Header
- /////////////////////
- await binFileUtils__namespace.startReadUniqueSection(fd, sections, 2);
- const n8q = await fd.readULE32();
- zkey.n8q = n8q;
- zkey.q = await binFileUtils__namespace.readBigInt(fd, n8q);
-
- const n8r = await fd.readULE32();
- zkey.n8r = n8r;
- zkey.r = await binFileUtils__namespace.readBigInt(fd, n8r);
-
- let curve = await getCurveFromQ(zkey.q);
-
- zkey.nVars = await fd.readULE32();
- zkey.nPublic = await fd.readULE32();
- zkey.domainSize = await fd.readULE32();
- zkey.power = log2(zkey.domainSize);
- zkey.nAdditions = await fd.readULE32();
- zkey.nConstrains = await fd.readULE32();
- zkey.k1 = await fd.read(n8r);
- zkey.k2 = await fd.read(n8r);
-
- zkey.Qm = await readG1(fd, curve, toObject);
- zkey.Ql = await readG1(fd, curve, toObject);
- zkey.Qr = await readG1(fd, curve, toObject);
- zkey.Qo = await readG1(fd, curve, toObject);
- zkey.Qc = await readG1(fd, curve, toObject);
- zkey.S1 = await readG1(fd, curve, toObject);
- zkey.S2 = await readG1(fd, curve, toObject);
- zkey.S3 = await readG1(fd, curve, toObject);
- zkey.X_2 = await readG2(fd, curve, toObject);
-
- await binFileUtils__namespace.endReadSection(fd);
-
- return zkey;
-}
-
-async function readZKey(fileName, toObject) {
- const {fd, sections} = await binFileUtils__namespace.readBinFile(fileName, "zkey", 1);
+ // const fd = await chunkFileUtils.startReadSectionFile(zkeyFileName, 1, maxZKeyVersion);
+ // const protocolId = await fd.readULE32();
+ // await chunkFileUtils.endReadSectionFile(fd);
- const zkey = await readHeader$1(fd, sections, "groth16");
const Fr = new ffjavascript.F1Field(zkey.r);
const Rr = ffjavascript.Scalar.mod(ffjavascript.Scalar.shl(1, zkey.n8r*8), zkey.r);
@@ -435,25 +498,24 @@ async function readZKey(fileName, toObject) {
// Read IC Section
///////////
- await binFileUtils__namespace.startReadUniqueSection(fd, sections, 3);
+ const fd3 = await startReadSectionFile(zkeyFileName, 3, maxZKeyVersion);
zkey.IC = [];
for (let i=0; i<= zkey.nPublic; i++) {
- const P = await readG1(fd, curve, toObject);
+ const P = await readG1(fd3, curve, toObject);
zkey.IC.push(P);
}
- await binFileUtils__namespace.endReadSection(fd);
-
+ await endReadSectionFile(fd3);
// Read Coefs
///////////
- await binFileUtils__namespace.startReadUniqueSection(fd, sections, 4);
- const nCCoefs = await fd.readULE32();
+ const fd4 = await startReadSectionFile(zkeyFileName, 4, maxZKeyVersion);
+ const nCCoefs = await fd4.readULE32();
zkey.ccoefs = [];
    for (let i=0; i<nCCoefs; i++) {
    for (let i=mpcParams.contributions.length-1; i>=0; i--) {
const c = mpcParams.contributions[i];
if (logger) logger.info("-------------------------");
@@ -5199,23 +5238,23 @@ async function phase2verifyFromInit(initFileName, pTauFileName, zkeyFileName, lo
return true;
- async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
+ async function sectionHasSameRatio(groupName, initFileName, zkeyFileName, maxZKeyVersion, idSection, g2sp, g2spx, sectionName) {
const MAX_CHUNK_SIZE = 1<<20;
const G = curve[groupName];
const sG = G.F.n8*2;
- await binFileUtils__namespace.startReadUniqueSection(fd1, sections1, idSection);
- await binFileUtils__namespace.startReadUniqueSection(fd2, sections2, idSection);
+ const fdOld = await startReadSectionFile(initFileName, idSection, maxZKeyVersion);
+ const fdNew = await startReadSectionFile(zkeyFileName, idSection, maxZKeyVersion);
let R1 = G.zero;
let R2 = G.zero;
- const nPoints = sections1[idSection][0].size / sG;
+ const nPoints = fdOld.readingSection.size / sG;
        for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {
+    if (v>maxVersion) throw new Error("Version not supported");
+
+ let nSections = await fd.readULE32();
+ if (nSections != 1) throw new Error("More than one section found");
+
+ let size = await fd.readULE64();
+ fd.readingSection = {
+ size: size,
+ p: fd.pos,
+ }
+ return fd;
+}
+
+export async function endReadSectionFile(fd, noCheck) {
+ if (typeof fd.readingSection === "undefined") throw new Error("Not reading a section");
+ if (!noCheck) {
+ if (fd.pos-fd.readingSection.p != fd.readingSection.size) throw new Error("Invalid section size reading");
+ }
+ await fd.close();
+ delete fd.readingSection;
+}
+
+export async function readSectionFile(zkeyName, sectionId, maxVersion) {
+ const fd = await startReadSectionFile(zkeyName, sectionId, maxVersion);
+ let buff;
+ if (fd.readingSection.size < (1 << 30) ) {
+ buff = new Uint8Array(fd.readingSection.size);
+ } else {
+ buff = new BigBuffer(fd.readingSection.size);
+ }
+
+ await fd.readToBuffer(buff, 0, fd.readingSection.size, fd.pos);
+ await endReadSectionFile(fd);
+ return buff;
+}
+
+export async function copySectionFile(zkeyNameOld, zkeyNameNew, sectionId) {
+ await fs.copyFile(zkeyNameOld + sectionName(sectionId), zkeyNameNew + sectionName(sectionId), (err) => {
+ if (err) throw err;
+ });
+}
+
+export async function sectionFileIsEqual(zkeyNameOld, zkeyNameNew, sectionId, maxVersion) {
+ const fdOld = await startReadSectionFile(zkeyNameOld, sectionId, maxVersion);
+ const fdNew = await startReadSectionFile(zkeyNameNew, sectionId, maxVersion);
+
+ const MAX_BUFF_SIZE = fdOld.pageSize * 16;
+ if (fdOld.readingSection.size != fdNew.readingSection.size) return false;
+ const totalBytes=fdOld.readingSection.size;
+    for (let i=0; i<totalBytes; i += MAX_BUFF_SIZE) {
-    zkey.ccoefs = zkey.ccoefs.filter( c => c.matrix<2);
- zkey.ccoefs.sort( (a,b) => a.constraint - b.constraint );
- await binFileUtils.startWriteSection(fd, 4);
- await fd.writeULE32(zkey.ccoefs.length);
-    for (let i=0; i<zkey.ccoefs.length; i++) {
    for (let i=mpcParams.contributions.length-1; i>=0; i--) {
const c = mpcParams.contributions[i];
if (logger) logger.info("-------------------------");
@@ -234,23 +220,23 @@ export default async function phase2verifyFromInit(initFileName, pTauFileName, z
return true;
- async function sectionHasSameRatio(groupName, fd1, sections1, fd2, sections2, idSection, g2sp, g2spx, sectionName) {
+ async function sectionHasSameRatio(groupName, initFileName, zkeyFileName, maxZKeyVersion, idSection, g2sp, g2spx, sectionName) {
const MAX_CHUNK_SIZE = 1<<20;
const G = curve[groupName];
const sG = G.F.n8*2;
- await binFileUtils.startReadUniqueSection(fd1, sections1, idSection);
- await binFileUtils.startReadUniqueSection(fd2, sections2, idSection);
+ const fdOld = await chunkFileUtils.startReadSectionFile(initFileName, idSection, maxZKeyVersion);
+ const fdNew = await chunkFileUtils.startReadSectionFile(zkeyFileName, idSection, maxZKeyVersion);
let R1 = G.zero;
let R2 = G.zero;
- const nPoints = sections1[idSection][0].size / sG;
+ const nPoints = fdOld.readingSection.size / sG;
        for (let i=0; i<nPoints; i += MAX_CHUNK_SIZE) {