diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fe6928718d8..85b0786b0ed 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -355,6 +355,21 @@ jobs: timeout-minutes: 40 run: earthly-ci --no-output ./+barretenberg-acir-tests-sol + bb-acir-tests-sol-honk: + needs: [noir-build-acir-tests, changes] + runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 + if: ${{ needs.changes.outputs.barretenberg == 'true' || needs.changes.outputs.noir == 'true' }} + steps: + - uses: actions/checkout@v4 + with: { ref: "${{ env.GIT_COMMIT }}" } + - uses: ./.github/ci-setup-action + with: + concurrency_key: barretenberg-acir-tests-sol-honk-x86 + - name: "BB Solidity Acir Tests" + working-directory: ./barretenberg/ + timeout-minutes: 40 + run: earthly-ci --no-output ./+barretenberg-acir-tests-sol-honk + bb-acir-tests-bb-js: needs: [noir-build-acir-tests, changes] runs-on: ${{ github.event.pull_request.user.login || github.actor }}-x86 @@ -667,6 +682,7 @@ jobs: - noir-build-acir-tests - bb-acir-tests-bb - bb-acir-tests-sol + - bb-acir-tests-sol-honk - bb-acir-tests-bb-js - noir-format - noir-test @@ -717,6 +733,7 @@ jobs: - noir-build-acir-tests - bb-acir-tests-bb - bb-acir-tests-sol + - bb-acir-tests-sol-honk - bb-acir-tests-bb-js - noir-format - noir-test diff --git a/barretenberg/Earthfile b/barretenberg/Earthfile index 2c3b9a9a129..a7a6b6b12d7 100644 --- a/barretenberg/Earthfile +++ b/barretenberg/Earthfile @@ -70,6 +70,23 @@ barretenberg-acir-tests-sol: RUN (cd sol-test && yarn) RUN PARALLEL=1 FLOW=sol ./run_acir_tests.sh assert_statement double_verify_proof double_verify_nested_proof +barretenberg-acir-tests-sol-honk: + FROM ../build-images/+from-registry + + COPY ./cpp/+preset-sol/ /usr/src/barretenberg/cpp/build + COPY ./cpp/+preset-clang-assert/bin/bb /usr/src/barretenberg/cpp/build/bin/bb + COPY ./+acir-tests/ /usr/src/barretenberg/acir_tests + COPY ./+sol/ /usr/src/barretenberg/sol + COPY ../noir/+build-acir-tests/ /usr/src/acir_artifacts + + WORKDIR /usr/src/barretenberg/acir_tests + + ENV TEST_SRC /usr/src/acir_artifacts + ENV VERBOSE=1 + + RUN (cd sol-test && yarn) + RUN PARALLEL=1 FLOW=honk_sol ./run_acir_tests.sh assert_statement 1_mul slices + barretenberg-acir-tests-bb.js: # Playwright not supported on base image ubuntu:noble, results in unmet dependencies FROM ../build-images/+base-slim-node diff --git a/barretenberg/acir_tests/flows/honk_sol.sh b/barretenberg/acir_tests/flows/honk_sol.sh new file mode 100755 index 00000000000..ed223b7d37c --- /dev/null +++ b/barretenberg/acir_tests/flows/honk_sol.sh @@ -0,0 +1,22 @@ +#!/bin/sh +set -eu + +export PROOF="$(pwd)/proof" +export PROOF_AS_FIELDS="$(pwd)/proof_fields.json" + +# Create a proof, write the solidity contract, write the proof as fields in order to extract the public inputs +$BIN prove_keccak_ultra_honk -o proof +$BIN write_vk_ultra_honk -o vk +$BIN proof_as_fields_honk -k vk -c $CRS_PATH -p $PROOF +$BIN contract_ultra_honk -k vk -c $CRS_PATH -b ./target/program.json -o Verifier.sol + +# Export the paths to the environment variables for the js test runner +export VERIFIER_PATH="$(pwd)/Verifier.sol" +export TEST_PATH=$(realpath "../../sol-test/HonkTest.sol") +export TESTING_HONK="true" + +# Use solcjs to compile the generated key contract with the template verifier and test contract +# index.js will start an anvil, on a random port +# Deploy the verifier then send a test transaction +export TEST_NAME=$(basename $(pwd)) +node ../../sol-test/src/index.js diff --git 
a/barretenberg/acir_tests/sol-test/HonkTest.sol b/barretenberg/acir_tests/sol-test/HonkTest.sol new file mode 100644 index 00000000000..03bf0397a21 --- /dev/null +++ b/barretenberg/acir_tests/sol-test/HonkTest.sol @@ -0,0 +1,18 @@ +// THIS FILE WILL NOT COMPILE BY ITSELF +// Compilation is handled in `src/index.js` where solcjs gathers the dependencies + +pragma solidity >=0.8.4; + +import {HonkVerifier} from "./Verifier.sol"; + +contract Test { + HonkVerifier verifier; + + constructor() { + verifier = new HonkVerifier(); + } + + function test(bytes calldata proof, bytes32[] calldata publicInputs) view public returns(bool) { + return verifier.verify(proof, publicInputs); + } +} diff --git a/barretenberg/acir_tests/sol-test/src/index.js b/barretenberg/acir_tests/sol-test/src/index.js index 029074c41e1..81833cd9e24 100644 --- a/barretenberg/acir_tests/sol-test/src/index.js +++ b/barretenberg/acir_tests/sol-test/src/index.js @@ -4,7 +4,8 @@ import { spawn } from "child_process"; import { ethers } from "ethers"; import solc from "solc"; -const NUMBER_OF_FIELDS_IN_PROOF = 93; +const NUMBER_OF_FIELDS_IN_PLONK_PROOF = 93; +const NUMBER_OF_FIELDS_IN_HONK_PROOF = 393; // We use the solcjs compiler version in this test, although it is slower than foundry, to run the test end to end // it simplifies of parallelising the test suite @@ -18,6 +19,14 @@ const NUMBER_OF_FIELDS_IN_PROOF = 93; // 5. Run the test against the deployed contract // 6. Kill the anvil instance +const getEnvVarCanBeUndefined = (envvar) => { + const varVal = process.env[envvar]; + if (!varVal) { + return false; + } + return varVal; +}; + const getEnvVar = (envvar) => { const varVal = process.env[envvar]; if (!varVal) { @@ -30,33 +39,23 @@ const getEnvVar = (envvar) => { const testName = getEnvVar("TEST_NAME"); // Get solidity files, passed into environment from `flows/sol.sh` -const keyPath = getEnvVar("KEY_PATH"); -const verifierPath = getEnvVar("VERIFIER_PATH"); const testPath = getEnvVar("TEST_PATH"); -const basePath = getEnvVar("BASE_PATH"); +const verifierPath = getEnvVar("VERIFIER_PATH"); const encoding = { encoding: "utf8" }; -const [key, test, verifier, base] = await Promise.all([ - fsPromises.readFile(keyPath, encoding), +const [test, verifier] = await Promise.all([ fsPromises.readFile(testPath, encoding), fsPromises.readFile(verifierPath, encoding), - fsPromises.readFile(basePath, encoding), ]); -var input = { +export const compilationInput = { language: "Solidity", sources: { - "Key.sol": { - content: key, - }, "Test.sol": { content: test, }, "Verifier.sol": { content: verifier, }, - "BaseUltraVerifier.sol": { - content: base, - }, }, settings: { // we require the optimizer @@ -72,7 +71,30 @@ var input = { }, }; -var output = JSON.parse(solc.compile(JSON.stringify(input))); +// If testing honk is set, then we compile the honk test suite +const testingHonk = getEnvVarCanBeUndefined("TESTING_HONK"); +const NUMBER_OF_FIELDS_IN_PROOF = testingHonk ? 
+  NUMBER_OF_FIELDS_IN_HONK_PROOF : NUMBER_OF_FIELDS_IN_PLONK_PROOF;
+if (!testingHonk) {
+
+  const keyPath = getEnvVar("KEY_PATH");
+  const basePath = getEnvVar("BASE_PATH");
+  const [key, base] = await Promise.all(
+    [
+      fsPromises.readFile(keyPath, encoding),
+      fsPromises.readFile(basePath, encoding),
+    ]
+  );
+
+  compilationInput.sources["BaseUltraVerifier.sol"] = {
+    content: base,
+  };
+  compilationInput.sources["Key.sol"] = {
+    content: key,
+  };
+}
+
+var output = JSON.parse(solc.compile(JSON.stringify(compilationInput)));
+
 const contract = output.contracts["Test.sol"]["Test"];
 const bytecode = contract.evm.bytecode.object;
 const abi = contract.abi;
@@ -127,8 +149,15 @@ const readPublicInputs = (proofAsFields) => {
   const publicInputs = [];
   // A proof with no public inputs is 93 fields long
   const numPublicInputs = proofAsFields.length - NUMBER_OF_FIELDS_IN_PROOF;
+  let publicInputsOffset = 0;
+
+  // Honk proofs contain 3 pieces of metadata before the public inputs, while plonk does not
+  if (testingHonk) {
+    publicInputsOffset = 3;
+  }
+
   for (let i = 0; i < numPublicInputs; i++) {
-    publicInputs.push(proofAsFields[i]);
+    publicInputs.push(proofAsFields[publicInputsOffset + i]);
   }
   return [numPublicInputs, publicInputs];
 };
@@ -179,8 +208,20 @@ try {
   const proofPath = getEnvVar("PROOF");
   const proof = readFileSync(proofPath);
-  // Cut the number of public inputs off of the proof string
-  const proofStr = `0x${proof.toString("hex").substring(64 * numPublicInputs)}`;
+  // Cut the number of public inputs out of the proof string
+  let proofStr = proof.toString("hex");
+  if (testingHonk) {
+    // Cut off the serialised buffer size at start
+    proofStr = proofStr.substring(8);
+    // Get the part before and after the public inputs
+    const proofStart = proofStr.slice(0, 64 * 3);
+    const proofEnd = proofStr.substring((64 * 3) + (64 * numPublicInputs));
+    proofStr = proofStart + proofEnd;
+  } else {
+    proofStr = proofStr.substring(64 * numPublicInputs);
+  }
+
+  proofStr = "0x" + proofStr;
   const key = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80";
diff --git a/barretenberg/cpp/src/barretenberg/bb/main.cpp b/barretenberg/cpp/src/barretenberg/bb/main.cpp
index 38316c7a1be..68619202503 100644
--- a/barretenberg/cpp/src/barretenberg/bb/main.cpp
+++ b/barretenberg/cpp/src/barretenberg/bb/main.cpp
@@ -3,10 +3,14 @@
 #include "barretenberg/common/map.hpp"
 #include "barretenberg/common/serialize.hpp"
 #include "barretenberg/dsl/acir_format/acir_format.hpp"
+#include "barretenberg/dsl/acir_proofs/honk_contract.hpp"
 #include "barretenberg/honk/proof_system/types/proof.hpp"
 #include "barretenberg/plonk/proof_system/proving_key/serialize.hpp"
 #include "barretenberg/serialize/cbind.hpp"
 #include "barretenberg/stdlib/honk_recursion/verifier/client_ivc_recursive_verifier.hpp"
+#include "barretenberg/stdlib_circuit_builders/ultra_flavor.hpp"
+#include "barretenberg/stdlib_circuit_builders/ultra_keccak.hpp"
+
 #include
 #ifndef DISABLE_AZTEC_VM
 #include "barretenberg/vm/avm_trace/avm_common.hpp"
@@ -791,6 +795,40 @@ void contract(const std::string& output_path, const std::string& vk_path) { } }
+/**
+ * @brief Writes a Honk Solidity verifier contract for an ACIR circuit to a file
+ *
+ * Communication:
+ * - stdout: The Solidity verifier contract is written to stdout as a string
+ * - Filesystem: The Solidity verifier contract is written to the path specified by output_path
+ *
+ * Note: The fact that the contract was computed for an ACIR circuit is not important,
+ * because this method only uses the
verification key to compute the Solidity verifier contract + * + * @param output_path Path to write the contract to + * @param vk_path Path to the file containing the serialized verification key + */ +void contract_honk(const std::string& output_path, const std::string& vk_path) +{ + using VerificationKey = UltraFlavor::VerificationKey; + using VerifierCommitmentKey = bb::VerifierCommitmentKey; + + auto g2_data = get_bn254_g2_data(CRS_PATH); + srs::init_crs_factory({}, g2_data); + auto vk = std::make_shared(from_buffer(read_file(vk_path))); + vk->pcs_verification_key = std::make_shared(); + + std::string contract = get_honk_solidity_verifier(std::move(vk)); + + if (output_path == "-") { + writeStringToStdout(contract); + vinfo("contract written to stdout"); + } else { + write_file(output_path, { contract.begin(), contract.end() }); + vinfo("contract written to: ", output_path); + } +} + /** * @brief Converts a proof from a byte array into a list of field elements * @@ -1360,6 +1398,9 @@ int main(int argc, char* argv[]) } else if (command == "contract") { std::string output_path = get_option(args, "-o", "./target/contract.sol"); contract(output_path, vk_path); + } else if (command == "contract_ultra_honk") { + std::string output_path = get_option(args, "-o", "./target/contract.sol"); + contract_honk(output_path, vk_path); } else if (command == "write_vk") { std::string output_path = get_option(args, "-o", "./target/vk"); write_vk(bytecode_path, output_path); @@ -1390,8 +1431,13 @@ int main(int argc, char* argv[]) } else if (command == "prove_ultra_honk") { std::string output_path = get_option(args, "-o", "./proofs/proof"); prove_honk(bytecode_path, witness_path, output_path); + } else if (command == "prove_keccak_ultra_honk") { + std::string output_path = get_option(args, "-o", "./proofs/proof"); + prove_honk(bytecode_path, witness_path, output_path); } else if (command == "verify_ultra_honk") { return verify_honk(proof_path, vk_path) ? 0 : 1; + } else if (command == "verify_keccak_ultra_honk") { + return verify_honk(proof_path, vk_path) ? 0 : 1; } else if (command == "write_vk_ultra_honk") { std::string output_path = get_option(args, "-o", "./target/vk"); write_vk_honk(bytecode_path, output_path); diff --git a/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/honk_contract.hpp b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/honk_contract.hpp new file mode 100644 index 00000000000..f44a7c05485 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/dsl/acir_proofs/honk_contract.hpp @@ -0,0 +1,1653 @@ +#pragma once +#include "barretenberg/honk/utils/honk_key_gen.hpp" +#include + +// Source code for the Ultrahonk Solidity verifier. +// It's expected that the AcirComposer will inject a library which will load the verification key into memory. 
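+// For reference, the injected key library is assumed (judging from the
+// HonkVerificationKey.loadVerificationKey() call site further down, not from actual generated
+// output) to expose roughly this interface:
+//
+//   library HonkVerificationKey {
+//       function loadVerificationKey() internal pure returns (Honk.VerificationKey memory vk);
+//   }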
+const std::string HONK_CONTRACT_SOURCE = R"( +type Fr is uint256; + +using { add as + } for Fr global; +using { sub as - } for Fr global; +using { mul as * } for Fr global; +using { exp as ^ } for Fr global; +using { notEqual as != } for Fr global; +using { equal as == } for Fr global; + +uint256 constant MODULUS = + 21888242871839275222246405745257275088548364400416034343698204186575808495617; // Prime field order + +// Instantiation +library FrLib +{ + function from(uint256 value) internal pure returns(Fr) + { + return Fr.wrap(value % MODULUS); + } + + function fromBytes32(bytes32 value) internal pure returns(Fr) + { + return Fr.wrap(uint256(value) % MODULUS); + } + + function toBytes32(Fr value) internal pure returns(bytes32) + { + return bytes32(Fr.unwrap(value)); + } + + function invert(Fr value) internal view returns(Fr) + { + uint256 v = Fr.unwrap(value); + uint256 result; + + // Call the modexp precompile to invert in the field + assembly + { + let free := mload(0x40) + mstore(free, 0x20) + mstore(add(free, 0x20), 0x20) + mstore(add(free, 0x40), 0x20) + mstore(add(free, 0x60), v) + mstore(add(free, 0x80), sub(MODULUS, 2)) + mstore(add(free, 0xa0), MODULUS) + let success := staticcall(gas(), 0x05, free, 0xc0, 0x00, 0x20) + if iszero(success) { + revert(0, 0) + } + result := mload(0x00) + } + + return Fr.wrap(result); + } + + function pow(Fr base, uint256 v) internal view returns(Fr) + { + uint256 b = Fr.unwrap(base); + uint256 result; + + // Call the modexp precompile to invert in the field + assembly + { + let free := mload(0x40) + mstore(free, 0x20) + mstore(add(free, 0x20), 0x20) + mstore(add(free, 0x40), 0x20) + mstore(add(free, 0x60), b) + mstore(add(free, 0x80), v) + mstore(add(free, 0xa0), MODULUS) + let success := staticcall(gas(), 0x05, free, 0xc0, 0x00, 0x20) + if iszero(success) { + revert(0, 0) + } + result := mload(0x00) + } + + return Fr.wrap(result); + } + + function div(Fr numerator, Fr denominator) internal view returns(Fr) + { + Fr inversion = invert(denominator); + return numerator * invert(denominator); + } +} + +// Free functions +function add(Fr a, Fr b) pure returns(Fr) +{ + return Fr.wrap(addmod(Fr.unwrap(a), Fr.unwrap(b), MODULUS)); +} + +function mul(Fr a, Fr b) pure returns(Fr) +{ + return Fr.wrap(mulmod(Fr.unwrap(a), Fr.unwrap(b), MODULUS)); +} + +function sub(Fr a, Fr b) pure returns(Fr) +{ + return Fr.wrap(addmod(Fr.unwrap(a), MODULUS - Fr.unwrap(b), MODULUS)); +} + +function exp(Fr base, Fr exponent) pure returns(Fr) +{ + if (Fr.unwrap(exponent) == 0) + return Fr.wrap(1); + // Implement exponent with a loop as we will overflow otherwise + for (uint256 i = 1; i < Fr.unwrap(exponent); i += i) { + base = base * base; + } + return base; +} + +function notEqual(Fr a, Fr b) pure returns(bool) +{ + return Fr.unwrap(a) != Fr.unwrap(b); +} + +function equal(Fr a, Fr b) pure returns(bool) +{ + return Fr.unwrap(a) == Fr.unwrap(b); +} + +uint256 constant CONST_PROOF_SIZE_LOG_N = 28; + +uint256 constant NUMBER_OF_SUBRELATIONS = 18; +uint256 constant BATCHED_RELATION_PARTIAL_LENGTH = 7; +uint256 constant NUMBER_OF_ENTITIES = 42; +uint256 constant NUMBER_OF_ALPHAS = 17; + +// Prime field order +uint256 constant Q = 21888242871839275222246405745257275088696311157297823662689037894645226208583; // EC group order +uint256 constant P = 21888242871839275222246405745257275088548364400416034343698204186575808495617; // Prime field order + +// ENUM FOR WIRES +enum WIRE { + Q_M, + Q_C, + Q_L, + Q_R, + Q_O, + Q_4, + Q_ARITH, + Q_RANGE, + Q_ELLIPTIC, + Q_AUX, + Q_LOOKUP, + 
SIGMA_1, + SIGMA_2, + SIGMA_3, + SIGMA_4, + ID_1, + ID_2, + ID_3, + ID_4, + TABLE_1, + TABLE_2, + TABLE_3, + TABLE_4, + LAGRANGE_FIRST, + LAGRANGE_LAST, + W_L, + W_R, + W_O, + W_4, + Z_PERM, + LOOKUP_INVERSES, + LOOKUP_READ_COUNTS, + LOOKUP_READ_TAGS, + TABLE_1_SHIFT, + TABLE_2_SHIFT, + TABLE_3_SHIFT, + TABLE_4_SHIFT, + W_L_SHIFT, + W_R_SHIFT, + W_O_SHIFT, + W_4_SHIFT, + Z_PERM_SHIFT +} + +library Honk { + struct G1Point { + uint256 x; + uint256 y; + } + + struct G1ProofPoint { + uint256 x_0; + uint256 x_1; + uint256 y_0; + uint256 y_1; + } + + struct VerificationKey { + // Misc Params + uint256 circuitSize; + uint256 logCircuitSize; + uint256 publicInputsSize; + // Selectors + G1Point qm; + G1Point qc; + G1Point ql; + G1Point qr; + G1Point qo; + G1Point q4; + G1Point qArith; // Arithmetic widget + G1Point qDeltaRange; // Delta Range sort + G1Point qAux; // Auxillary + G1Point qElliptic; // Auxillary + G1Point qLookup; // Lookup + // Copy cnstraints + G1Point s1; + G1Point s2; + G1Point s3; + G1Point s4; + // Copy identity + G1Point id1; + G1Point id2; + G1Point id3; + G1Point id4; + // Precomputed lookup table + G1Point t1; + G1Point t2; + G1Point t3; + G1Point t4; + // Fixed first and last + G1Point lagrangeFirst; + G1Point lagrangeLast; + } + + struct Proof { + uint256 circuitSize; + uint256 publicInputsSize; + uint256 publicInputsOffset; + // Free wires + Honk.G1ProofPoint w1; + Honk.G1ProofPoint w2; + Honk.G1ProofPoint w3; + Honk.G1ProofPoint w4; + // Lookup helpers - Permutations + Honk.G1ProofPoint zPerm; + // Lookup helpers - logup + Honk.G1ProofPoint lookupReadCounts; + Honk.G1ProofPoint lookupReadTags; + Honk.G1ProofPoint lookupInverses; + // Sumcheck + Fr[BATCHED_RELATION_PARTIAL_LENGTH][CONST_PROOF_SIZE_LOG_N] sumcheckUnivariates; + Fr[NUMBER_OF_ENTITIES] sumcheckEvaluations; + // Zero morph + Honk.G1ProofPoint[CONST_PROOF_SIZE_LOG_N] zmCqs; + Honk.G1ProofPoint zmCq; + Honk.G1ProofPoint zmPi; + } +} + + +// Transcript library to generate fiat shamir challenges +struct Transcript { + Fr eta; + Fr etaTwo; + Fr etaThree; + Fr beta; + Fr gamma; + Fr[NUMBER_OF_ALPHAS] alphas; + Fr[LOG_N] gateChallenges; + Fr[CONST_PROOF_SIZE_LOG_N] sumCheckUChallenges; + Fr rho; + // Zero morph + Fr zmX; + Fr zmY; + Fr zmZ; + Fr zmQuotient; + // Derived + Fr publicInputsDelta; + Fr lookupGrandProductDelta; +} + +library TranscriptLib +{ + function generateTranscript(Honk.Proof memory proof, + Honk.VerificationKey memory vk, + bytes32[] calldata publicInputs) internal view returns(Transcript memory t) + { + (t.eta, t.etaTwo, t.etaThree) = generateEtaChallenge(proof, publicInputs); + + (t.beta, t.gamma) = generateBetaAndGammaChallenges(t.etaThree, proof); + + t.alphas = generateAlphaChallenges(t.gamma, proof); + + t.gateChallenges = generateGateChallenges(t.alphas[NUMBER_OF_ALPHAS - 1]); + + t.sumCheckUChallenges = generateSumcheckChallenges(proof, t.gateChallenges[LOG_N - 1]); + t.rho = generateRhoChallenge(proof, t.sumCheckUChallenges[CONST_PROOF_SIZE_LOG_N - 1]); + + t.zmY = generateZMYChallenge(t.rho, proof); + + (t.zmX, t.zmZ) = generateZMXZChallenges(t.zmY, proof); + + return t; + } + + function generateEtaChallenge(Honk.Proof memory proof, bytes32[] calldata publicInputs) + internal view returns(Fr eta, Fr etaTwo, Fr etaThree) + { + bytes32[3 + NUMBER_OF_PUBLIC_INPUTS + 12] memory round0; + round0[0] = bytes32(proof.circuitSize); + round0[1] = bytes32(proof.publicInputsSize); + round0[2] = bytes32(proof.publicInputsOffset); + for (uint256 i = 0; i < NUMBER_OF_PUBLIC_INPUTS; i++) { + round0[3 + 
i] = bytes32(publicInputs[i]); + } + + // Create the first challenge + // Note: w4 is added to the challenge later on + round0[3 + NUMBER_OF_PUBLIC_INPUTS] = bytes32(proof.w1.x_0); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 1] = bytes32(proof.w1.x_1); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 2] = bytes32(proof.w1.y_0); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 3] = bytes32(proof.w1.y_1); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 4] = bytes32(proof.w2.x_0); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 5] = bytes32(proof.w2.x_1); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 6] = bytes32(proof.w2.y_0); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 7] = bytes32(proof.w2.y_1); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 8] = bytes32(proof.w3.x_0); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 9] = bytes32(proof.w3.x_1); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 10] = bytes32(proof.w3.y_0); + round0[3 + NUMBER_OF_PUBLIC_INPUTS + 11] = bytes32(proof.w3.y_1); + + eta = FrLib.fromBytes32(keccak256(abi.encodePacked(round0))); + etaTwo = FrLib.fromBytes32(keccak256(abi.encodePacked(Fr.unwrap(eta)))); + etaThree = FrLib.fromBytes32(keccak256(abi.encodePacked(Fr.unwrap(etaTwo)))); + } + + function generateBetaAndGammaChallenges(Fr previousChallenge, Honk.Proof memory proof) + internal view returns(Fr beta, Fr gamma) + { + bytes32[13] memory round1; + round1[0] = FrLib.toBytes32(previousChallenge); + round1[1] = bytes32(proof.lookupReadCounts.x_0); + round1[2] = bytes32(proof.lookupReadCounts.x_1); + round1[3] = bytes32(proof.lookupReadCounts.y_0); + round1[4] = bytes32(proof.lookupReadCounts.y_1); + round1[5] = bytes32(proof.lookupReadTags.x_0); + round1[6] = bytes32(proof.lookupReadTags.x_1); + round1[7] = bytes32(proof.lookupReadTags.y_0); + round1[8] = bytes32(proof.lookupReadTags.y_1); + round1[9] = bytes32(proof.w4.x_0); + round1[10] = bytes32(proof.w4.x_1); + round1[11] = bytes32(proof.w4.y_0); + round1[12] = bytes32(proof.w4.y_1); + + beta = FrLib.fromBytes32(keccak256(abi.encodePacked(round1))); + gamma = FrLib.fromBytes32(keccak256(abi.encodePacked(beta))); + } + + // Alpha challenges non-linearise the gate contributions + function generateAlphaChallenges(Fr previousChallenge, Honk.Proof memory proof) + internal view returns(Fr[NUMBER_OF_ALPHAS] memory alphas) + { + // Generate the original sumcheck alpha 0 by hashing zPerm and zLookup + uint256[9] memory alpha0; + alpha0[0] = Fr.unwrap(previousChallenge); + alpha0[1] = proof.lookupInverses.x_0; + alpha0[2] = proof.lookupInverses.x_1; + alpha0[3] = proof.lookupInverses.y_0; + alpha0[4] = proof.lookupInverses.y_1; + alpha0[5] = proof.zPerm.x_0; + alpha0[6] = proof.zPerm.x_1; + alpha0[7] = proof.zPerm.y_0; + alpha0[8] = proof.zPerm.y_1; + + alphas[0] = FrLib.fromBytes32(keccak256(abi.encodePacked(alpha0))); + + Fr prevChallenge = alphas[0]; + for (uint256 i = 1; i < NUMBER_OF_ALPHAS; i++) { + prevChallenge = FrLib.fromBytes32(keccak256(abi.encodePacked(Fr.unwrap(prevChallenge)))); + alphas[i] = prevChallenge; + } + } + + function generateGateChallenges(Fr previousChallenge) internal view returns(Fr[LOG_N] memory gateChallenges) + { + for (uint256 i = 0; i < LOG_N; i++) { + previousChallenge = FrLib.fromBytes32(keccak256(abi.encodePacked(Fr.unwrap(previousChallenge)))); + gateChallenges[i] = previousChallenge; + } + } + + function generateSumcheckChallenges(Honk.Proof memory proof, Fr prevChallenge) + internal view returns(Fr[CONST_PROOF_SIZE_LOG_N] memory sumcheckChallenges) + { + for (uint256 i = 0; i < CONST_PROOF_SIZE_LOG_N; i++) { + Fr[BATCHED_RELATION_PARTIAL_LENGTH + 1] memory 
univariateChal; + univariateChal[0] = prevChallenge; + + for (uint256 j = 0; j < BATCHED_RELATION_PARTIAL_LENGTH; j++) { + univariateChal[j + 1] = proof.sumcheckUnivariates[i][j]; + } + + sumcheckChallenges[i] = FrLib.fromBytes32(keccak256(abi.encodePacked(univariateChal))); + prevChallenge = sumcheckChallenges[i]; + } + } + + function generateRhoChallenge(Honk.Proof memory proof, Fr prevChallenge) internal view returns(Fr rho) + { + Fr[NUMBER_OF_ENTITIES + 1] memory rhoChallengeElements; + rhoChallengeElements[0] = prevChallenge; + + for (uint256 i = 0; i < NUMBER_OF_ENTITIES; i++) { + rhoChallengeElements[i + 1] = proof.sumcheckEvaluations[i]; + } + + rho = FrLib.fromBytes32(keccak256(abi.encodePacked(rhoChallengeElements))); + } + + function generateZMYChallenge(Fr previousChallenge, Honk.Proof memory proof) internal view returns(Fr zeromorphY) + { + uint256[CONST_PROOF_SIZE_LOG_N * 4 + 1] memory zmY; + zmY[0] = Fr.unwrap(previousChallenge); + + for (uint256 i; i < CONST_PROOF_SIZE_LOG_N; ++i) { + zmY[1 + i * 4] = proof.zmCqs[i].x_0; + zmY[2 + i * 4] = proof.zmCqs[i].x_1; + zmY[3 + i * 4] = proof.zmCqs[i].y_0; + zmY[4 + i * 4] = proof.zmCqs[i].y_1; + } + + zeromorphY = FrLib.fromBytes32(keccak256(abi.encodePacked(zmY))); + } + + function generateZMXZChallenges(Fr previousChallenge, Honk.Proof memory proof) + internal view returns(Fr zeromorphX, Fr zeromorphZ) + { + uint256[4 + 1] memory buf; + buf[0] = Fr.unwrap(previousChallenge); + + buf[1] = proof.zmCq.x_0; + buf[2] = proof.zmCq.x_1; + buf[3] = proof.zmCq.y_0; + buf[4] = proof.zmCq.y_1; + + zeromorphX = FrLib.fromBytes32(keccak256(abi.encodePacked(buf))); + zeromorphZ = FrLib.fromBytes32(keccak256(abi.encodePacked(zeromorphX))); + } +} + +// EC Point utilities +function convertProofPoint(Honk.G1ProofPoint memory input) pure returns (Honk.G1Point memory) { + return Honk.G1Point({x: input.x_0 | (input.x_1 << 136), y: input.y_0 | (input.y_1 << 136)}); +} + +function ecMul(Honk.G1Point memory point, Fr scalar) view returns (Honk.G1Point memory) { + bytes memory input = abi.encodePacked(point.x, point.y, Fr.unwrap(scalar)); + (bool success, bytes memory result) = address(0x07).staticcall(input); + require(success, "ecMul failed"); + + (uint256 x, uint256 y) = abi.decode(result, (uint256, uint256)); + return Honk.G1Point({x: x, y: y}); +} + +function ecAdd(Honk.G1Point memory point0, Honk.G1Point memory point1) view returns (Honk.G1Point memory) { + bytes memory input = abi.encodePacked(point0.x, point0.y, point1.x, point1.y); + (bool success, bytes memory result) = address(0x06).staticcall(input); + require(success, "ecAdd failed"); + + (uint256 x, uint256 y) = abi.decode(result, (uint256, uint256)); + return Honk.G1Point({x: x, y: y}); +} + +function ecSub(Honk.G1Point memory point0, Honk.G1Point memory point1) view returns (Honk.G1Point memory) { + // We negate the second point + uint256 negativePoint1Y = (Q - point1.y) % Q; + bytes memory input = abi.encodePacked(point0.x, point0.y, point1.x, negativePoint1Y); + (bool success, bytes memory result) = address(0x06).staticcall(input); + require(success, "ecAdd failed"); + + (uint256 x, uint256 y) = abi.decode(result, (uint256, uint256)); + return Honk.G1Point({x: x, y: y}); +} + +function negateInplace(Honk.G1Point memory point) pure returns (Honk.G1Point memory) { + point.y = (Q - point.y) % Q; + return point; +} + +// Errors +error PublicInputsLengthWrong(); +error SumcheckFailed(); +error ZeromorphFailed(); + +interface IVerifier { + function verify(bytes calldata _proof, bytes32[] 
calldata _publicInputs) external view returns (bool); +} + +/// Smart contract verifier of honk proofs +contract HonkVerifier is IVerifier +{ + Fr internal constant GRUMPKIN_CURVE_B_PARAMETER_NEGATED = Fr.wrap(17); // -(-17) + + function verify(bytes calldata proof, bytes32[] calldata publicInputs) public view override returns(bool) + { + Honk.VerificationKey memory vk = loadVerificationKey(); + Honk.Proof memory p = loadProof(proof); + + if (publicInputs.length != vk.publicInputsSize) { + revert PublicInputsLengthWrong(); + } + + // Generate the fiat shamir challenges for the whole protocol + Transcript memory t = TranscriptLib.generateTranscript(p, vk, publicInputs); + + // Compute the public input delta + t.publicInputsDelta = + computePublicInputDelta(publicInputs, t.beta, t.gamma, vk.circuitSize, p.publicInputsOffset); + + // Sumcheck + bool sumcheckVerified = verifySumcheck(p, t); + if (!sumcheckVerified) + revert SumcheckFailed(); + + // Zeromorph + bool zeromorphVerified = verifyZeroMorph(p, vk, t); + if (!zeromorphVerified) + revert ZeromorphFailed(); + + return sumcheckVerified && zeromorphVerified; // Boolean condition not required - nice for vanity :) + } + + function loadVerificationKey() internal view returns(Honk.VerificationKey memory) + { + return HonkVerificationKey.loadVerificationKey(); + } + + function loadProof(bytes calldata proof) internal view returns(Honk.Proof memory) + { + Honk.Proof memory p; + + // Metadata + p.circuitSize = uint256(bytes32(proof [0x00:0x20])); + p.publicInputsSize = uint256(bytes32(proof [0x20:0x40])); + p.publicInputsOffset = uint256(bytes32(proof [0x40:0x60])); + + // Commitments + p.w1 = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [0x60:0x80])), + x_1 : uint256(bytes32(proof [0x80:0xa0])), + y_0 : uint256(bytes32(proof [0xa0:0xc0])), + y_1 : uint256(bytes32(proof [0xc0:0xe0])) + }); + + p.w2 = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [0xe0:0x100])), + x_1 : uint256(bytes32(proof [0x100:0x120])), + y_0 : uint256(bytes32(proof [0x120:0x140])), + y_1 : uint256(bytes32(proof [0x140:0x160])) + }); + p.w3 = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [0x160:0x180])), + x_1 : uint256(bytes32(proof [0x180:0x1a0])), + y_0 : uint256(bytes32(proof [0x1a0:0x1c0])), + y_1 : uint256(bytes32(proof [0x1c0:0x1e0])) + }); + + // Lookup / Permutation Helper Commitments + p.lookupReadCounts = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [0x1e0:0x200])), + x_1 : uint256(bytes32(proof [0x200:0x220])), + y_0 : uint256(bytes32(proof [0x220:0x240])), + y_1 : uint256(bytes32(proof [0x240:0x260])) + }); + p.lookupReadTags = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [0x260:0x280])), + x_1 : uint256(bytes32(proof [0x280:0x2a0])), + y_0 : uint256(bytes32(proof [0x2a0:0x2c0])), + y_1 : uint256(bytes32(proof [0x2c0:0x2e0])) + }); + p.w4 = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [0x2e0:0x300])), + x_1 : uint256(bytes32(proof [0x300:0x320])), + y_0 : uint256(bytes32(proof [0x320:0x340])), + y_1 : uint256(bytes32(proof [0x340:0x360])) + }); + p.lookupInverses = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [0x360:0x380])), + x_1 : uint256(bytes32(proof [0x380:0x3a0])), + y_0 : uint256(bytes32(proof [0x3a0:0x3c0])), + y_1 : uint256(bytes32(proof [0x3c0:0x3e0])) + }); + p.zPerm = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [0x3e0:0x400])), + x_1 : uint256(bytes32(proof [0x400:0x420])), + y_0 : uint256(bytes32(proof [0x420:0x440])), + y_1 : uint256(bytes32(proof [0x440:0x460])) + }); + + // TEMP the boundary of what has 
already been read + uint256 boundary = 0x460; + + // Sumcheck univariates + for (uint256 i = 0; i < CONST_PROOF_SIZE_LOG_N; i++) { + // The loop boundary of i, this will shift forward on each evaluation + uint256 loop_boundary = boundary + (i * 0x20 * BATCHED_RELATION_PARTIAL_LENGTH); + + for (uint256 j = 0; j < BATCHED_RELATION_PARTIAL_LENGTH; j++) { + uint256 start = loop_boundary + (j * 0x20); + uint256 end = start + 0x20; + p.sumcheckUnivariates[i][j] = FrLib.fromBytes32(bytes32(proof [start:end])); + } + } + + boundary = boundary + (CONST_PROOF_SIZE_LOG_N * BATCHED_RELATION_PARTIAL_LENGTH * 0x20); + // Sumcheck evaluations + for (uint256 i = 0; i < NUMBER_OF_ENTITIES; i++) { + uint256 start = boundary + (i * 0x20); + uint256 end = start + 0x20; + p.sumcheckEvaluations[i] = FrLib.fromBytes32(bytes32(proof [start:end])); + } + + boundary = boundary + (NUMBER_OF_ENTITIES * 0x20); + // Zero morph Commitments + for (uint256 i = 0; i < CONST_PROOF_SIZE_LOG_N; i++) { + // Explicitly stating the x0, x1, y0, y1 start and end boundaries to make the calldata slicing bearable + uint256 xStart = boundary + (i * 0x80); + uint256 xEnd = xStart + 0x20; + + uint256 x1Start = xEnd; + uint256 x1End = x1Start + 0x20; + + uint256 yStart = x1End; + uint256 yEnd = yStart + 0x20; + + uint256 y1Start = yEnd; + uint256 y1End = y1Start + 0x20; + + p.zmCqs[i] = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [xStart:xEnd])), + x_1 : uint256(bytes32(proof [x1Start:x1End])), + y_0 : uint256(bytes32(proof [yStart:yEnd])), + y_1 : uint256(bytes32(proof [y1Start:y1End])) + }); + } + + boundary = boundary + (CONST_PROOF_SIZE_LOG_N * 0x80); + + p.zmCq = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [boundary:boundary + 0x20])), + x_1 : uint256(bytes32(proof [boundary + 0x20:boundary + 0x40])), + y_0 : uint256(bytes32(proof [boundary + 0x40:boundary + 0x60])), + y_1 : uint256(bytes32(proof [boundary + 0x60:boundary + 0x80])) + }); + + p.zmPi = Honk.G1ProofPoint({ + x_0 : uint256(bytes32(proof [boundary + 0x80:boundary + 0xa0])), + x_1 : uint256(bytes32(proof [boundary + 0xa0:boundary + 0xc0])), + y_0 : uint256(bytes32(proof [boundary + 0xc0:boundary + 0xe0])), + y_1 : uint256(bytes32(proof [boundary + 0xe0:boundary + 0x100])) + }); + + return p; + } + + function computePublicInputDelta( + bytes32[] memory publicInputs, Fr beta, Fr gamma, uint256 domainSize, uint256 offset) + internal view returns(Fr publicInputDelta) + { + Fr numerator = Fr.wrap(1); + Fr denominator = Fr.wrap(1); + + Fr numeratorAcc = gamma + (beta * FrLib.from(domainSize + offset)); + Fr denominatorAcc = gamma - (beta * FrLib.from(offset + 1)); + + { + for (uint256 i = 0; i < NUMBER_OF_PUBLIC_INPUTS; i++) { + Fr pubInput = FrLib.fromBytes32(publicInputs[i]); + + numerator = numerator * (numeratorAcc + pubInput); + denominator = denominator * (denominatorAcc + pubInput); + + numeratorAcc = numeratorAcc + beta; + denominatorAcc = denominatorAcc - beta; + } + } + + // Fr delta = numerator / denominator; // TOOO: batch invert later? 
+ publicInputDelta = FrLib.div(numerator, denominator); + } + + uint256 constant ROUND_TARGET = 0; + + function verifySumcheck(Honk.Proof memory proof, Transcript memory tp) internal view returns(bool verified) + { + Fr roundTarget; + Fr powPartialEvaluation = Fr.wrap(1); + + // We perform sumcheck reductions over log n rounds ( the multivariate degree ) + for (uint256 round; round < LOG_N; ++round) { + Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate = proof.sumcheckUnivariates[round]; + bool valid = checkSum(roundUnivariate, roundTarget); + if (!valid) + revert SumcheckFailed(); + + Fr roundChallenge = tp.sumCheckUChallenges[round]; + + // Update the round target for the next rounf + roundTarget = computeNextTargetSum(roundUnivariate, roundChallenge); + powPartialEvaluation = partiallyEvaluatePOW(tp, powPartialEvaluation, roundChallenge, round); + } + + // Last round + Fr grandHonkRelationSum = accumulateRelationEvaluations(proof, tp, powPartialEvaluation); + verified = (grandHonkRelationSum == roundTarget); + } + + function checkSum(Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariate, Fr roundTarget) + internal view returns(bool checked) + { + Fr totalSum = roundUnivariate[0] + roundUnivariate[1]; + checked = totalSum == roundTarget; + } + + // Return the new target sum for the next sumcheck round + function computeNextTargetSum(Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory roundUnivariates, Fr roundChallenge) + internal view returns(Fr targetSum) + { + Fr[7] memory BARYCENTRIC_LAGRANGE_DENOMINATORS = [ + Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000002d0), + Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffff89), + Fr.wrap(0x0000000000000000000000000000000000000000000000000000000000000030), + Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffffdd), + Fr.wrap(0x0000000000000000000000000000000000000000000000000000000000000030), + Fr.wrap(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593efffff89), + Fr.wrap(0x00000000000000000000000000000000000000000000000000000000000002d0) + ]; + + Fr[7] memory BARYCENTRIC_DOMAIN = + [ Fr.wrap(0x00), Fr.wrap(0x01), Fr.wrap(0x02), Fr.wrap(0x03), Fr.wrap(0x04), Fr.wrap(0x05), Fr.wrap(0x06) ]; + // To compute the next target sum, we evaluate the given univariate at a point u (challenge). 
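+        // For reference, the loop below is the standard barycentric evaluation over the domain x_i = 0..6:
+        //
+        //   B(u)      = prod_i (u - x_i)
+        //   targetSum = B(u) * sum_i ( roundUnivariates[i] / (d_i * (u - x_i)) )
+        //
+        // where u is the round challenge and d_i are the BARYCENTRIC_LAGRANGE_DENOMINATORS above.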
+ + // Performing Barycentric evaluations + // Compute B(x) + Fr numeratorValue = Fr.wrap(1); + for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { + numeratorValue = numeratorValue * (roundChallenge - Fr.wrap(i)); + } + + // Calculate domain size N of inverses + Fr[BATCHED_RELATION_PARTIAL_LENGTH] memory denominatorInverses; + for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { + Fr inv = BARYCENTRIC_LAGRANGE_DENOMINATORS[i]; + inv = inv * (roundChallenge - BARYCENTRIC_DOMAIN[i]); + inv = FrLib.invert(inv); + denominatorInverses[i] = inv; + } + + for (uint256 i; i < BATCHED_RELATION_PARTIAL_LENGTH; ++i) { + Fr term = roundUnivariates[i]; + term = term * denominatorInverses[i]; + targetSum = targetSum + term; + } + + // Scale the sum by the value of B(x) + targetSum = targetSum * numeratorValue; + } + + // Univariate evaluation of the monomial ((1-X_l) + X_l.B_l) at the challenge point X_l=u_l + function partiallyEvaluatePOW(Transcript memory tp, Fr currentEvaluation, Fr roundChallenge, uint256 round) + internal view returns(Fr newEvaluation) + { + Fr univariateEval = Fr.wrap(1) + (roundChallenge * (tp.gateChallenges[round] - Fr.wrap(1))); + newEvaluation = currentEvaluation * univariateEval; + } + + // Calculate the contributions of each relation to the expected value of the full honk relation + // + // For each relation, we use the purported values ( the ones provided by the prover ) of the multivariates to + // calculate a contribution to the purported value of the full Honk relation. + // These are stored in the evaluations part of the proof object. + // We add these together, with the appropiate scaling factor ( the alphas calculated in challenges ) + // This value is checked against the final value of the target total sum - et voila! + function accumulateRelationEvaluations(Honk.Proof memory proof, Transcript memory tp, Fr powPartialEval) + internal view returns(Fr accumulator) + { + Fr[NUMBER_OF_ENTITIES] memory purportedEvaluations = proof.sumcheckEvaluations; + Fr[NUMBER_OF_SUBRELATIONS] memory evaluations; + + // Accumulate all 6 custom gates - each with varying number of subrelations + accumulateArithmeticRelation(purportedEvaluations, evaluations, powPartialEval); + accumulatePermutationRelation(purportedEvaluations, tp, evaluations, powPartialEval); + accumulateLogDerivativeLookupRelation(purportedEvaluations, tp, evaluations, powPartialEval); + accumulateDeltaRangeRelation(purportedEvaluations, evaluations, powPartialEval); + accumulateEllipticRelation(purportedEvaluations, evaluations, powPartialEval); + accumulateAuxillaryRelation(purportedEvaluations, tp, evaluations, powPartialEval); + + // Apply alpha challenges to challenge evaluations + // Returns grand honk realtion evaluation + accumulator = scaleAndBatchSubrelations(evaluations, tp.alphas); + } + + /** + * WIRE + * + * Wire is an aesthetic helper function that is used to index by enum into proof.sumcheckEvaluations, it avoids + * the relation checking code being cluttered with uint256 type casting, which is often a different colour in code + * editors, and thus is noisy. 
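+     * For example, wire(p, WIRE.Q_ARITH) below reads the purported Q_ARITH evaluation out of the
+     * sumcheck evaluations array.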
+ */ + function wire(Fr[NUMBER_OF_ENTITIES] memory p, WIRE _wire) internal pure returns(Fr) + { + return p[uint256(_wire)]; + } + + /** + * Ultra Arithmetic Relation + * + */ + function accumulateArithmeticRelation( + Fr[NUMBER_OF_ENTITIES] memory p, Fr[NUMBER_OF_SUBRELATIONS] memory evals, Fr domainSep) internal view + { + // Relation 0 + Fr q_arith = wire(p, WIRE.Q_ARITH); + { + Fr neg_half = Fr.wrap(0) - (FrLib.invert(Fr.wrap(2))); + + Fr accum = (q_arith - Fr.wrap(3)) * (wire(p, WIRE.Q_M) * wire(p, WIRE.W_R) * wire(p, WIRE.W_L)) * neg_half; + accum = accum + (wire(p, WIRE.Q_L) * wire(p, WIRE.W_L)) + (wire(p, WIRE.Q_R) * wire(p, WIRE.W_R)) + + (wire(p, WIRE.Q_O) * wire(p, WIRE.W_O)) + (wire(p, WIRE.Q_4) * wire(p, WIRE.W_4)) + + wire(p, WIRE.Q_C); + accum = accum + (q_arith - Fr.wrap(1)) * wire(p, WIRE.W_4_SHIFT); + accum = accum * q_arith; + accum = accum * domainSep; + evals[0] = accum; + } + + // Relation 1 + { + Fr accum = wire(p, WIRE.W_L) + wire(p, WIRE.W_4) - wire(p, WIRE.W_L_SHIFT) + wire(p, WIRE.Q_M); + accum = accum * (q_arith - Fr.wrap(2)); + accum = accum * (q_arith - Fr.wrap(1)); + accum = accum * q_arith; + accum = accum * domainSep; + evals[1] = accum; + } + } + + function accumulatePermutationRelation( + Fr[NUMBER_OF_ENTITIES] memory p, Transcript memory tp, Fr[NUMBER_OF_SUBRELATIONS] memory evals, Fr domainSep) + internal view + { + Fr grand_product_numerator; + Fr grand_product_denominator; + + { + Fr num = wire(p, WIRE.W_L) + wire(p, WIRE.ID_1) * tp.beta + tp.gamma; + num = num * (wire(p, WIRE.W_R) + wire(p, WIRE.ID_2) * tp.beta + tp.gamma); + num = num * (wire(p, WIRE.W_O) + wire(p, WIRE.ID_3) * tp.beta + tp.gamma); + num = num * (wire(p, WIRE.W_4) + wire(p, WIRE.ID_4) * tp.beta + tp.gamma); + + grand_product_numerator = num; + } + { + Fr den = wire(p, WIRE.W_L) + wire(p, WIRE.SIGMA_1) * tp.beta + tp.gamma; + den = den * (wire(p, WIRE.W_R) + wire(p, WIRE.SIGMA_2) * tp.beta + tp.gamma); + den = den * (wire(p, WIRE.W_O) + wire(p, WIRE.SIGMA_3) * tp.beta + tp.gamma); + den = den * (wire(p, WIRE.W_4) + wire(p, WIRE.SIGMA_4) * tp.beta + tp.gamma); + + grand_product_denominator = den; + } + + // Contribution 2 + { + Fr acc = (wire(p, WIRE.Z_PERM) + wire(p, WIRE.LAGRANGE_FIRST)) * grand_product_numerator; + + acc = acc - ((wire(p, WIRE.Z_PERM_SHIFT) + (wire(p, WIRE.LAGRANGE_LAST) * tp.publicInputsDelta)) * + grand_product_denominator); + acc = acc * domainSep; + evals[2] = acc; + } + + // Contribution 3 + { + Fr acc = (wire(p, WIRE.LAGRANGE_LAST) * wire(p, WIRE.Z_PERM_SHIFT)) * domainSep; + evals[3] = acc; + } + } + + function accumulateLogDerivativeLookupRelation( + Fr[NUMBER_OF_ENTITIES] memory p, Transcript memory tp, Fr[NUMBER_OF_SUBRELATIONS] memory evals, Fr domainSep) + internal view + { + Fr write_term; + Fr read_term; + + // Calculate the write term (the table accumulation) + { + write_term = wire(p, WIRE.TABLE_1) + tp.gamma + (wire(p, WIRE.TABLE_2) * tp.eta) + + (wire(p, WIRE.TABLE_3) * tp.etaTwo) + (wire(p, WIRE.TABLE_4) * tp.etaThree); + } + + // Calculate the write term + { + Fr derived_entry_1 = wire(p, WIRE.W_L) + tp.gamma + (wire(p, WIRE.Q_R) * wire(p, WIRE.W_L_SHIFT)); + Fr derived_entry_2 = wire(p, WIRE.W_R) + wire(p, WIRE.Q_M) * wire(p, WIRE.W_R_SHIFT); + Fr derived_entry_3 = wire(p, WIRE.W_O) + wire(p, WIRE.Q_C) * wire(p, WIRE.W_O_SHIFT); + + read_term = derived_entry_1 + (derived_entry_2 * tp.eta) + (derived_entry_3 * tp.etaTwo) + + (wire(p, WIRE.Q_O) * tp.etaThree); + } + + Fr read_inverse = wire(p, WIRE.LOOKUP_INVERSES) * write_term; + Fr write_inverse 
= wire(p, WIRE.LOOKUP_INVERSES) * read_term; + + Fr inverse_exists_xor = wire(p, WIRE.LOOKUP_READ_TAGS) + wire(p, WIRE.Q_LOOKUP) - + (wire(p, WIRE.LOOKUP_READ_TAGS) * wire(p, WIRE.Q_LOOKUP)); + + // Inverse calculated correctly relation + Fr accumulatorNone = read_term * write_term * wire(p, WIRE.LOOKUP_INVERSES) - inverse_exists_xor; + accumulatorNone = accumulatorNone * domainSep; + + // Inverse + Fr accumulatorOne = wire(p, WIRE.Q_LOOKUP) * read_inverse - wire(p, WIRE.LOOKUP_READ_COUNTS) * write_inverse; + + evals[4] = accumulatorNone; + evals[5] = accumulatorOne; + } + + function accumulateDeltaRangeRelation( + Fr[NUMBER_OF_ENTITIES] memory p, Fr[NUMBER_OF_SUBRELATIONS] memory evals, Fr domainSep) internal view + { + Fr minus_one = Fr.wrap(0) - Fr.wrap(1); + Fr minus_two = Fr.wrap(0) - Fr.wrap(2); + Fr minus_three = Fr.wrap(0) - Fr.wrap(3); + + // Compute wire differences + Fr delta_1 = wire(p, WIRE.W_R) - wire(p, WIRE.W_L); + Fr delta_2 = wire(p, WIRE.W_O) - wire(p, WIRE.W_R); + Fr delta_3 = wire(p, WIRE.W_4) - wire(p, WIRE.W_O); + Fr delta_4 = wire(p, WIRE.W_L_SHIFT) - wire(p, WIRE.W_4); + + // Contribution 6 + { + Fr acc = delta_1; + acc = acc * (delta_1 + minus_one); + acc = acc * (delta_1 + minus_two); + acc = acc * (delta_1 + minus_three); + acc = acc * wire(p, WIRE.Q_RANGE); + acc = acc * domainSep; + evals[6] = acc; + } + + // Contribution 7 + { + Fr acc = delta_2; + acc = acc * (delta_2 + minus_one); + acc = acc * (delta_2 + minus_two); + acc = acc * (delta_2 + minus_three); + acc = acc * wire(p, WIRE.Q_RANGE); + acc = acc * domainSep; + evals[7] = acc; + } + + // Contribution 8 + { + Fr acc = delta_3; + acc = acc * (delta_3 + minus_one); + acc = acc * (delta_3 + minus_two); + acc = acc * (delta_3 + minus_three); + acc = acc * wire(p, WIRE.Q_RANGE); + acc = acc * domainSep; + evals[8] = acc; + } + + // Contribution 9 + { + Fr acc = delta_4; + acc = acc * (delta_4 + minus_one); + acc = acc * (delta_4 + minus_two); + acc = acc * (delta_4 + minus_three); + acc = acc * wire(p, WIRE.Q_RANGE); + acc = acc * domainSep; + evals[9] = acc; + } + } + + struct EllipticParams { + // Points + Fr x_1; + Fr y_1; + Fr x_2; + Fr y_2; + Fr y_3; + Fr x_3; + // push accumulators into memory + Fr x_double_identity; + } + + function + accumulateEllipticRelation(Fr[NUMBER_OF_ENTITIES] memory p, Fr[NUMBER_OF_SUBRELATIONS] memory evals, Fr domainSep) + internal view + { + EllipticParams memory ep; + ep.x_1 = wire(p, WIRE.W_R); + ep.y_1 = wire(p, WIRE.W_O); + + ep.x_2 = wire(p, WIRE.W_L_SHIFT); + ep.y_2 = wire(p, WIRE.W_4_SHIFT); + ep.y_3 = wire(p, WIRE.W_O_SHIFT); + ep.x_3 = wire(p, WIRE.W_R_SHIFT); + + Fr q_sign = wire(p, WIRE.Q_L); + Fr q_is_double = wire(p, WIRE.Q_M); + + // Contribution 10 point addition, x-coordinate check + // q_elliptic * (x3 + x2 + x1)(x2 - x1)(x2 - x1) - y2^2 - y1^2 + 2(y2y1)*q_sign = 0 + Fr x_diff = (ep.x_2 - ep.x_1); + Fr y1_sqr = (ep.y_1 * ep.y_1); + { + // Move to top + Fr partialEval = domainSep; + + Fr y2_sqr = (ep.y_2 * ep.y_2); + Fr y1y2 = ep.y_1 * ep.y_2 * q_sign; + Fr x_add_identity = (ep.x_3 + ep.x_2 + ep.x_1); + x_add_identity = x_add_identity * x_diff * x_diff; + x_add_identity = x_add_identity - y2_sqr - y1_sqr + y1y2 + y1y2; + + evals[10] = x_add_identity * partialEval * wire(p, WIRE.Q_ELLIPTIC) * (Fr.wrap(1) - q_is_double); + } + + // Contribution 11 point addition, x-coordinate check + // q_elliptic * (q_sign * y1 + y3)(x2 - x1) + (x3 - x1)(y2 - q_sign * y1) = 0 + { + Fr y1_plus_y3 = ep.y_1 + ep.y_3; + Fr y_diff = ep.y_2 * q_sign - ep.y_1; + Fr y_add_identity 
= y1_plus_y3 * x_diff + (ep.x_3 - ep.x_1) * y_diff; + evals[11] = y_add_identity * domainSep * wire(p, WIRE.Q_ELLIPTIC) * (Fr.wrap(1) - q_is_double); + } + + // Contribution 10 point doubling, x-coordinate check + // (x3 + x1 + x1) (4y1*y1) - 9 * x1 * x1 * x1 * x1 = 0 + // N.B. we're using the equivalence x1*x1*x1 === y1*y1 - curve_b to reduce degree by 1 + { + Fr x_pow_4 = (y1_sqr + GRUMPKIN_CURVE_B_PARAMETER_NEGATED) * ep.x_1; + Fr y1_sqr_mul_4 = y1_sqr + y1_sqr; + y1_sqr_mul_4 = y1_sqr_mul_4 + y1_sqr_mul_4; + Fr x1_pow_4_mul_9 = x_pow_4 * Fr.wrap(9); + + // NOTE: pushed into memory (stack >:'( ) + ep.x_double_identity = (ep.x_3 + ep.x_1 + ep.x_1) * y1_sqr_mul_4 - x1_pow_4_mul_9; + + Fr acc = ep.x_double_identity * domainSep * wire(p, WIRE.Q_ELLIPTIC) * q_is_double; + evals[10] = evals[10] + acc; + } + + // Contribution 11 point doubling, y-coordinate check + // (y1 + y1) (2y1) - (3 * x1 * x1)(x1 - x3) = 0 + { + Fr x1_sqr_mul_3 = (ep.x_1 + ep.x_1 + ep.x_1) * ep.x_1; + Fr y_double_identity = x1_sqr_mul_3 * (ep.x_1 - ep.x_3) - (ep.y_1 + ep.y_1) * (ep.y_1 + ep.y_3); + evals[11] = evals[11] + y_double_identity * domainSep * wire(p, WIRE.Q_ELLIPTIC) * q_is_double; + } + } + + // Constants for the auxiliary relation + Fr constant LIMB_SIZE = Fr.wrap(uint256(1) << 68); + Fr constant SUBLIMB_SHIFT = Fr.wrap(uint256(1) << 14); + Fr constant MINUS_ONE = Fr.wrap(P - 1); + + // Parameters used within the Auxiliary Relation + // A struct is used to work around stack too deep. This relation has alot of variables + struct AuxParams { + Fr limb_subproduct; + Fr non_native_field_gate_1; + Fr non_native_field_gate_2; + Fr non_native_field_gate_3; + Fr limb_accumulator_1; + Fr limb_accumulator_2; + Fr memory_record_check; + Fr partial_record_check; + Fr next_gate_access_type; + Fr record_delta; + Fr index_delta; + Fr adjacent_values_match_if_adjacent_indices_match; + Fr adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation; + Fr access_check; + Fr next_gate_access_type_is_boolean; + Fr ROM_consistency_check_identity; + Fr RAM_consistency_check_identity; + Fr timestamp_delta; + Fr RAM_timestamp_check_identity; + Fr memory_identity; + Fr index_is_monotonically_increasing; + Fr auxiliary_identity; + } + + function + accumulateAuxillaryRelation( + Fr[NUMBER_OF_ENTITIES] memory p, Transcript memory tp, Fr[NUMBER_OF_SUBRELATIONS] memory evals, Fr domainSep) + internal pure + { + AuxParams memory ap; + + /** + * Contribution 12 + * Non native field arithmetic gate 2 + * deg 4 + * + * _ _ + * / _ _ _ 14 \ + * q_2 . q_4 | (w_1 . w_2) + (w_1 . w_2) + (w_1 . w_4 + w_2 . w_3 - w_3) . 
2 - w_3 - w_4 | + * \_ _/ + * + * + */ + ap.limb_subproduct = wire(p, WIRE.W_L) * wire(p, WIRE.W_R_SHIFT) + wire(p, WIRE.W_L_SHIFT) * wire(p, WIRE.W_R); + ap.non_native_field_gate_2 = + (wire(p, WIRE.W_L) * wire(p, WIRE.W_4) + wire(p, WIRE.W_R) * wire(p, WIRE.W_O) - wire(p, WIRE.W_O_SHIFT)); + ap.non_native_field_gate_2 = ap.non_native_field_gate_2 * LIMB_SIZE; + ap.non_native_field_gate_2 = ap.non_native_field_gate_2 - wire(p, WIRE.W_4_SHIFT); + ap.non_native_field_gate_2 = ap.non_native_field_gate_2 + ap.limb_subproduct; + ap.non_native_field_gate_2 = ap.non_native_field_gate_2 * wire(p, WIRE.Q_4); + + ap.limb_subproduct = ap.limb_subproduct * LIMB_SIZE; + ap.limb_subproduct = ap.limb_subproduct + (wire(p, WIRE.W_L_SHIFT) * wire(p, WIRE.W_R_SHIFT)); + ap.non_native_field_gate_1 = ap.limb_subproduct; + ap.non_native_field_gate_1 = ap.non_native_field_gate_1 - (wire(p, WIRE.W_O) + wire(p, WIRE.W_4)); + ap.non_native_field_gate_1 = ap.non_native_field_gate_1 * wire(p, WIRE.Q_O); + + ap.non_native_field_gate_3 = ap.limb_subproduct; + ap.non_native_field_gate_3 = ap.non_native_field_gate_3 + wire(p, WIRE.W_4); + ap.non_native_field_gate_3 = ap.non_native_field_gate_3 - (wire(p, WIRE.W_O_SHIFT) + wire(p, WIRE.W_4_SHIFT)); + ap.non_native_field_gate_3 = ap.non_native_field_gate_3 * wire(p, WIRE.Q_M); + + Fr non_native_field_identity = + ap.non_native_field_gate_1 + ap.non_native_field_gate_2 + ap.non_native_field_gate_3; + non_native_field_identity = non_native_field_identity * wire(p, WIRE.Q_R); + + // ((((w2' * 2^14 + w1') * 2^14 + w3) * 2^14 + w2) * 2^14 + w1 - w4) * qm + // deg 2 + ap.limb_accumulator_1 = wire(p, WIRE.W_R_SHIFT) * SUBLIMB_SHIFT; + ap.limb_accumulator_1 = ap.limb_accumulator_1 + wire(p, WIRE.W_L_SHIFT); + ap.limb_accumulator_1 = ap.limb_accumulator_1 * SUBLIMB_SHIFT; + ap.limb_accumulator_1 = ap.limb_accumulator_1 + wire(p, WIRE.W_O); + ap.limb_accumulator_1 = ap.limb_accumulator_1 * SUBLIMB_SHIFT; + ap.limb_accumulator_1 = ap.limb_accumulator_1 + wire(p, WIRE.W_R); + ap.limb_accumulator_1 = ap.limb_accumulator_1 * SUBLIMB_SHIFT; + ap.limb_accumulator_1 = ap.limb_accumulator_1 + wire(p, WIRE.W_L); + ap.limb_accumulator_1 = ap.limb_accumulator_1 - wire(p, WIRE.W_4); + ap.limb_accumulator_1 = ap.limb_accumulator_1 * wire(p, WIRE.Q_4); + + // ((((w3' * 2^14 + w2') * 2^14 + w1') * 2^14 + w4) * 2^14 + w3 - w4') * qm + // deg 2 + ap.limb_accumulator_2 = wire(p, WIRE.W_O_SHIFT) * SUBLIMB_SHIFT; + ap.limb_accumulator_2 = ap.limb_accumulator_2 + wire(p, WIRE.W_R_SHIFT); + ap.limb_accumulator_2 = ap.limb_accumulator_2 * SUBLIMB_SHIFT; + ap.limb_accumulator_2 = ap.limb_accumulator_2 + wire(p, WIRE.W_L_SHIFT); + ap.limb_accumulator_2 = ap.limb_accumulator_2 * SUBLIMB_SHIFT; + ap.limb_accumulator_2 = ap.limb_accumulator_2 + wire(p, WIRE.W_4); + ap.limb_accumulator_2 = ap.limb_accumulator_2 * SUBLIMB_SHIFT; + ap.limb_accumulator_2 = ap.limb_accumulator_2 + wire(p, WIRE.W_O); + ap.limb_accumulator_2 = ap.limb_accumulator_2 - wire(p, WIRE.W_4_SHIFT); + ap.limb_accumulator_2 = ap.limb_accumulator_2 * wire(p, WIRE.Q_M); + + Fr limb_accumulator_identity = ap.limb_accumulator_1 + ap.limb_accumulator_2; + limb_accumulator_identity = limb_accumulator_identity * wire(p, WIRE.Q_O); // deg 3 + + /** + * MEMORY + * + * A RAM memory record contains a tuple of the following fields: + * * i: `index` of memory cell being accessed + * * t: `timestamp` of memory cell being accessed (used for RAM, set to 0 for ROM) + * * v: `value` of memory cell being accessed + * * a: `access` type of record. 
read: 0 = read, 1 = write + * * r: `record` of memory cell. record = access + index * eta + timestamp * eta_two + value * eta_three + * + * A ROM memory record contains a tuple of the following fields: + * * i: `index` of memory cell being accessed + * * v: `value1` of memory cell being accessed (ROM tables can store up to 2 values per index) + * * v2:`value2` of memory cell being accessed (ROM tables can store up to 2 values per index) + * * r: `record` of memory cell. record = index * eta + value2 * eta_two + value1 * eta_three + * + * When performing a read/write access, the values of i, t, v, v2, a, r are stored in the following wires + + * selectors, depending on whether the gate is a RAM read/write or a ROM read + * + * | gate type | i | v2/t | v | a | r | + * | --------- | -- | ----- | -- | -- | -- | + * | ROM | w1 | w2 | w3 | -- | w4 | + * | RAM | w1 | w2 | w3 | qc | w4 | + * + * (for accesses where `index` is a circuit constant, it is assumed the circuit will apply a copy constraint on + * `w2` to fix its value) + * + * + */ + + /** + * Memory Record Check + * Partial degree: 1 + * Total degree: 4 + * + * A ROM/ROM access gate can be evaluated with the identity: + * + * qc + w1 \eta + w2 \eta_two + w3 \eta_three - w4 = 0 + * + * For ROM gates, qc = 0 + */ + ap.memory_record_check = wire(p, WIRE.W_O) * tp.etaThree; + ap.memory_record_check = ap.memory_record_check + (wire(p, WIRE.W_R) * tp.etaTwo); + ap.memory_record_check = ap.memory_record_check + (wire(p, WIRE.W_L) * tp.eta); + ap.memory_record_check = ap.memory_record_check + wire(p, WIRE.Q_C); + ap.partial_record_check = ap.memory_record_check; // used in RAM consistency check; deg 1 or 4 + ap.memory_record_check = ap.memory_record_check - wire(p, WIRE.W_4); + + /** + * Contribution 13 & 14 + * ROM Consistency Check + * Partial degree: 1 + * Total degree: 4 + * + * For every ROM read, a set equivalence check is applied between the record witnesses, and a second set of + * records that are sorted. + * + * We apply the following checks for the sorted records: + * + * 1. w1, w2, w3 correctly map to 'index', 'v1, 'v2' for a given record value at w4 + * 2. index values for adjacent records are monotonically increasing + * 3. if, at gate i, index_i == index_{i + 1}, then value1_i == value1_{i + 1} and value2_i == value2_{i + 1} + * + */ + ap.index_delta = wire(p, WIRE.W_L_SHIFT) - wire(p, WIRE.W_L); + ap.record_delta = wire(p, WIRE.W_4_SHIFT) - wire(p, WIRE.W_4); + + ap.index_is_monotonically_increasing = ap.index_delta * ap.index_delta - ap.index_delta; // deg 2 + + ap.adjacent_values_match_if_adjacent_indices_match = + (ap.index_delta * MINUS_ONE + Fr.wrap(1)) * ap.record_delta; // deg 2 + + evals[13] = ap.adjacent_values_match_if_adjacent_indices_match * (wire(p, WIRE.Q_L) * wire(p, WIRE.Q_R)) * + (wire(p, WIRE.Q_AUX) * domainSep); // deg 5 + evals[14] = ap.index_is_monotonically_increasing * (wire(p, WIRE.Q_L) * wire(p, WIRE.Q_R)) * + (wire(p, WIRE.Q_AUX) * domainSep); // deg 5 + + ap.ROM_consistency_check_identity = + ap.memory_record_check * (wire(p, WIRE.Q_L) * wire(p, WIRE.Q_R)); // deg 3 or 7 + + /** + * Contributions 15,16,17 + * RAM Consistency Check + * + * The 'access' type of the record is extracted with the expression `w_4 - ap.partial_record_check` + * (i.e. for an honest Prover `w1 * eta + w2 * eta^2 + w3 * eta^3 - w4 = access`. + * This is validated by requiring `access` to be boolean + * + * For two adjacent entries in the sorted list if _both_ + * A) index values match + * B) adjacent access value is 0 (i.e. 
next gate is a READ) + * then + * C) both values must match. + * The gate boolean check is + * (A && B) => C === !(A && B) || C === !A || !B || C + * + * N.B. it is the responsibility of the circuit writer to ensure that every RAM cell is initialized + * with a WRITE operation. + */ + Fr access_type = (wire(p, WIRE.W_4) - ap.partial_record_check); // will be 0 or 1 for honest Prover; deg 1 or 4 + ap.access_check = access_type * access_type - access_type; // check value is 0 or 1; deg 2 or 8 + + // 1 - ((w3' * eta + w2') * eta + w1') * eta + // deg 1 or 4 + ap.next_gate_access_type = wire(p, WIRE.W_O_SHIFT) * tp.etaThree; + ap.next_gate_access_type = ap.next_gate_access_type + (wire(p, WIRE.W_R_SHIFT) * tp.etaTwo); + ap.next_gate_access_type = ap.next_gate_access_type + (wire(p, WIRE.W_L_SHIFT) * tp.eta); + ap.next_gate_access_type = wire(p, WIRE.W_4_SHIFT) - ap.next_gate_access_type; + + Fr value_delta = wire(p, WIRE.W_O_SHIFT) - wire(p, WIRE.W_O); + ap.adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation = + (ap.index_delta * MINUS_ONE + Fr.wrap(1)) * value_delta * + (ap.next_gate_access_type * MINUS_ONE + Fr.wrap(1)); // deg 3 or 6 + + // We can't apply the RAM consistency check identity on the final entry in the sorted list (the wires in the + // next gate would make the identity fail). We need to validate that its 'access type' bool is correct. Can't + // do with an arithmetic gate because of the `eta` factors. We need to check that the *next* gate's access + // type is correct, to cover this edge case + // deg 2 or 4 + ap.next_gate_access_type_is_boolean = + ap.next_gate_access_type * ap.next_gate_access_type - ap.next_gate_access_type; + + // Putting it all together... + evals[15] = ap.adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation * + (wire(p, WIRE.Q_ARITH)) * (wire(p, WIRE.Q_AUX) * domainSep); // deg 5 or 8 + evals[16] = + ap.index_is_monotonically_increasing * (wire(p, WIRE.Q_ARITH)) * (wire(p, WIRE.Q_AUX) * domainSep); // deg 4 + evals[17] = ap.next_gate_access_type_is_boolean * (wire(p, WIRE.Q_ARITH)) * + (wire(p, WIRE.Q_AUX) * domainSep); // deg 4 or 6 + + ap.RAM_consistency_check_identity = ap.access_check * (wire(p, WIRE.Q_ARITH)); // deg 3 or 9 + + /** + * RAM Timestamp Consistency Check + * + * | w1 | w2 | w3 | w4 | + * | index | timestamp | timestamp_check | -- | + * + * Let delta_index = index_{i + 1} - index_{i} + * + * Iff delta_index == 0, timestamp_check = timestamp_{i + 1} - timestamp_i + * Else timestamp_check = 0 + */ + ap.timestamp_delta = wire(p, WIRE.W_R_SHIFT) - wire(p, WIRE.W_R); + ap.RAM_timestamp_check_identity = + (ap.index_delta * MINUS_ONE + Fr.wrap(1)) * ap.timestamp_delta - wire(p, WIRE.W_O); // deg 3 + + /** + * Complete Contribution 12 + * The complete RAM/ROM memory identity + * Partial degree: + */ + ap.memory_identity = ap.ROM_consistency_check_identity; // deg 3 or 6 + ap.memory_identity = + ap.memory_identity + ap.RAM_timestamp_check_identity * (wire(p, WIRE.Q_4) * wire(p, WIRE.Q_L)); // deg 4 + ap.memory_identity = + ap.memory_identity + ap.memory_record_check * (wire(p, WIRE.Q_M) * wire(p, WIRE.Q_L)); // deg 3 or 6 + ap.memory_identity = ap.memory_identity + ap.RAM_consistency_check_identity; // deg 3 or 9 + + // (deg 3 or 9) + (deg 4) + (deg 3) + ap.auxiliary_identity = ap.memory_identity + non_native_field_identity + limb_accumulator_identity; + ap.auxiliary_identity = ap.auxiliary_identity * (wire(p, WIRE.Q_AUX) * domainSep); // deg 4 or 10 + evals[12] = 
ap.auxiliary_identity; + } + + function scaleAndBatchSubrelations(Fr[NUMBER_OF_SUBRELATIONS] memory evaluations, + Fr[NUMBER_OF_ALPHAS] memory subrelationChallenges) + internal view returns(Fr accumulator) + { + accumulator = accumulator + evaluations[0]; + + for (uint256 i = 1; i < NUMBER_OF_SUBRELATIONS; ++i) { + accumulator = accumulator + evaluations[i] * subrelationChallenges[i - 1]; + } + } + + function verifyZeroMorph(Honk.Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp) + internal view returns(bool verified) + { + // Construct batched evaluation v = sum_{i=0}^{m-1}\rho^i*f_i(u) + sum_{i=0}^{l-1}\rho^{m+i}*h_i(u) + Fr batchedEval = Fr.wrap(0); + Fr batchedScalar = Fr.wrap(1); + + // We linearly combine all evaluations (unshifted first, then shifted) + for (uint256 i = 0; i < NUMBER_OF_ENTITIES; ++i) { + batchedEval = batchedEval + proof.sumcheckEvaluations[i] * batchedScalar; + batchedScalar = batchedScalar * tp.rho; + } + + // Get k commitments + Honk.G1Point memory c_zeta = computeCZeta(proof, tp); + Honk.G1Point memory c_zeta_x = computeCZetaX(proof, vk, tp, batchedEval); + Honk.G1Point memory c_zeta_Z = ecAdd(c_zeta, ecMul(c_zeta_x, tp.zmZ)); + + // KZG pairing accumulator + Fr evaluation = Fr.wrap(0); + verified = zkgReduceVerify(proof, tp, evaluation, c_zeta_Z); + } + + // Compute commitment to lifted degree quotient identity + function computeCZeta(Honk.Proof memory proof, Transcript memory tp) internal view returns(Honk.G1Point memory) + { + Fr[LOG_N + 1] memory scalars; + Honk.G1ProofPoint[LOG_N + 1] memory commitments; + + // Initial contribution + commitments[0] = proof.zmCq; + scalars[0] = Fr.wrap(1); + + for (uint256 k = 0; k < LOG_N; ++k) { + Fr degree = Fr.wrap((1 << k) - 1); + Fr scalar = FrLib.pow(tp.zmY, k); + scalar = scalar * FrLib.pow(tp.zmX, (1 << LOG_N) - Fr.unwrap(degree) - 1); + scalar = scalar * MINUS_ONE; + + scalars[k + 1] = scalar; + commitments[k + 1] = proof.zmCqs[k]; + } + + // Convert all commitments for batch mul + Honk.G1Point[LOG_N + 1] memory comms = convertPoints(commitments); + + return batchMul(comms, scalars); + } + + struct CZetaXParams { + Fr phi_numerator; + Fr phi_n_x; + Fr rho_pow; + Fr phi_1; + Fr phi_2; + Fr x_pow_2k; + Fr x_pow_2kp1; + } + + function + computeCZetaX(Honk.Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp, Fr batchedEval) + internal view returns(Honk.G1Point memory) + { + Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory scalars; + Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory commitments; + CZetaXParams memory cp; + + // Phi_n(x) = (x^N - 1) / (x - 1) + cp.phi_numerator = FrLib.pow(tp.zmX, (1 << LOG_N)) - Fr.wrap(1); + cp.phi_n_x = FrLib.div(cp.phi_numerator, tp.zmX - Fr.wrap(1)); + + // Add contribution: -v * x * \Phi_n(x) * [1]_1 + // Add base + scalars[0] = MINUS_ONE * batchedEval * tp.zmX * cp.phi_n_x; + commitments[0] = Honk.G1Point({ x : 1, y : 2 }); // One + + // f - Add all unshifted commitments + // g - Add add to be shifted commitments + + // f commitments are accumulated at (zm_x * r) + cp.rho_pow = Fr.wrap(1); + for (uint256 i = 1; i < 34; ++i) { + scalars[i] = tp.zmX * cp.rho_pow; + cp.rho_pow = cp.rho_pow * tp.rho; + } + // g commitments are accumulated at r + for (uint256 i = 34; i < 43; ++i) { + scalars[i] = cp.rho_pow; + cp.rho_pow = cp.rho_pow * tp.rho; + } + + commitments[1] = vk.qm; + commitments[2] = vk.qc; + commitments[3] = vk.ql; + commitments[4] = vk.qr; + commitments[5] = vk.qo; + commitments[6] = vk.q4; + 
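// Index layout for the scalars/commitments batched in this function (matching the two loops above):
//   [1..25]   verification key polynomials, scalar = zm_x * rho^(i-1)
//   [26..33]  unshifted witness commitments from the proof, continuing the same rho progression
//   [34..42]  "to be shifted" commitments (t1..t4, w1..w4, zPerm), scalar = rho^(i-1) without the zm_x factor
//   [43..]    ZeroMorph C_q_k commitments, whose scalars are computed in the loop further below
//   index 0 holds the generator [1]_1 with scalar -v * x * Phi_n(x)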
commitments[7] = vk.qArith; + commitments[8] = vk.qDeltaRange; + commitments[9] = vk.qElliptic; + commitments[10] = vk.qAux; + commitments[11] = vk.qLookup; + commitments[12] = vk.s1; + commitments[13] = vk.s2; + commitments[14] = vk.s3; + commitments[15] = vk.s4; + commitments[16] = vk.id1; + commitments[17] = vk.id2; + commitments[18] = vk.id3; + commitments[19] = vk.id4; + commitments[20] = vk.t1; + commitments[21] = vk.t2; + commitments[22] = vk.t3; + commitments[23] = vk.t4; + commitments[24] = vk.lagrangeFirst; + commitments[25] = vk.lagrangeLast; + + // Accumulate proof points + commitments[26] = convertProofPoint(proof.w1); + commitments[27] = convertProofPoint(proof.w2); + commitments[28] = convertProofPoint(proof.w3); + commitments[29] = convertProofPoint(proof.w4); + commitments[30] = convertProofPoint(proof.zPerm); + commitments[31] = convertProofPoint(proof.lookupInverses); + commitments[32] = convertProofPoint(proof.lookupReadCounts); + commitments[33] = convertProofPoint(proof.lookupReadTags); + + // to be Shifted + commitments[34] = vk.t1; + commitments[35] = vk.t2; + commitments[36] = vk.t3; + commitments[37] = vk.t4; + commitments[38] = convertProofPoint(proof.w1); + commitments[39] = convertProofPoint(proof.w2); + commitments[40] = convertProofPoint(proof.w3); + commitments[41] = convertProofPoint(proof.w4); + commitments[42] = convertProofPoint(proof.zPerm); + + // Add scalar contributions + // Add contributions: scalar * [q_k], k = 0,...,log_N, where + // scalar = -x * (x^{2^k} * \Phi_{n-k-1}(x^{2^{k+1}}) - u_k * \Phi_{n-k}(x^{2^k})) + cp.x_pow_2k = tp.zmX; + cp.x_pow_2kp1 = tp.zmX * tp.zmX; + for (uint256 k; k < CONST_PROOF_SIZE_LOG_N; ++k) { + bool dummy_round = k >= LOG_N; + + // note: defaults to 0 + Fr scalar; + if (!dummy_round) { + cp.phi_1 = FrLib.div(cp.phi_numerator, cp.x_pow_2kp1 - Fr.wrap(1)); + cp.phi_2 = FrLib.div(cp.phi_numerator, cp.x_pow_2k - Fr.wrap(1)); + + scalar = cp.x_pow_2k * cp.phi_1; + scalar = scalar - (tp.sumCheckUChallenges[k] * cp.phi_2); + scalar = scalar * tp.zmX; + scalar = scalar * MINUS_ONE; + + cp.x_pow_2k = cp.x_pow_2kp1; + cp.x_pow_2kp1 = cp.x_pow_2kp1 * cp.x_pow_2kp1; + } + + scalars[43 + k] = scalar; + commitments[43 + k] = convertProofPoint(proof.zmCqs[k]); + } + + return batchMul2(commitments, scalars); + } + + // Scalar Mul and acumulate into total + function batchMul(Honk.G1Point[LOG_N + 1] memory base, Fr[LOG_N + 1] memory scalars) + internal view returns(Honk.G1Point memory result) + { + uint256 limit = LOG_N + 1; + assembly + { + let success := 0x01 + let free := mload(0x40) + + // Write the original into the accumulator + // Load into memory for ecMUL, leave offset for eccAdd result + // base is an array of pointers, so we have to dereference them + mstore(add(free, 0x40), mload(mload(base))) + mstore(add(free, 0x60), mload(add(0x20, mload(base)))) + // Add scalar + mstore(add(free, 0x80), mload(scalars)) + success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, free, 0x40)) + + let count := 0x01 + for {} lt(count, limit) { count := add(count, 1) } { + // Get loop offsets + let base_base := add(base, mul(count, 0x20)) + let scalar_base := add(scalars, mul(count, 0x20)) + + mstore(add(free, 0x40), mload(mload(base_base))) + mstore(add(free, 0x60), mload(add(0x20, mload(base_base)))) + // Add scalar + mstore(add(free, 0x80), mload(scalar_base)) + + success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, add(free, 0x40), 0x40)) + success := and(success, staticcall(gas(), 6, free, 0x80, free, 0x40)) + } 
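// Precompile usage in the loop above (bn254 operations from EIP-196):
//   staticcall to 0x07 (ecMul): scales the point loaded at [free+0x40, free+0x80) by the scalar at free+0x80
//   staticcall to 0x06 (ecAdd): adds that product into the running accumulator kept at [free, free+0x40)
// The accumulated group element is then copied out of scratch space into `result` below.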
+ + mstore(result, mload(free)) mstore(add(result, 0x20), mload(add(free, 0x20))) + } + } + + // This implementation is the same as above with different constants + function batchMul2(Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory base, + Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory scalars) + internal view returns(Honk.G1Point memory result) + { + uint256 limit = NUMBER_OF_ENTITIES + LOG_N + 1; + assembly + { + let success := 0x01 + let free := mload(0x40) + + // Write the original into the accumulator + // Load into memory for ecMUL, leave offset for eccAdd result + // base is an array of pointers, so we have to dereference them + mstore(add(free, 0x40), mload(mload(base))) + mstore(add(free, 0x60), mload(add(0x20, mload(base)))) + // Add scalar + mstore(add(free, 0x80), mload(scalars)) + success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, free, 0x40)) + + let count := 0x01 + for {} lt(count, limit){ count := add(count, 1) } { + // Get loop offsets + let base_base := add(base, mul(count, 0x20)) + let scalar_base := add(scalars, mul(count, 0x20)) + + mstore(add(free, 0x40), mload(mload(base_base))) + mstore(add(free, 0x60), mload(add(0x20, mload(base_base)))) + // Add scalar + mstore(add(free, 0x80), mload(scalar_base)) + + success := and(success, staticcall(gas(), 7, add(free, 0x40), 0x60, add(free, 0x40), 0x40)) + // accumulator = accumulator + accumulator_2 + success := and(success, staticcall(gas(), 6, free, 0x80, free, 0x40)) + } + + // Return the result - i hate this + mstore(result, mload(free)) mstore(add(result, 0x20), mload(add(free, 0x20))) + } + } + + function zkgReduceVerify( + Honk.Proof memory proof, Transcript memory tp, Fr evaluation, Honk.G1Point memory commitment) + internal view returns(bool) + { + Honk.G1Point memory quotient_commitment = convertProofPoint(proof.zmPi); + Honk.G1Point memory ONE = Honk.G1Point({ x : 1, y : 2 }); + + Honk.G1Point memory P0 = commitment; + P0 = ecAdd(P0, ecMul(quotient_commitment, tp.zmX)); + + Honk.G1Point memory evalAsPoint = ecMul(ONE, evaluation); + P0 = ecSub(P0, evalAsPoint); + + Honk.G1Point memory P1 = negateInplace(quotient_commitment); + + // Perform pairing check + return pairing(P0, P1); + } + + function pairing(Honk.G1Point memory rhs, Honk.G1Point memory lhs) internal view returns(bool) + { + bytes memory input = + abi.encodePacked(rhs.x, + rhs.y, + // Fixed G1 point + uint256(0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2), + uint256(0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed), + uint256(0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b), + uint256(0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa), + lhs.x, + lhs.y, + // G1 point from VK + uint256(0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1), + uint256(0x0118c4d5b837bcc2bc89b5b398b5974e9f5944073b32078b7e231fec938883b0), + uint256(0x04fc6369f7110fe3d25156c1bb9a72859cf2a04641f99ba4ee413c80da6a5fe4), + uint256(0x22febda3c0c0632a56475b4214e5615e11e6dd3f96e6cea2854a87d4dacc5e55)); + + (bool success, bytes memory result) = address(0x08).staticcall(input); + return abi.decode(result, (bool)); + } +} + +// Conversion util - Duplicated as we cannot template LOG_N +function convertPoints(Honk.G1ProofPoint[LOG_N + 1] memory commitments) pure + returns(Honk.G1Point[LOG_N + 1] memory converted) +{ + for (uint256 i; i < LOG_N + 1; ++i) { + converted[i] = convertProofPoint(commitments[i]); + } +} +)"; + +inline std::string 
get_honk_solidity_verifier(auto const& verification_key) +{ + std::ostringstream stream; + output_vk_sol_ultra_honk(stream, verification_key, "HonkVerificationKey"); + return stream.str() + HONK_CONTRACT_SOURCE; +} \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/solidity_helpers/honk_sol_gen.hpp b/barretenberg/cpp/src/barretenberg/honk/utils/honk_key_gen.hpp similarity index 59% rename from barretenberg/cpp/src/barretenberg/solidity_helpers/honk_sol_gen.hpp rename to barretenberg/cpp/src/barretenberg/honk/utils/honk_key_gen.hpp index 5ed2407bd95..be1e37b4218 100644 --- a/barretenberg/cpp/src/barretenberg/solidity_helpers/honk_sol_gen.hpp +++ b/barretenberg/cpp/src/barretenberg/honk/utils/honk_key_gen.hpp @@ -1,31 +1,30 @@ -// TODO: check if we need these -#include "barretenberg/ultra_honk/ultra_prover.hpp" -#include "barretenberg/ultra_honk/ultra_verifier.hpp" +#pragma once -#include -#include - -namespace bb { /** * Write a solidity file containing the vk params to the given stream. * Uses UltraHonk + * + * @param os + * @param key - verification key object + * @param class_name - the name to give the verification key class + * @param include_types_import - include a "HonkTypes" import, only required for local tests, not with the bundled + *contract from bb contract_honk **/ -inline void output_vk_sol_ultra_honk( - std::ostream& os, - // TODO: get the VerificationKey outside of the falvor - it's not a part of the flavor - auto const& key, - std::string const& class_name) +inline void output_vk_sol_ultra_honk(std::ostream& os, + auto const& key, + std::string const& class_name, + bool include_types_import = false) { - const auto print_u256_const = [&](const bb::fr& element, const std::string& name) { + const auto print_u256_const = [&](const auto& element, const std::string& name) { os << "uint256 constant " << name << " = " << element << ";" << std::endl; }; - const auto print_u256 = [&](const bb::fr& element, const std::string& name) { + const auto print_u256 = [&](const auto& element, const std::string& name) { os << " " << name << ": uint256(" << element << ")," << std::endl; }; - const auto print_g1 = [&](const bb::g1::affine_element& element, const std::string& name, const bool last = false) { + const auto print_g1 = [&](const auto& element, const std::string& name, const bool last = false) { os << " " << name << ": Honk.G1Point({ \n" << " " << "x: " @@ -43,6 +42,13 @@ inline void output_vk_sol_ultra_honk( } }; + // Include the types import if working with the local test suite + const auto print_types_import = [&]() { + if (include_types_import) { + os << "import { Honk } from \"../HonkTypes.sol\";\n"; + } + }; + // clang-format off os << // "// Verification Key Hash: " << key->sha256_hash() << "\n" @@ -50,16 +56,13 @@ inline void output_vk_sol_ultra_honk( "// Copyright 2022 Aztec\n" "pragma solidity >=0.8.21;\n" "\n" - "import { Honk } from \"../HonkTypes.sol\";\n" ""; + print_types_import(); print_u256_const(key->circuit_size, "N"); print_u256_const(key->log_circuit_size, "LOG_N"); print_u256_const(key->num_public_inputs, "NUMBER_OF_PUBLIC_INPUTS"); os << "" "library " << class_name << " {\n" - // " function verificationKeyHash() internal pure returns(bytes32) {\n" - // " return 0x" << key->sha256_hash() << ";\n" - // " }\n\n" " function loadVerificationKey() internal pure returns (Honk.VerificationKey memory) {\n" " Honk.VerificationKey memory vk = Honk.VerificationKey({\n"; print_u256(key->circuit_size, "circuitSize"); @@ -92,21 +95,10 @@ inline void 
output_vk_sol_ultra_honk( print_g1(key->lagrange_first, "lagrangeFirst"); print_g1(key->lagrange_last, "lagrangeLast", /*last=*/ true); os << - - // TODO: no recursive proofs - // TODO: no pairing check yet - // " mstore(add(_vk, 0x640), " << (key->contains_recursive_proof ? "0x01" : "0x00") << ") // vk.contains_recursive_proof\n" - // " mstore(add(_vk, 0x660), " << (key->contains_recursive_proof ? key->recursive_proof_public_input_indices[0] : 0) << ") // vk.recursive_proof_public_input_indices\n" - - // " mstore(add(_vk, 0x680), " << key->reference_string->get_g2x().x.c1 << ") // vk.g2_x.X.c1 \n" - // " mstore(add(_vk, 0x6a0), " << key->reference_string->get_g2x().x.c0 << ") // vk.g2_x.X.c0 \n" - // " mstore(add(_vk, 0x6c0), " << key->reference_string->get_g2x().y.c1 << ") // vk.g2_x.Y.c1 \n" - // " mstore(add(_vk, 0x6e0), " << key->reference_string->get_g2x().y.c0 << ") // vk.g2_x.Y.c0 \n" " });\n" " return vk;\n" " }\n" "}\n"; os << std::flush; -} } \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/solidity_helpers/honk_key_gen.cpp b/barretenberg/cpp/src/barretenberg/solidity_helpers/honk_key_gen.cpp index a225e6ec262..b3f24dda249 100644 --- a/barretenberg/cpp/src/barretenberg/solidity_helpers/honk_key_gen.cpp +++ b/barretenberg/cpp/src/barretenberg/solidity_helpers/honk_key_gen.cpp @@ -2,12 +2,11 @@ #include #include +#include "barretenberg/honk/utils/honk_key_gen.hpp" #include "barretenberg/stdlib_circuit_builders/ultra_circuit_builder.hpp" #include "barretenberg/ultra_honk/ultra_prover.hpp" #include "barretenberg/ultra_honk/ultra_verifier.hpp" -#include "./honk_sol_gen.hpp" - #include "circuits/add_2_circuit.hpp" #include "circuits/blake_circuit.hpp" #include "circuits/ecdsa_circuit.hpp" @@ -38,7 +37,7 @@ void generate_keys_honk(std::string output_path, std::string flavour_prefix, std { auto vk_filename = output_path + "/keys/" + vk_class_name + ".sol"; std::ofstream os(vk_filename); - bb::output_vk_sol_ultra_honk(os, verification_key, vk_class_name); + output_vk_sol_ultra_honk(os, verification_key, vk_class_name, true); info("VK contract written to: ", vk_filename); } } diff --git a/barretenberg/sol/src/honk/HonkTypes.sol b/barretenberg/sol/src/honk/HonkTypes.sol index 17d749c9de5..275c26ab7f2 100644 --- a/barretenberg/sol/src/honk/HonkTypes.sol +++ b/barretenberg/sol/src/honk/HonkTypes.sol @@ -123,17 +123,17 @@ library Honk { Honk.G1ProofPoint w2; Honk.G1ProofPoint w3; Honk.G1ProofPoint w4; - // Lookup helpers - permutations + // Lookup helpers - Permutations Honk.G1ProofPoint zPerm; - // Lookup helpers - logup plookup + // Lookup helpers - logup Honk.G1ProofPoint lookupReadCounts; Honk.G1ProofPoint lookupReadTags; Honk.G1ProofPoint lookupInverses; // Sumcheck - Fr[BATCHED_RELATION_PARTIAL_LENGTH][LOG_N] sumcheckUnivariates; + Fr[BATCHED_RELATION_PARTIAL_LENGTH][CONST_PROOF_SIZE_LOG_N] sumcheckUnivariates; Fr[NUMBER_OF_ENTITIES] sumcheckEvaluations; // Zero morph - Honk.G1ProofPoint[LOG_N] zmCqs; + Honk.G1ProofPoint[CONST_PROOF_SIZE_LOG_N] zmCqs; Honk.G1ProofPoint zmCq; Honk.G1ProofPoint zmPi; } diff --git a/barretenberg/sol/src/honk/HonkVerifier.sol b/barretenberg/sol/src/honk/HonkVerifier.sol index ed448b255db..942adc6814f 100644 --- a/barretenberg/sol/src/honk/HonkVerifier.sol +++ b/barretenberg/sol/src/honk/HonkVerifier.sol @@ -32,12 +32,11 @@ error ZeromorphFailed(); abstract contract BaseHonkVerifier is IVerifier { Fr internal constant GRUMPKIN_CURVE_B_PARAMETER_NEGATED = Fr.wrap(17); // -(-17) - // TODO(md): I would perfer the publicInputs to be 
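// Verification flow, sketched from the functions this contract defines rather than from the verify() body
// shown in this hunk: load the hard-coded verification key, deserialize the proof from calldata with
// loadProof, derive the Fiat-Shamir challenges via TranscriptLib.generateTranscript, check the sumcheck
// claim with verifySumcheck, then check the batched ZeroMorph opening with verifyZeroMorph, which reduces
// to a single KZG pairing in zkgReduceVerify.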
uint256 function verify(bytes calldata proof, bytes32[] calldata publicInputs) public view override returns (bool) { Honk.VerificationKey memory vk = loadVerificationKey(); Honk.Proof memory p = loadProof(proof); - if (vk.publicInputsSize != publicInputs.length) { + if (publicInputs.length != vk.publicInputsSize) { revert PublicInputsLengthWrong(); } @@ -96,7 +95,6 @@ abstract contract BaseHonkVerifier is IVerifier { }); // Lookup / Permutation Helper Commitments - // TODO(md): update the log deriv prover commitment rounds p.lookupReadCounts = Honk.G1ProofPoint({ x_0: uint256(bytes32(proof[0x1e0:0x200])), x_1: uint256(bytes32(proof[0x200:0x220])), @@ -332,7 +330,6 @@ abstract contract BaseHonkVerifier is IVerifier { Fr[NUMBER_OF_SUBRELATIONS] memory evaluations; // Accumulate all 6 custom gates - each with varying number of subrelations - // TODO: annotate how many subrealtions each has accumulateArithmeticRelation(purportedEvaluations, evaluations, powPartialEval); accumulatePermutationRelation(purportedEvaluations, tp, evaluations, powPartialEval); accumulateLogDerivativeLookupRelation(purportedEvaluations, tp, evaluations, powPartialEval); @@ -436,19 +433,6 @@ abstract contract BaseHonkVerifier is IVerifier { } } - // Lookup parameters have been yoinked into memory to avoid stack too deep - struct LookupParams { - Fr eta_sqr; - Fr eta_cube; - Fr one_plus_beta; - Fr gamma_by_one_plus_beta; - Fr wire_accum; - Fr table_accum; - Fr table_accum_shift; - } - - // TODO(md): calculate eta one two and three above - function accumulateLogDerivativeLookupRelation( Fr[NUMBER_OF_ENTITIES] memory p, Transcript memory tp, @@ -969,11 +953,12 @@ abstract contract BaseHonkVerifier is IVerifier { Fr x_pow_2kp1; } - function computeCZetaX(Honk.Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp, Fr batchedEval) - internal - view - returns (Honk.G1Point memory) - { + function computeCZetaX( + Honk.Proof memory proof, + Honk.VerificationKey memory vk, + Transcript memory tp, + Fr batchedEval + ) internal view returns (Honk.G1Point memory) { Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory scalars; Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory commitments; CZetaXParams memory cp; @@ -1163,11 +1148,12 @@ abstract contract BaseHonkVerifier is IVerifier { } } - function zkgReduceVerify(Honk.Proof memory proof, Transcript memory tp, Fr evaluation, Honk.G1Point memory commitment) - internal - view - returns (bool) - { + function zkgReduceVerify( + Honk.Proof memory proof, + Transcript memory tp, + Fr evaluation, + Honk.G1Point memory commitment + ) internal view returns (bool) { Honk.G1Point memory quotient_commitment = convertProofPoint(proof.zmPi); Honk.G1Point memory ONE = Honk.G1Point({x: 1, y: 2}); diff --git a/barretenberg/sol/src/honk/Transcript.sol b/barretenberg/sol/src/honk/Transcript.sol index 986c446bbc5..2c209cf6305 100644 --- a/barretenberg/sol/src/honk/Transcript.sol +++ b/barretenberg/sol/src/honk/Transcript.sol @@ -26,7 +26,6 @@ struct Transcript { Fr zmQuotient; // Derived Fr publicInputsDelta; - Fr lookupGrandProductDelta; } library TranscriptLib { @@ -35,7 +34,6 @@ library TranscriptLib { Honk.VerificationKey memory vk, bytes32[] calldata publicInputs ) internal view returns (Transcript memory t) { - // TODO: calcaulte full series of eta; (t.eta, t.etaTwo, t.etaThree) = generateEtaChallenge(proof, publicInputs); (t.beta, t.gamma) = generateBetaAndGammaChallenges(t.etaThree, proof); @@ -59,8 +57,6 @@ library TranscriptLib { view returns (Fr 
eta, Fr etaTwo, Fr etaThree) { - // TODO(md): the 12 here will need to be halved when we fix the transcript to not be over field elements - // TODO: use assembly bytes32[3 + NUMBER_OF_PUBLIC_INPUTS + 12] memory round0; round0[0] = bytes32(proof.circuitSize); round0[1] = bytes32(proof.publicInputsSize); @@ -71,7 +67,6 @@ library TranscriptLib { // Create the first challenge // Note: w4 is added to the challenge later on - // TODO: UPDATE ALL VALUES IN HERE round0[3 + NUMBER_OF_PUBLIC_INPUTS] = bytes32(proof.w1.x_0); round0[3 + NUMBER_OF_PUBLIC_INPUTS + 1] = bytes32(proof.w1.x_1); round0[3 + NUMBER_OF_PUBLIC_INPUTS + 2] = bytes32(proof.w1.y_0); @@ -95,7 +90,6 @@ library TranscriptLib { view returns (Fr beta, Fr gamma) { - // TODO(md): adjust round size when the proof points are generated correctly - 5 bytes32[13] memory round1; round1[0] = FrLib.toBytes32(previousChallenge); round1[1] = bytes32(proof.lookupReadCounts.x_0); @@ -122,7 +116,6 @@ library TranscriptLib { returns (Fr[NUMBER_OF_ALPHAS] memory alphas) { // Generate the original sumcheck alpha 0 by hashing zPerm and zLookup - // TODO(md): 5 post correct proof size fix uint256[9] memory alpha0; alpha0[0] = Fr.unwrap(previousChallenge); alpha0[1] = proof.lookupInverses.x_0; diff --git a/barretenberg/sol/src/honk/instance/Add2Honk.sol b/barretenberg/sol/src/honk/instance/Add2Honk.sol index ec533e83270..a353795093c 100644 --- a/barretenberg/sol/src/honk/instance/Add2Honk.sol +++ b/barretenberg/sol/src/honk/instance/Add2Honk.sol @@ -25,30 +25,6 @@ import {ecMul, ecAdd, ecSub, negateInplace, convertProofPoint} from "../utils.so // Field arithmetic libraries import {Fr, FrLib} from "../Fr.sol"; -struct Proof { - uint256 circuitSize; - uint256 publicInputsSize; - uint256 publicInputsOffset; - // Free wires - Honk.G1ProofPoint w1; - Honk.G1ProofPoint w2; - Honk.G1ProofPoint w3; - Honk.G1ProofPoint w4; - // Lookup helpers - Permutations - Honk.G1ProofPoint zPerm; - // Lookup helpers - logup - Honk.G1ProofPoint lookupReadCounts; - Honk.G1ProofPoint lookupReadTags; - Honk.G1ProofPoint lookupInverses; - // Sumcheck - Fr[BATCHED_RELATION_PARTIAL_LENGTH][CONST_PROOF_SIZE_LOG_N] sumcheckUnivariates; - Fr[NUMBER_OF_ENTITIES] sumcheckEvaluations; - // Zero morph - Honk.G1ProofPoint[CONST_PROOF_SIZE_LOG_N] zmCqs; - Honk.G1ProofPoint zmCq; - Honk.G1ProofPoint zmPi; -} - // Transcript library to generate fiat shamir challenges struct Transcript { Fr eta; @@ -67,16 +43,14 @@ struct Transcript { Fr zmQuotient; // Derived Fr publicInputsDelta; - Fr lookupGrandProductDelta; } library TranscriptLib { - function generateTranscript(Proof memory proof, Honk.VerificationKey memory vk, bytes32[] calldata publicInputs) - internal - view - returns (Transcript memory t) - { - // TODO: calcaulte full series of eta; + function generateTranscript( + Honk.Proof memory proof, + Honk.VerificationKey memory vk, + bytes32[] calldata publicInputs + ) internal view returns (Transcript memory t) { (t.eta, t.etaTwo, t.etaThree) = generateEtaChallenge(proof, publicInputs); (t.beta, t.gamma) = generateBetaAndGammaChallenges(t.etaThree, proof); @@ -95,13 +69,11 @@ library TranscriptLib { return t; } - function generateEtaChallenge(Proof memory proof, bytes32[] calldata publicInputs) + function generateEtaChallenge(Honk.Proof memory proof, bytes32[] calldata publicInputs) internal view returns (Fr eta, Fr etaTwo, Fr etaThree) { - // TODO(md): the 12 here will need to be halved when we fix the transcript to not be over field elements - // TODO: use assembly bytes32[3 + 
NUMBER_OF_PUBLIC_INPUTS + 12] memory round0; round0[0] = bytes32(proof.circuitSize); round0[1] = bytes32(proof.publicInputsSize); @@ -112,7 +84,6 @@ library TranscriptLib { // Create the first challenge // Note: w4 is added to the challenge later on - // TODO: UPDATE ALL VALUES IN HERE round0[3 + NUMBER_OF_PUBLIC_INPUTS] = bytes32(proof.w1.x_0); round0[3 + NUMBER_OF_PUBLIC_INPUTS + 1] = bytes32(proof.w1.x_1); round0[3 + NUMBER_OF_PUBLIC_INPUTS + 2] = bytes32(proof.w1.y_0); @@ -131,12 +102,11 @@ library TranscriptLib { etaThree = FrLib.fromBytes32(keccak256(abi.encodePacked(Fr.unwrap(etaTwo)))); } - function generateBetaAndGammaChallenges(Fr previousChallenge, Proof memory proof) + function generateBetaAndGammaChallenges(Fr previousChallenge, Honk.Proof memory proof) internal view returns (Fr beta, Fr gamma) { - // TODO(md): adjust round size when the proof points are generated correctly - 5 bytes32[13] memory round1; round1[0] = FrLib.toBytes32(previousChallenge); round1[1] = bytes32(proof.lookupReadCounts.x_0); @@ -157,13 +127,12 @@ library TranscriptLib { } // Alpha challenges non-linearise the gate contributions - function generateAlphaChallenges(Fr previousChallenge, Proof memory proof) + function generateAlphaChallenges(Fr previousChallenge, Honk.Proof memory proof) internal view returns (Fr[NUMBER_OF_ALPHAS] memory alphas) { // Generate the original sumcheck alpha 0 by hashing zPerm and zLookup - // TODO(md): 5 post correct proof size fix uint256[9] memory alpha0; alpha0[0] = Fr.unwrap(previousChallenge); alpha0[1] = proof.lookupInverses.x_0; @@ -191,7 +160,7 @@ library TranscriptLib { } } - function generateSumcheckChallenges(Proof memory proof, Fr prevChallenge) + function generateSumcheckChallenges(Honk.Proof memory proof, Fr prevChallenge) internal view returns (Fr[CONST_PROOF_SIZE_LOG_N] memory sumcheckChallenges) @@ -210,7 +179,7 @@ library TranscriptLib { } } - function generateRhoChallenge(Proof memory proof, Fr prevChallenge) internal view returns (Fr rho) { + function generateRhoChallenge(Honk.Proof memory proof, Fr prevChallenge) internal view returns (Fr rho) { Fr[NUMBER_OF_ENTITIES + 1] memory rhoChallengeElements; rhoChallengeElements[0] = prevChallenge; @@ -222,7 +191,11 @@ library TranscriptLib { rho = FrLib.fromBytes32(keccak256(abi.encodePacked(rhoChallengeElements))); } - function generateZMYChallenge(Fr previousChallenge, Proof memory proof) internal view returns (Fr zeromorphY) { + function generateZMYChallenge(Fr previousChallenge, Honk.Proof memory proof) + internal + view + returns (Fr zeromorphY) + { uint256[CONST_PROOF_SIZE_LOG_N * 4 + 1] memory zmY; zmY[0] = Fr.unwrap(previousChallenge); @@ -236,7 +209,7 @@ library TranscriptLib { zeromorphY = FrLib.fromBytes32(keccak256(abi.encodePacked(zmY))); } - function generateZMXZChallenges(Fr previousChallenge, Proof memory proof) + function generateZMXZChallenges(Fr previousChallenge, Honk.Proof memory proof) internal view returns (Fr zeromorphX, Fr zeromorphZ) @@ -263,12 +236,11 @@ error ZeromorphFailed(); contract Add2HonkVerifier is IVerifier { Fr internal constant GRUMPKIN_CURVE_B_PARAMETER_NEGATED = Fr.wrap(17); // -(-17) - // TODO(md): I would perfer the publicInputs to be uint256 function verify(bytes calldata proof, bytes32[] calldata publicInputs) public view override returns (bool) { Honk.VerificationKey memory vk = loadVerificationKey(); - Proof memory p = loadProof(proof); + Honk.Proof memory p = loadProof(proof); - if (vk.publicInputsSize != NUMBER_OF_PUBLIC_INPUTS) { + if (publicInputs.length != 
vk.publicInputsSize) { revert PublicInputsLengthWrong(); } @@ -297,8 +269,8 @@ contract Add2HonkVerifier is IVerifier { // TODO: mod q proof points // TODO: Preprocess all of the memory locations // TODO: Adjust proof point serde away from poseidon forced field elements - function loadProof(bytes calldata proof) internal view returns (Proof memory) { - Proof memory p; + function loadProof(bytes calldata proof) internal view returns (Honk.Proof memory) { + Honk.Proof memory p; // Metadata p.circuitSize = uint256(bytes32(proof[0x00:0x20])); @@ -458,7 +430,7 @@ contract Add2HonkVerifier is IVerifier { uint256 constant ROUND_TARGET = 0; - function verifySumcheck(Proof memory proof, Transcript memory tp) internal view returns (bool verified) { + function verifySumcheck(Honk.Proof memory proof, Transcript memory tp) internal view returns (bool verified) { Fr roundTarget; Fr powPartialEvaluation = Fr.wrap(1); @@ -554,7 +526,7 @@ contract Add2HonkVerifier is IVerifier { // These are stored in the evaluations part of the proof object. // We add these together, with the appropiate scaling factor ( the alphas calculated in challenges ) // This value is checked against the final value of the target total sum - et voila! - function accumulateRelationEvaluations(Proof memory proof, Transcript memory tp, Fr powPartialEval) + function accumulateRelationEvaluations(Honk.Proof memory proof, Transcript memory tp, Fr powPartialEval) internal view returns (Fr accumulator) @@ -563,7 +535,6 @@ contract Add2HonkVerifier is IVerifier { Fr[NUMBER_OF_SUBRELATIONS] memory evaluations; // Accumulate all 6 custom gates - each with varying number of subrelations - // TODO: annotate how many subrealtions each has accumulateArithmeticRelation(purportedEvaluations, evaluations, powPartialEval); accumulatePermutationRelation(purportedEvaluations, tp, evaluations, powPartialEval); accumulateLogDerivativeLookupRelation(purportedEvaluations, tp, evaluations, powPartialEval); @@ -667,19 +638,6 @@ contract Add2HonkVerifier is IVerifier { } } - // Lookup parameters have been yoinked into memory to avoid stack too deep - struct LookupParams { - Fr eta_sqr; - Fr eta_cube; - Fr one_plus_beta; - Fr gamma_by_one_plus_beta; - Fr wire_accum; - Fr table_accum; - Fr table_accum_shift; - } - - // TODO(md): calculate eta one two and three above - function accumulateLogDerivativeLookupRelation( Fr[NUMBER_OF_ENTITIES] memory p, Transcript memory tp, @@ -1138,7 +1096,7 @@ contract Add2HonkVerifier is IVerifier { } } - function verifyZeroMorph(Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp) + function verifyZeroMorph(Honk.Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp) internal view returns (bool verified) @@ -1159,13 +1117,12 @@ contract Add2HonkVerifier is IVerifier { Honk.G1Point memory c_zeta_Z = ecAdd(c_zeta, ecMul(c_zeta_x, tp.zmZ)); // KZG pairing accumulator - // WORKTODO: concerned that this is zero - it is multiplied by a point later on Fr evaluation = Fr.wrap(0); verified = zkgReduceVerify(proof, tp, evaluation, c_zeta_Z); } // Compute commitment to lifted degree quotient identity - function computeCZeta(Proof memory proof, Transcript memory tp) internal view returns (Honk.G1Point memory) { + function computeCZeta(Honk.Proof memory proof, Transcript memory tp) internal view returns (Honk.G1Point memory) { Fr[LOG_N + 1] memory scalars; Honk.G1ProofPoint[LOG_N + 1] memory commitments; @@ -1200,11 +1157,12 @@ contract Add2HonkVerifier is IVerifier { Fr x_pow_2kp1; } - function 
computeCZetaX(Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp, Fr batchedEval) - internal - view - returns (Honk.G1Point memory) - { + function computeCZetaX( + Honk.Proof memory proof, + Honk.VerificationKey memory vk, + Transcript memory tp, + Fr batchedEval + ) internal view returns (Honk.G1Point memory) { Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory scalars; Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory commitments; CZetaXParams memory cp; @@ -1394,11 +1352,12 @@ contract Add2HonkVerifier is IVerifier { } } - function zkgReduceVerify(Proof memory proof, Transcript memory tp, Fr evaluation, Honk.G1Point memory commitment) - internal - view - returns (bool) - { + function zkgReduceVerify( + Honk.Proof memory proof, + Transcript memory tp, + Fr evaluation, + Honk.G1Point memory commitment + ) internal view returns (bool) { Honk.G1Point memory quotient_commitment = convertProofPoint(proof.zmPi); Honk.G1Point memory ONE = Honk.G1Point({x: 1, y: 2}); diff --git a/barretenberg/sol/src/honk/instance/BlakeHonk.sol b/barretenberg/sol/src/honk/instance/BlakeHonk.sol index e1037cef0d0..f94fa465c60 100644 --- a/barretenberg/sol/src/honk/instance/BlakeHonk.sol +++ b/barretenberg/sol/src/honk/instance/BlakeHonk.sol @@ -22,30 +22,6 @@ import {ecMul, ecAdd, ecSub, negateInplace, convertProofPoint} from "../utils.so // Field arithmetic libraries - prevent littering the code with modmul / addmul import {Fr, FrLib} from "../Fr.sol"; -struct Proof { - uint256 circuitSize; - uint256 publicInputsSize; - uint256 publicInputsOffset; - // Free wires - Honk.G1ProofPoint w1; - Honk.G1ProofPoint w2; - Honk.G1ProofPoint w3; - Honk.G1ProofPoint w4; - // Lookup helpers - Permutations - Honk.G1ProofPoint zPerm; - // Lookup helpers - logup - Honk.G1ProofPoint lookupReadCounts; - Honk.G1ProofPoint lookupReadTags; - Honk.G1ProofPoint lookupInverses; - // Sumcheck - Fr[BATCHED_RELATION_PARTIAL_LENGTH][CONST_PROOF_SIZE_LOG_N] sumcheckUnivariates; - Fr[NUMBER_OF_ENTITIES] sumcheckEvaluations; - // Zero morph - Honk.G1ProofPoint[CONST_PROOF_SIZE_LOG_N] zmCqs; - Honk.G1ProofPoint zmCq; - Honk.G1ProofPoint zmPi; -} - // Transcript library to generate fiat shamir challenges struct Transcript { Fr eta; @@ -64,16 +40,14 @@ struct Transcript { Fr zmQuotient; // Derived Fr publicInputsDelta; - Fr lookupGrandProductDelta; } library TranscriptLib { - function generateTranscript(Proof memory proof, Honk.VerificationKey memory vk, bytes32[] calldata publicInputs) - internal - view - returns (Transcript memory t) - { - // TODO: calcaulte full series of eta; + function generateTranscript( + Honk.Proof memory proof, + Honk.VerificationKey memory vk, + bytes32[] calldata publicInputs + ) internal view returns (Transcript memory t) { (t.eta, t.etaTwo, t.etaThree) = generateEtaChallenge(proof, publicInputs); (t.beta, t.gamma) = generateBetaAndGammaChallenges(t.etaThree, proof); @@ -92,13 +66,11 @@ library TranscriptLib { return t; } - function generateEtaChallenge(Proof memory proof, bytes32[] calldata publicInputs) + function generateEtaChallenge(Honk.Proof memory proof, bytes32[] calldata publicInputs) internal view returns (Fr eta, Fr etaTwo, Fr etaThree) { - // TODO(md): the 12 here will need to be halved when we fix the transcript to not be over field elements - // TODO: use assembly bytes32[3 + NUMBER_OF_PUBLIC_INPUTS + 12] memory round0; round0[0] = bytes32(proof.circuitSize); round0[1] = bytes32(proof.publicInputsSize); @@ -109,7 +81,6 @@ library TranscriptLib { // Create 
the first challenge // Note: w4 is added to the challenge later on - // TODO: UPDATE ALL VALUES IN HERE round0[3 + NUMBER_OF_PUBLIC_INPUTS] = bytes32(proof.w1.x_0); round0[3 + NUMBER_OF_PUBLIC_INPUTS + 1] = bytes32(proof.w1.x_1); round0[3 + NUMBER_OF_PUBLIC_INPUTS + 2] = bytes32(proof.w1.y_0); @@ -128,12 +99,11 @@ library TranscriptLib { etaThree = FrLib.fromBytes32(keccak256(abi.encodePacked(Fr.unwrap(etaTwo)))); } - function generateBetaAndGammaChallenges(Fr previousChallenge, Proof memory proof) + function generateBetaAndGammaChallenges(Fr previousChallenge, Honk.Proof memory proof) internal view returns (Fr beta, Fr gamma) { - // TODO(md): adjust round size when the proof points are generated correctly - 5 bytes32[13] memory round1; round1[0] = FrLib.toBytes32(previousChallenge); round1[1] = bytes32(proof.lookupReadCounts.x_0); @@ -154,13 +124,12 @@ library TranscriptLib { } // Alpha challenges non-linearise the gate contributions - function generateAlphaChallenges(Fr previousChallenge, Proof memory proof) + function generateAlphaChallenges(Fr previousChallenge, Honk.Proof memory proof) internal view returns (Fr[NUMBER_OF_ALPHAS] memory alphas) { // Generate the original sumcheck alpha 0 by hashing zPerm and zLookup - // TODO(md): 5 post correct proof size fix uint256[9] memory alpha0; alpha0[0] = Fr.unwrap(previousChallenge); alpha0[1] = proof.lookupInverses.x_0; @@ -188,7 +157,7 @@ library TranscriptLib { } } - function generateSumcheckChallenges(Proof memory proof, Fr prevChallenge) + function generateSumcheckChallenges(Honk.Proof memory proof, Fr prevChallenge) internal view returns (Fr[CONST_PROOF_SIZE_LOG_N] memory sumcheckChallenges) @@ -207,7 +176,7 @@ library TranscriptLib { } } - function generateRhoChallenge(Proof memory proof, Fr prevChallenge) internal view returns (Fr rho) { + function generateRhoChallenge(Honk.Proof memory proof, Fr prevChallenge) internal view returns (Fr rho) { Fr[NUMBER_OF_ENTITIES + 1] memory rhoChallengeElements; rhoChallengeElements[0] = prevChallenge; @@ -219,7 +188,11 @@ library TranscriptLib { rho = FrLib.fromBytes32(keccak256(abi.encodePacked(rhoChallengeElements))); } - function generateZMYChallenge(Fr previousChallenge, Proof memory proof) internal view returns (Fr zeromorphY) { + function generateZMYChallenge(Fr previousChallenge, Honk.Proof memory proof) + internal + view + returns (Fr zeromorphY) + { uint256[CONST_PROOF_SIZE_LOG_N * 4 + 1] memory zmY; zmY[0] = Fr.unwrap(previousChallenge); @@ -233,7 +206,7 @@ library TranscriptLib { zeromorphY = FrLib.fromBytes32(keccak256(abi.encodePacked(zmY))); } - function generateZMXZChallenges(Fr previousChallenge, Proof memory proof) + function generateZMXZChallenges(Fr previousChallenge, Honk.Proof memory proof) internal view returns (Fr zeromorphX, Fr zeromorphZ) @@ -260,12 +233,11 @@ error ZeromorphFailed(); contract BlakeHonkVerifier is IVerifier { Fr internal constant GRUMPKIN_CURVE_B_PARAMETER_NEGATED = Fr.wrap(17); // -(-17) - // TODO(md): I would perfer the publicInputs to be uint256 function verify(bytes calldata proof, bytes32[] calldata publicInputs) public view override returns (bool) { Honk.VerificationKey memory vk = loadVerificationKey(); - Proof memory p = loadProof(proof); + Honk.Proof memory p = loadProof(proof); - if (vk.publicInputsSize != NUMBER_OF_PUBLIC_INPUTS) { + if (publicInputs.length != vk.publicInputsSize) { revert PublicInputsLengthWrong(); } @@ -294,8 +266,8 @@ contract BlakeHonkVerifier is IVerifier { // TODO: mod q proof points // TODO: Preprocess all of the memory 
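// loadProof deserializes the proof from fixed calldata offsets into the Honk.Proof struct: circuitSize is
// read from [0x00, 0x20), the remaining metadata words follow, and each commitment is a Honk.G1ProofPoint
// read as four 32-byte limbs (x_0, x_1, y_0, y_1); lookupReadCounts, for example, starts at offset 0x1e0.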
locations // TODO: Adjust proof point serde away from poseidon forced field elements - function loadProof(bytes calldata proof) internal view returns (Proof memory) { - Proof memory p; + function loadProof(bytes calldata proof) internal view returns (Honk.Proof memory) { + Honk.Proof memory p; // Metadata p.circuitSize = uint256(bytes32(proof[0x00:0x20])); @@ -324,7 +296,6 @@ contract BlakeHonkVerifier is IVerifier { }); // Lookup / Permutation Helper Commitments - // TODO(md): update the log deriv prover commitment rounds p.lookupReadCounts = Honk.G1ProofPoint({ x_0: uint256(bytes32(proof[0x1e0:0x200])), x_1: uint256(bytes32(proof[0x200:0x220])), @@ -455,7 +426,7 @@ contract BlakeHonkVerifier is IVerifier { uint256 constant ROUND_TARGET = 0; - function verifySumcheck(Proof memory proof, Transcript memory tp) internal view returns (bool verified) { + function verifySumcheck(Honk.Proof memory proof, Transcript memory tp) internal view returns (bool verified) { Fr roundTarget; Fr powPartialEvaluation = Fr.wrap(1); @@ -551,7 +522,7 @@ contract BlakeHonkVerifier is IVerifier { // These are stored in the evaluations part of the proof object. // We add these together, with the appropiate scaling factor ( the alphas calculated in challenges ) // This value is checked against the final value of the target total sum - et voila! - function accumulateRelationEvaluations(Proof memory proof, Transcript memory tp, Fr powPartialEval) + function accumulateRelationEvaluations(Honk.Proof memory proof, Transcript memory tp, Fr powPartialEval) internal view returns (Fr accumulator) @@ -560,7 +531,6 @@ contract BlakeHonkVerifier is IVerifier { Fr[NUMBER_OF_SUBRELATIONS] memory evaluations; // Accumulate all 6 custom gates - each with varying number of subrelations - // TODO: annotate how many subrealtions each has accumulateArithmeticRelation(purportedEvaluations, evaluations, powPartialEval); accumulatePermutationRelation(purportedEvaluations, tp, evaluations, powPartialEval); accumulateLogDerivativeLookupRelation(purportedEvaluations, tp, evaluations, powPartialEval); @@ -664,19 +634,6 @@ contract BlakeHonkVerifier is IVerifier { } } - // Lookup parameters have been yoinked into memory to avoid stack too deep - struct LookupParams { - Fr eta_sqr; - Fr eta_cube; - Fr one_plus_beta; - Fr gamma_by_one_plus_beta; - Fr wire_accum; - Fr table_accum; - Fr table_accum_shift; - } - - // TODO(md): calculate eta one two and three above - function accumulateLogDerivativeLookupRelation( Fr[NUMBER_OF_ENTITIES] memory p, Transcript memory tp, @@ -1135,7 +1092,7 @@ contract BlakeHonkVerifier is IVerifier { } } - function verifyZeroMorph(Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp) + function verifyZeroMorph(Honk.Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp) internal view returns (bool verified) @@ -1156,13 +1113,12 @@ contract BlakeHonkVerifier is IVerifier { Honk.G1Point memory c_zeta_Z = ecAdd(c_zeta, ecMul(c_zeta_x, tp.zmZ)); // KZG pairing accumulator - // WORKTODO: concerned that this is zero - it is multiplied by a point later on Fr evaluation = Fr.wrap(0); verified = zkgReduceVerify(proof, tp, evaluation, c_zeta_Z); } // Compute commitment to lifted degree quotient identity - function computeCZeta(Proof memory proof, Transcript memory tp) internal view returns (Honk.G1Point memory) { + function computeCZeta(Honk.Proof memory proof, Transcript memory tp) internal view returns (Honk.G1Point memory) { Fr[LOG_N + 1] memory scalars; Honk.G1ProofPoint[LOG_N + 1] 
memory commitments; @@ -1197,11 +1153,12 @@ contract BlakeHonkVerifier is IVerifier { Fr x_pow_2kp1; } - function computeCZetaX(Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp, Fr batchedEval) - internal - view - returns (Honk.G1Point memory) - { + function computeCZetaX( + Honk.Proof memory proof, + Honk.VerificationKey memory vk, + Transcript memory tp, + Fr batchedEval + ) internal view returns (Honk.G1Point memory) { Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory scalars; Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory commitments; CZetaXParams memory cp; @@ -1391,11 +1348,12 @@ contract BlakeHonkVerifier is IVerifier { } } - function zkgReduceVerify(Proof memory proof, Transcript memory tp, Fr evaluation, Honk.G1Point memory commitment) - internal - view - returns (bool) - { + function zkgReduceVerify( + Honk.Proof memory proof, + Transcript memory tp, + Fr evaluation, + Honk.G1Point memory commitment + ) internal view returns (bool) { Honk.G1Point memory quotient_commitment = convertProofPoint(proof.zmPi); Honk.G1Point memory ONE = Honk.G1Point({x: 1, y: 2}); diff --git a/barretenberg/sol/src/honk/instance/EcdsaHonk.sol b/barretenberg/sol/src/honk/instance/EcdsaHonk.sol index 61e08027aa6..b90229127ae 100644 --- a/barretenberg/sol/src/honk/instance/EcdsaHonk.sol +++ b/barretenberg/sol/src/honk/instance/EcdsaHonk.sol @@ -22,30 +22,6 @@ import {ecMul, ecAdd, ecSub, negateInplace, convertProofPoint} from "../utils.so // Field arithmetic libraries - prevent littering the code with modmul / addmul import {Fr, FrLib} from "../Fr.sol"; -struct Proof { - uint256 circuitSize; - uint256 publicInputsSize; - uint256 publicInputsOffset; - // Free wires - Honk.G1ProofPoint w1; - Honk.G1ProofPoint w2; - Honk.G1ProofPoint w3; - Honk.G1ProofPoint w4; - // Lookup helpers - Permutations - Honk.G1ProofPoint zPerm; - // Lookup helpers - logup - Honk.G1ProofPoint lookupReadCounts; - Honk.G1ProofPoint lookupReadTags; - Honk.G1ProofPoint lookupInverses; - // Sumcheck - Fr[BATCHED_RELATION_PARTIAL_LENGTH][CONST_PROOF_SIZE_LOG_N] sumcheckUnivariates; - Fr[NUMBER_OF_ENTITIES] sumcheckEvaluations; - // Zero morph - Honk.G1ProofPoint[CONST_PROOF_SIZE_LOG_N] zmCqs; - Honk.G1ProofPoint zmCq; - Honk.G1ProofPoint zmPi; -} - // Transcript library to generate fiat shamir challenges struct Transcript { Fr eta; @@ -68,12 +44,11 @@ struct Transcript { } library TranscriptLib { - function generateTranscript(Proof memory proof, Honk.VerificationKey memory vk, bytes32[] calldata publicInputs) - internal - view - returns (Transcript memory t) - { - // TODO: calcaulte full series of eta; + function generateTranscript( + Honk.Proof memory proof, + Honk.VerificationKey memory vk, + bytes32[] calldata publicInputs + ) internal view returns (Transcript memory t) { (t.eta, t.etaTwo, t.etaThree) = generateEtaChallenge(proof, publicInputs); (t.beta, t.gamma) = generateBetaAndGammaChallenges(t.etaThree, proof); @@ -92,13 +67,11 @@ library TranscriptLib { return t; } - function generateEtaChallenge(Proof memory proof, bytes32[] calldata publicInputs) + function generateEtaChallenge(Honk.Proof memory proof, bytes32[] calldata publicInputs) internal view returns (Fr eta, Fr etaTwo, Fr etaThree) { - // TODO(md): the 12 here will need to be halved when we fix the transcript to not be over field elements - // TODO: use assembly bytes32[3 + NUMBER_OF_PUBLIC_INPUTS + 12] memory round0; round0[0] = bytes32(proof.circuitSize); round0[1] = bytes32(proof.publicInputsSize); @@ -109,7 +82,6 
@@ library TranscriptLib { // Create the first challenge // Note: w4 is added to the challenge later on - // TODO: UPDATE ALL VALUES IN HERE round0[3 + NUMBER_OF_PUBLIC_INPUTS] = bytes32(proof.w1.x_0); round0[3 + NUMBER_OF_PUBLIC_INPUTS + 1] = bytes32(proof.w1.x_1); round0[3 + NUMBER_OF_PUBLIC_INPUTS + 2] = bytes32(proof.w1.y_0); @@ -128,12 +100,11 @@ library TranscriptLib { etaThree = FrLib.fromBytes32(keccak256(abi.encodePacked(Fr.unwrap(etaTwo)))); } - function generateBetaAndGammaChallenges(Fr previousChallenge, Proof memory proof) + function generateBetaAndGammaChallenges(Fr previousChallenge, Honk.Proof memory proof) internal view returns (Fr beta, Fr gamma) { - // TODO(md): adjust round size when the proof points are generated correctly - 5 bytes32[13] memory round1; round1[0] = FrLib.toBytes32(previousChallenge); round1[1] = bytes32(proof.lookupReadCounts.x_0); @@ -154,13 +125,12 @@ library TranscriptLib { } // Alpha challenges non-linearise the gate contributions - function generateAlphaChallenges(Fr previousChallenge, Proof memory proof) + function generateAlphaChallenges(Fr previousChallenge, Honk.Proof memory proof) internal view returns (Fr[NUMBER_OF_ALPHAS] memory alphas) { // Generate the original sumcheck alpha 0 by hashing zPerm and zLookup - // TODO(md): 5 post correct proof size fix uint256[9] memory alpha0; alpha0[0] = Fr.unwrap(previousChallenge); alpha0[1] = proof.lookupInverses.x_0; @@ -188,7 +158,7 @@ library TranscriptLib { } } - function generateSumcheckChallenges(Proof memory proof, Fr prevChallenge) + function generateSumcheckChallenges(Honk.Proof memory proof, Fr prevChallenge) internal view returns (Fr[CONST_PROOF_SIZE_LOG_N] memory sumcheckChallenges) @@ -207,7 +177,7 @@ library TranscriptLib { } } - function generateRhoChallenge(Proof memory proof, Fr prevChallenge) internal view returns (Fr rho) { + function generateRhoChallenge(Honk.Proof memory proof, Fr prevChallenge) internal view returns (Fr rho) { Fr[NUMBER_OF_ENTITIES + 1] memory rhoChallengeElements; rhoChallengeElements[0] = prevChallenge; @@ -219,7 +189,11 @@ library TranscriptLib { rho = FrLib.fromBytes32(keccak256(abi.encodePacked(rhoChallengeElements))); } - function generateZMYChallenge(Fr previousChallenge, Proof memory proof) internal view returns (Fr zeromorphY) { + function generateZMYChallenge(Fr previousChallenge, Honk.Proof memory proof) + internal + view + returns (Fr zeromorphY) + { uint256[CONST_PROOF_SIZE_LOG_N * 4 + 1] memory zmY; zmY[0] = Fr.unwrap(previousChallenge); @@ -233,7 +207,7 @@ library TranscriptLib { zeromorphY = FrLib.fromBytes32(keccak256(abi.encodePacked(zmY))); } - function generateZMXZChallenges(Fr previousChallenge, Proof memory proof) + function generateZMXZChallenges(Fr previousChallenge, Honk.Proof memory proof) internal view returns (Fr zeromorphX, Fr zeromorphZ) @@ -260,12 +234,11 @@ error ZeromorphFailed(); contract EcdsaHonkVerifier is IVerifier { Fr internal constant GRUMPKIN_CURVE_B_PARAMETER_NEGATED = Fr.wrap(17); // -(-17) - // TODO(md): I would perfer the publicInputs to be uint256 function verify(bytes calldata proof, bytes32[] calldata publicInputs) public view override returns (bool) { Honk.VerificationKey memory vk = loadVerificationKey(); - Proof memory p = loadProof(proof); + Honk.Proof memory p = loadProof(proof); - if (vk.publicInputsSize != NUMBER_OF_PUBLIC_INPUTS) { + if (publicInputs.length != vk.publicInputsSize) { revert PublicInputsLengthWrong(); } @@ -294,8 +267,8 @@ contract EcdsaHonkVerifier is IVerifier { // TODO: mod q proof points 
// TODO: Preprocess all of the memory locations // TODO: Adjust proof point serde away from poseidon forced field elements - function loadProof(bytes calldata proof) internal view returns (Proof memory) { - Proof memory p; + function loadProof(bytes calldata proof) internal view returns (Honk.Proof memory) { + Honk.Proof memory p; // Metadata p.circuitSize = uint256(bytes32(proof[0x00:0x20])); @@ -324,7 +297,6 @@ contract EcdsaHonkVerifier is IVerifier { }); // Lookup / Permutation Helper Commitments - // TODO(md): update the log deriv prover commitment rounds p.lookupReadCounts = Honk.G1ProofPoint({ x_0: uint256(bytes32(proof[0x1e0:0x200])), x_1: uint256(bytes32(proof[0x200:0x220])), @@ -455,7 +427,7 @@ contract EcdsaHonkVerifier is IVerifier { uint256 constant ROUND_TARGET = 0; - function verifySumcheck(Proof memory proof, Transcript memory tp) internal view returns (bool verified) { + function verifySumcheck(Honk.Proof memory proof, Transcript memory tp) internal view returns (bool verified) { Fr roundTarget; Fr powPartialEvaluation = Fr.wrap(1); @@ -551,7 +523,7 @@ contract EcdsaHonkVerifier is IVerifier { // These are stored in the evaluations part of the proof object. // We add these together, with the appropiate scaling factor ( the alphas calculated in challenges ) // This value is checked against the final value of the target total sum - et voila! - function accumulateRelationEvaluations(Proof memory proof, Transcript memory tp, Fr powPartialEval) + function accumulateRelationEvaluations(Honk.Proof memory proof, Transcript memory tp, Fr powPartialEval) internal view returns (Fr accumulator) @@ -560,7 +532,6 @@ contract EcdsaHonkVerifier is IVerifier { Fr[NUMBER_OF_SUBRELATIONS] memory evaluations; // Accumulate all 6 custom gates - each with varying number of subrelations - // TODO: annotate how many subrealtions each has accumulateArithmeticRelation(purportedEvaluations, evaluations, powPartialEval); accumulatePermutationRelation(purportedEvaluations, tp, evaluations, powPartialEval); accumulateLogDerivativeLookupRelation(purportedEvaluations, tp, evaluations, powPartialEval); @@ -664,19 +635,6 @@ contract EcdsaHonkVerifier is IVerifier { } } - // Lookup parameters have been yoinked into memory to avoid stack too deep - struct LookupParams { - Fr eta_sqr; - Fr eta_cube; - Fr one_plus_beta; - Fr gamma_by_one_plus_beta; - Fr wire_accum; - Fr table_accum; - Fr table_accum_shift; - } - - // TODO(md): calculate eta one two and three above - function accumulateLogDerivativeLookupRelation( Fr[NUMBER_OF_ENTITIES] memory p, Transcript memory tp, @@ -1135,7 +1093,7 @@ contract EcdsaHonkVerifier is IVerifier { } } - function verifyZeroMorph(Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp) + function verifyZeroMorph(Honk.Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp) internal view returns (bool verified) @@ -1162,7 +1120,7 @@ contract EcdsaHonkVerifier is IVerifier { } // Compute commitment to lifted degree quotient identity - function computeCZeta(Proof memory proof, Transcript memory tp) internal view returns (Honk.G1Point memory) { + function computeCZeta(Honk.Proof memory proof, Transcript memory tp) internal view returns (Honk.G1Point memory) { Fr[LOG_N + 1] memory scalars; Honk.G1ProofPoint[LOG_N + 1] memory commitments; @@ -1197,11 +1155,12 @@ contract EcdsaHonkVerifier is IVerifier { Fr x_pow_2kp1; } - function computeCZetaX(Proof memory proof, Honk.VerificationKey memory vk, Transcript memory tp, Fr batchedEval) - internal - view - 
returns (Honk.G1Point memory) - { + function computeCZetaX( + Honk.Proof memory proof, + Honk.VerificationKey memory vk, + Transcript memory tp, + Fr batchedEval + ) internal view returns (Honk.G1Point memory) { Fr[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory scalars; Honk.G1Point[NUMBER_OF_ENTITIES + CONST_PROOF_SIZE_LOG_N + 1] memory commitments; CZetaXParams memory cp; @@ -1391,11 +1350,12 @@ contract EcdsaHonkVerifier is IVerifier { } } - function zkgReduceVerify(Proof memory proof, Transcript memory tp, Fr evaluation, Honk.G1Point memory commitment) - internal - view - returns (bool) - { + function zkgReduceVerify( + Honk.Proof memory proof, + Transcript memory tp, + Fr evaluation, + Honk.G1Point memory commitment + ) internal view returns (bool) { Honk.G1Point memory quotient_commitment = convertProofPoint(proof.zmPi); Honk.G1Point memory ONE = Honk.G1Point({x: 1, y: 2}); diff --git a/barretenberg/sol/src/honk/keys/Add2HonkVerificationKey.sol b/barretenberg/sol/src/honk/keys/Add2HonkVerificationKey.sol index 8089bf3c203..1b9ba31c154 100644 --- a/barretenberg/sol/src/honk/keys/Add2HonkVerificationKey.sol +++ b/barretenberg/sol/src/honk/keys/Add2HonkVerificationKey.sol @@ -4,17 +4,16 @@ pragma solidity >=0.8.21; import {Honk} from "../HonkTypes.sol"; -uint256 constant N = 0x0000000000000000000000000000000000000000000000000000000000000020; -// Note(md): fixed proof size at 2^28 dictates this value -uint256 constant LOG_N = 0x0000000000000000000000000000000000000000000000000000000000000005; -uint256 constant NUMBER_OF_PUBLIC_INPUTS = 0x0000000000000000000000000000000000000000000000000000000000000003; +uint256 constant N = 32; +uint256 constant LOG_N = 5; +uint256 constant NUMBER_OF_PUBLIC_INPUTS = 3; library Add2HonkVerificationKey { function loadVerificationKey() internal pure returns (Honk.VerificationKey memory) { Honk.VerificationKey memory vk = Honk.VerificationKey({ - circuitSize: uint256(0x0000000000000000000000000000000000000000000000000000000000000020), - logCircuitSize: uint256(0x0000000000000000000000000000000000000000000000000000000000000005), - publicInputsSize: uint256(0x0000000000000000000000000000000000000000000000000000000000000003), + circuitSize: uint256(32), + logCircuitSize: uint256(5), + publicInputsSize: uint256(3), ql: Honk.G1Point({ x: uint256(0x043d063b130adfb37342af45d0155a28edd1a7e46c840d9c943fdf45521c64ce), y: uint256(0x261522c4089330646aff96736194949330952ae74c573d1686d9cb4a00733854) diff --git a/barretenberg/sol/src/honk/keys/BlakeHonkVerificationKey.sol b/barretenberg/sol/src/honk/keys/BlakeHonkVerificationKey.sol index 9880af55106..49f2ae375c0 100644 --- a/barretenberg/sol/src/honk/keys/BlakeHonkVerificationKey.sol +++ b/barretenberg/sol/src/honk/keys/BlakeHonkVerificationKey.sol @@ -4,16 +4,16 @@ pragma solidity >=0.8.21; import {Honk} from "../HonkTypes.sol"; -uint256 constant N = 0x0000000000000000000000000000000000000000000000000000000000008000; -uint256 constant LOG_N = 0x000000000000000000000000000000000000000000000000000000000000000f; -uint256 constant NUMBER_OF_PUBLIC_INPUTS = 0x0000000000000000000000000000000000000000000000000000000000000004; +uint256 constant N = 32768; +uint256 constant LOG_N = 15; +uint256 constant NUMBER_OF_PUBLIC_INPUTS = 4; library BlakeHonkVerificationKey { function loadVerificationKey() internal pure returns (Honk.VerificationKey memory) { Honk.VerificationKey memory vk = Honk.VerificationKey({ - circuitSize: uint256(0x0000000000000000000000000000000000000000000000000000000000008000), - logCircuitSize: 
uint256(0x000000000000000000000000000000000000000000000000000000000000000f), - publicInputsSize: uint256(0x0000000000000000000000000000000000000000000000000000000000000004), + circuitSize: uint256(32768), + logCircuitSize: uint256(15), + publicInputsSize: uint256(4), ql: Honk.G1Point({ x: uint256(0x2e5f133c25f7e05bd6660196c892121f7fa686cb9a8717a5deea6cd0881e618e), y: uint256(0x1189bba9eeea96ba8935052434f4b0a60b0a481e3464dd81dfcd89e23def001b) diff --git a/barretenberg/sol/src/honk/keys/EcdsaHonkVerificationKey.sol b/barretenberg/sol/src/honk/keys/EcdsaHonkVerificationKey.sol index a0521edeb1a..1ef78325263 100644 --- a/barretenberg/sol/src/honk/keys/EcdsaHonkVerificationKey.sol +++ b/barretenberg/sol/src/honk/keys/EcdsaHonkVerificationKey.sol @@ -4,16 +4,16 @@ pragma solidity >=0.8.21; import {Honk} from "../HonkTypes.sol"; -uint256 constant N = 0x0000000000000000000000000000000000000000000000000000000000010000; -uint256 constant LOG_N = 0x0000000000000000000000000000000000000000000000000000000000000010; -uint256 constant NUMBER_OF_PUBLIC_INPUTS = 0x0000000000000000000000000000000000000000000000000000000000000006; +uint256 constant N = 65536; +uint256 constant LOG_N = 16; +uint256 constant NUMBER_OF_PUBLIC_INPUTS = 6; library EcdsaHonkVerificationKey { function loadVerificationKey() internal pure returns (Honk.VerificationKey memory) { Honk.VerificationKey memory vk = Honk.VerificationKey({ - circuitSize: uint256(0x0000000000000000000000000000000000000000000000000000000000010000), - logCircuitSize: uint256(0x0000000000000000000000000000000000000000000000000000000000000010), - publicInputsSize: uint256(0x0000000000000000000000000000000000000000000000000000000000000006), + circuitSize: uint256(65536), + logCircuitSize: uint256(16), + publicInputsSize: uint256(6), ql: Honk.G1Point({ x: uint256(0x0b1acdcf739e1e6c27df046577122a292a77f4fcdf8056d8b8ae12f105d3a888), y: uint256(0x145dad3bdd9a262411aaa657129df49dbf44a63f510e9ab8191622c643ebd9bd) diff --git a/barretenberg/sol/src/ultra/keys/EcdsaUltraVerificationKey.sol b/barretenberg/sol/src/ultra/keys/EcdsaUltraVerificationKey.sol index 2d6ea8d8991..82e67a786f6 100644 --- a/barretenberg/sol/src/ultra/keys/EcdsaUltraVerificationKey.sol +++ b/barretenberg/sol/src/ultra/keys/EcdsaUltraVerificationKey.sol @@ -1,11 +1,11 @@ -// Verification Key Hash: 37857017e148045fbf1d4c2a44593a697c670712dd978c475904b8dc36169f18 +// Verification Key Hash: 3b1c156f02c5934c94573e30a9d55a6398e8d1f616136797c008194d26892a55 // SPDX-License-Identifier: Apache-2.0 // Copyright 2022 Aztec pragma solidity >=0.8.4; library EcdsaUltraVerificationKey { function verificationKeyHash() internal pure returns (bytes32) { - return 0x37857017e148045fbf1d4c2a44593a697c670712dd978c475904b8dc36169f18; + return 0x3b1c156f02c5934c94573e30a9d55a6398e8d1f616136797c008194d26892a55; } function loadVerificationKey(uint256 _vk, uint256 _omegaInverseLoc) internal pure { @@ -14,34 +14,34 @@ library EcdsaUltraVerificationKey { mstore(add(_vk, 0x20), 0x0000000000000000000000000000000000000000000000000000000000000006) // vk.num_inputs mstore(add(_vk, 0x40), 0x00eeb2cb5981ed45649abebde081dcff16c8601de4347e7dd1628ba2daac43b7) // vk.work_root mstore(add(_vk, 0x60), 0x30641e0e92bebef818268d663bcad6dbcfd6c0149170f6d7d350b1b1fa6c1001) // vk.domain_inverse - mstore(add(_vk, 0x80), 0x0740309101d13ea04888ecf9966d509116884ac038bdf2c07646806c87882e5f) // vk.Q1.x - mstore(add(_vk, 0xa0), 0x2fa84f63c12178cc8ac1f4e8588df585bde0d219e278e4ee201ffba99a3411e4) // vk.Q1.y - mstore(add(_vk, 0xc0), 
diff --git a/barretenberg/sol/src/ultra/keys/EcdsaUltraVerificationKey.sol b/barretenberg/sol/src/ultra/keys/EcdsaUltraVerificationKey.sol
index 2d6ea8d8991..82e67a786f6 100644
--- a/barretenberg/sol/src/ultra/keys/EcdsaUltraVerificationKey.sol
+++ b/barretenberg/sol/src/ultra/keys/EcdsaUltraVerificationKey.sol
@@ -1,11 +1,11 @@
-// Verification Key Hash: 37857017e148045fbf1d4c2a44593a697c670712dd978c475904b8dc36169f18
+// Verification Key Hash: 3b1c156f02c5934c94573e30a9d55a6398e8d1f616136797c008194d26892a55
 // SPDX-License-Identifier: Apache-2.0
 // Copyright 2022 Aztec
 pragma solidity >=0.8.4;
 
 library EcdsaUltraVerificationKey {
     function verificationKeyHash() internal pure returns (bytes32) {
-        return 0x37857017e148045fbf1d4c2a44593a697c670712dd978c475904b8dc36169f18;
+        return 0x3b1c156f02c5934c94573e30a9d55a6398e8d1f616136797c008194d26892a55;
     }
 
     function loadVerificationKey(uint256 _vk, uint256 _omegaInverseLoc) internal pure {
@@ -14,34 +14,34 @@ library EcdsaUltraVerificationKey {
             mstore(add(_vk, 0x20), 0x0000000000000000000000000000000000000000000000000000000000000006) // vk.num_inputs
             mstore(add(_vk, 0x40), 0x00eeb2cb5981ed45649abebde081dcff16c8601de4347e7dd1628ba2daac43b7) // vk.work_root
             mstore(add(_vk, 0x60), 0x30641e0e92bebef818268d663bcad6dbcfd6c0149170f6d7d350b1b1fa6c1001) // vk.domain_inverse
-            mstore(add(_vk, 0x80), 0x0740309101d13ea04888ecf9966d509116884ac038bdf2c07646806c87882e5f) // vk.Q1.x
-            mstore(add(_vk, 0xa0), 0x2fa84f63c12178cc8ac1f4e8588df585bde0d219e278e4ee201ffba99a3411e4) // vk.Q1.y
-            mstore(add(_vk, 0xc0), 0x0558758bd7554eeff4f8134d8ae5698a017ce2a129925bd48801548c77fbe63c) // vk.Q2.x
-            mstore(add(_vk, 0xe0), 0x0fb3ffe7ccc0570d878afafb118b383e81957c7b45b1b6ec827687c32e041b48) // vk.Q2.y
-            mstore(add(_vk, 0x100), 0x29b23512911ac602dad1322d794b14d93969dfce065e7d7f3c27eca231a7ce6b) // vk.Q3.x
-            mstore(add(_vk, 0x120), 0x181fa6dd3e9381e50a4803813066991c57a2da9124bc3578a50c368e64806238) // vk.Q3.y
-            mstore(add(_vk, 0x140), 0x00eaa74d50fa51c361607c4975d0901d985c3702e451ff6f90919ef5f30fb953) // vk.Q4.x
-            mstore(add(_vk, 0x160), 0x2943e906ab4660823fe6cc999101066309f99157af7a6fa948f75bac0527237a) // vk.Q4.y
-            mstore(add(_vk, 0x180), 0x0bb76b97b2e497edbda41d64eeeb4240482861bfe02220944a1f1b92939d31b7) // vk.Q_M.x
-            mstore(add(_vk, 0x1a0), 0x2777a52a5cbeb31f3f16c2268492e233bdd5cf02fd0c4e68e404347eda2670ac) // vk.Q_M.y
-            mstore(add(_vk, 0x1c0), 0x0ec4edd551546f895838153731c4241ba099c255f4dc3d0bd88fc901cdc941bc) // vk.Q_C.x
-            mstore(add(_vk, 0x1e0), 0x09bcf0a0c13ae4e765801b6b8acc8eef3da55fb43ff5a9305dd75c66d1287a0d) // vk.Q_C.y
-            mstore(add(_vk, 0x200), 0x2bf60d0b3adda58dfe2be290fcebb58851071646f7f715a5a0063dba93168b83) // vk.Q_ARITHMETIC.x
-            mstore(add(_vk, 0x220), 0x24562762b7b7219f77188e1e0c1b4db96493b1c7e41a5f1e7c0130ab5b720f43) // vk.Q_ARITHMETIC.y
-            mstore(add(_vk, 0x240), 0x04cf13b71400ed5ba47983d1d0795124e7ea3caf4f7d24f7713fe2205e2f80df) // vk.QSORT.x
-            mstore(add(_vk, 0x260), 0x15cc2c54fdc490a08833bd1a449bb0fbf26cd0590c648b45f7d83da966d5e9e8) // vk.QSORT.y
+            mstore(add(_vk, 0x80), 0x149e839df8f43c6975d85aa1007d219354b3389f7c93c96935e531fe03d01f88) // vk.Q1.x
+            mstore(add(_vk, 0xa0), 0x15af1728559ee0f81053b668fa9978c5fc81ee84d017bc955ccfa37c19bd42a0) // vk.Q1.y
+            mstore(add(_vk, 0xc0), 0x29ba522730da8fa2a791946868afba96af78b025ba860d8e1d02e0325e677101) // vk.Q2.x
+            mstore(add(_vk, 0xe0), 0x1434909cf7d729b2f4227d83569641d90c4a72d393390825de20cea7ddad8044) // vk.Q2.y
+            mstore(add(_vk, 0x100), 0x03b90587c8746a60d96bc184e03c8469d813956caba6137040b350360357fe4f) // vk.Q3.x
+            mstore(add(_vk, 0x120), 0x211f025196191d107ae492f80f0effeb1e9242069f333d405698365df4838d43) // vk.Q3.y
+            mstore(add(_vk, 0x140), 0x0eae4a0952b07a5dbaf7750d79dae8fda3cfa4b5e7882413b6ada72c4297561e) // vk.Q4.x
+            mstore(add(_vk, 0x160), 0x0fa2558fd5e0afe53d359b1ec584eb6c0fabad27e4909227d9a4457d588b2830) // vk.Q4.y
+            mstore(add(_vk, 0x180), 0x01e7626aeb0ca204c26be5b01b3171994011b03f8966bb201303fc196c6c1a7e) // vk.Q_M.x
+            mstore(add(_vk, 0x1a0), 0x07972ee3ae6e0a0cf4978b64cd08783f42c7ce9905f1fd35da4ff6fa0e1a18e2) // vk.Q_M.y
+            mstore(add(_vk, 0x1c0), 0x03bd15837131c97d246c0aa57786e302b6d8227826104f70f56cba936a7b408e) // vk.Q_C.x
+            mstore(add(_vk, 0x1e0), 0x1a2e3be55cd01c1a4f4ef33fa96986e37c56abc06876e7f7d76229fb9f122c4c) // vk.Q_C.y
+            mstore(add(_vk, 0x200), 0x26d1d1578bb09f2f047035f103c3b32180c89b338e7d04ace8872b1154be6fb5) // vk.Q_ARITHMETIC.x
+            mstore(add(_vk, 0x220), 0x10c4691982c731ec4e2bb8216e8af8405fbe96fe8fe305ef2c3e03444fe68f85) // vk.Q_ARITHMETIC.y
+            mstore(add(_vk, 0x240), 0x1feb6cf63471a70e29caeee13eb393760c0f7d9e556327beb09a22b6b35e89f7) // vk.QSORT.x
+            mstore(add(_vk, 0x260), 0x1a834941cde87aa7a82450b4f093f149df9937db2edbdab47fa7216fbcb33580) // vk.QSORT.y
             mstore(add(_vk, 0x280), 0x21245d6c0a4d2ff12b21a825f39f30e8f8cf9b259448d111183e975828539576) // vk.Q_ELLIPTIC.x
             mstore(add(_vk, 0x2a0), 0x16a409532c8a1693536e93b6ce9920bfc2e6796e8dfe404675a0cdf6ee77ee7a) // vk.Q_ELLIPTIC.y
-            mstore(add(_vk, 0x2c0), 0x212c745dcc4a77000c8811ed40afb24af308b4df977301025f700d34da365259) // vk.Q_AUX.x
-            mstore(add(_vk, 0x2e0), 0x1f948ad81820af56f3eca2ae3d7e99f0d0a7edb380cf5ebb9f33a9cba3cc3003) // vk.Q_AUX.y
-            mstore(add(_vk, 0x300), 0x2fb17d2149521c0656a73395466a295e30cde468aca4124e34235609ede6bcf7) // vk.SIGMA1.x
-            mstore(add(_vk, 0x320), 0x2e9d5f4cb83ec03ecdd8bb1c8e8319f991e80d8b1af6f420f75ffc1851da6d77) // vk.SIGMA1.y
-            mstore(add(_vk, 0x340), 0x003841079f4625f2c236862b62391a7770a7ac22c538c4af8d70b8dba8e62a4e) // vk.SIGMA2.x
-            mstore(add(_vk, 0x360), 0x0b55d7bab80f1febf20f1faca1e71c5f917d1eab85fe03df8bcbec192f25da58) // vk.SIGMA2.y
-            mstore(add(_vk, 0x380), 0x26ee5759c003a3e371be8fbbe1cb783bce386b4583d1ee22e3abb86a1b0c59f2) // vk.SIGMA3.x
-            mstore(add(_vk, 0x3a0), 0x00678775ed828729a75de81d07419b825d017c80d7e1520f659dc23bb476cf0f) // vk.SIGMA3.y
-            mstore(add(_vk, 0x3c0), 0x282a0813e6ce49d1d2e70a13fcdc9aa4622b2218a83e8eca83e7c72747c37eea) // vk.SIGMA4.x
-            mstore(add(_vk, 0x3e0), 0x218a25ab5d221802dab140aad610b6ed71674a8cb9d899b85cafed786a778213) // vk.SIGMA4.y
+            mstore(add(_vk, 0x2c0), 0x24005a1e8120ffcb3e5fc06ff50794b9d4b0bd70eabb1f8dfb342bec8a64dd61) // vk.Q_AUX.x
+            mstore(add(_vk, 0x2e0), 0x2c18b52f602a5a9b4461872eff0712f56d128bb9364471f838d7b07f008660e3) // vk.Q_AUX.y
+            mstore(add(_vk, 0x300), 0x02497b2d5e01266cea1f1bf4d9ad66e54045b3e388066db97b9623668728f65d) // vk.SIGMA1.x
+            mstore(add(_vk, 0x320), 0x0156cae236ca46f64832b4b826804da6c7221ab5ca4cdadd53a1b787992307fe) // vk.SIGMA1.y
+            mstore(add(_vk, 0x340), 0x2673cb9276dcc16be61e4c2ec24f6a881e771a273198ab0b392c26085a5f03b4) // vk.SIGMA2.x
+            mstore(add(_vk, 0x360), 0x1384aef6995f8e632b76cce98d900e2535d92719be668a8f0e20c893c87f391a) // vk.SIGMA2.y
+            mstore(add(_vk, 0x380), 0x11d48b7fc901d1e72489d937970ee3baea2662d268f9b1c08d71820a21ac6a39) // vk.SIGMA3.x
+            mstore(add(_vk, 0x3a0), 0x126e543f1951015c8a56ff6d571e67da3cc52d2671f3ce8d258378edcfe8a8f5) // vk.SIGMA3.y
+            mstore(add(_vk, 0x3c0), 0x0b41b102b59ecae092c04a4f09755db1dc4286c3072034ca23b7f885bcfec814) // vk.SIGMA4.x
+            mstore(add(_vk, 0x3e0), 0x13bf888757f9fad73f21ab3a0ef53a286329dbf0aaaa935d1689d8554db05813) // vk.SIGMA4.y
             mstore(add(_vk, 0x400), 0x18f7cf965339d9c9d190296fa92f915767b0a8da455975f3e03fa98439fd7110) // vk.TABLE1.x
             mstore(add(_vk, 0x420), 0x0eecc02f9d44125407adbf00d56b086afd1adc5de536450afe05de382761b32f) // vk.TABLE1.y
             mstore(add(_vk, 0x440), 0x0bdfe662ea9f40f125ca5f7e99a8c6ba09b87ba8313864316745df862946c5c4) // vk.TABLE2.x
@@ -50,16 +50,16 @@ library EcdsaUltraVerificationKey {
             mstore(add(_vk, 0x4a0), 0x1fda66dfb58273345f2471dff55c51b6856241460272e64b4cc67cde65231e89) // vk.TABLE3.y
             mstore(add(_vk, 0x4c0), 0x024ccc0fcff3b515cdc97dde2fae5c516bf3c97207891801707142af02538a83) // vk.TABLE4.x
             mstore(add(_vk, 0x4e0), 0x27827250d02b7b67d084bfc52b26c722f33f75ae5098c109573bfe92b782e559) // vk.TABLE4.y
-            mstore(add(_vk, 0x500), 0x2b11866ecb5b6a1e891e14792b0c04fdb6484cc1f910410459d3a9dfcaa75435) // vk.TABLE_TYPE.x
-            mstore(add(_vk, 0x520), 0x1422d413ccda11d6519d87f9379c29ef515ff37bdd366ab1aa7fc2029263cfe7) // vk.TABLE_TYPE.y
-            mstore(add(_vk, 0x540), 0x255d46db0711edd487412cba825c76c98f793128a8ce85c35133541e4e02abb2) // vk.ID1.x
-            mstore(add(_vk, 0x560), 0x160fad047875fd62e546eae73bce930febf8477c87769a5b1063eae02638c560) // vk.ID1.y
-            mstore(add(_vk, 0x580), 0x01548254d6a72ea1faeef8e50364e8103b527e4df00944d0a0b53d231a6df578) // vk.ID2.x
-            mstore(add(_vk, 0x5a0), 0x040430723dc214dcc9d20d0ed95d24d62a5f935e6243eec6302917c1b5d2b38f) // vk.ID2.y
-            mstore(add(_vk, 0x5c0), 0x1bde6129bc64b0816a88717b9b5b2904c85e7b88c73ac1b0ba75d371ecf235fc) // vk.ID3.x
-            mstore(add(_vk, 0x5e0), 0x0f7e40cedb79332543fd5c040a0eb687b1124869437414aee4497e9d66a6ecb9) // vk.ID3.y
-            mstore(add(_vk, 0x600), 0x25fbe717194485caaf7607cd9043bc528100b7e757f957680865edf2d7794616) // vk.ID4.x
-            mstore(add(_vk, 0x620), 0x119e345a421dcd7c7c9d8ffd14843eeb1536f640d777340dc4321e59941a3ef7) // vk.ID4.y
+            mstore(add(_vk, 0x500), 0x302e6c8067a7ca14e1d75776754c1a3ad99d21056ae8e607ea66029cbe534906) // vk.TABLE_TYPE.x
+            mstore(add(_vk, 0x520), 0x07f2eb44fd686bf54e604a6b40c9151b7123db580a23c064ef703af4013dbc2f) // vk.TABLE_TYPE.y
+            mstore(add(_vk, 0x540), 0x00992a2f510c6371b9231c1d68d0e0fdbe10c5f4344de9441cc7c845afb37a1d) // vk.ID1.x
+            mstore(add(_vk, 0x560), 0x13eb38f67d8c03245e6f0655f5d40c145b2c06dd1657d8da26dc75af0cefa0f7) // vk.ID1.y
+            mstore(add(_vk, 0x580), 0x2ce905fbf9f932ae4f9b7b0feda15271b80921e9bf4e58c302ae99f1207fa4e7) // vk.ID2.x
+            mstore(add(_vk, 0x5a0), 0x2c2a6dc03599757fc625b0e55984d3fb28a954d40eb54f988b52c55936076988) // vk.ID2.y
+            mstore(add(_vk, 0x5c0), 0x0f547249b9aa5b9a951757893c059f8ed590366da4dd3ccd36aeac3069c7471f) // vk.ID3.x
+            mstore(add(_vk, 0x5e0), 0x2be2746434bfe0ccb2390357b17f8ec70ff12fc3aad4500b8d1723ec6709a170) // vk.ID3.y
+            mstore(add(_vk, 0x600), 0x19d1ed6b528ae5095d83167c3ba3578b36c7cd9249e47d10ceff352890d0938f) // vk.ID4.x
+            mstore(add(_vk, 0x620), 0x1dcd2caa39e180a497ff98414548e5de682d19fc598b3cd44242f1bb53a0e078) // vk.ID4.y
             mstore(add(_vk, 0x640), 0x00) // vk.contains_recursive_proof
             mstore(add(_vk, 0x660), 0) // vk.recursive_proof_public_input_indices
             mstore(add(_vk, 0x680), 0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1) // vk.g2_x.X.c1
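The Ultra key libraries here keep the fixed memory layout annotated in the comments (num_inputs at offset 0x20, work_root at 0x40, the commitment coordinates from 0x80 onwards); only the committed point values and the key hash are regenerated. A minimal sketch of how such a key is typically read back, assuming a hypothetical caller contract, a local import path, and an over-allocated scratch buffer (the buffer size and the _omegaInverseLoc placement are assumptions, not taken from this diff):

// VkLayoutProbe.sol -- illustrative sketch only, not part of the diff.
pragma solidity >=0.8.4;

import {EcdsaUltraVerificationKey} from "./EcdsaUltraVerificationKey.sol";

contract VkLayoutProbe {
    // Loads the key into scratch memory and reads Q1 back from the documented offsets.
    function q1() external pure returns (uint256 x, uint256 y) {
        uint256 vk;
        uint256 omegaInverseLoc;
        assembly {
            vk := mload(0x40)                 // free memory pointer
            omegaInverseLoc := add(vk, 0x700) // assumed slot past the listed fields
            mstore(0x40, add(vk, 0x720))      // over-allocate scratch space
        }
        EcdsaUltraVerificationKey.loadVerificationKey(vk, omegaInverseLoc);
        assembly {
            x := mload(add(vk, 0x80))  // vk.Q1.x
            y := mload(add(vk, 0xa0))  // vk.Q1.y
        }
    }
}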
diff --git a/barretenberg/sol/src/ultra/keys/RecursiveUltraVerificationKey.sol b/barretenberg/sol/src/ultra/keys/RecursiveUltraVerificationKey.sol
index e6e5a11f22e..1cabff016cb 100644
--- a/barretenberg/sol/src/ultra/keys/RecursiveUltraVerificationKey.sol
+++ b/barretenberg/sol/src/ultra/keys/RecursiveUltraVerificationKey.sol
@@ -1,11 +1,11 @@
-// Verification Key Hash: bfb307aa80c044280f7b66742bc68eb8075aef056ac704ffd7d5cbe1d83268fa
+// Verification Key Hash: 9e6cf5dacef11085d9ea83e98b85ebdc37749931c90443898dcd8d18f639dad8
 // SPDX-License-Identifier: Apache-2.0
 // Copyright 2022 Aztec
 pragma solidity >=0.8.4;
 
 library RecursiveUltraVerificationKey {
     function verificationKeyHash() internal pure returns (bytes32) {
-        return 0xbfb307aa80c044280f7b66742bc68eb8075aef056ac704ffd7d5cbe1d83268fa;
+        return 0x9e6cf5dacef11085d9ea83e98b85ebdc37749931c90443898dcd8d18f639dad8;
     }
 
     function loadVerificationKey(uint256 _vk, uint256 _omegaInverseLoc) internal pure {
@@ -14,34 +14,34 @@ library RecursiveUltraVerificationKey {
             mstore(add(_vk, 0x20), 0x0000000000000000000000000000000000000000000000000000000000000010) // vk.num_inputs
             mstore(add(_vk, 0x40), 0x19ddbcaf3a8d46c15c0176fbb5b95e4dc57088ff13f4d1bd84c6bfa57dcdc0e0) // vk.work_root
             mstore(add(_vk, 0x60), 0x30644259cd94e7dd5045d7a27013b7fcd21c9e3b7fa75222e7bda49b729b0401) // vk.domain_inverse
-            mstore(add(_vk, 0x80), 0x2872629d2db8cb63feef4ab8ab250d2b8bd2fbdb622942962872f05b5e742d95) // vk.Q1.x
-            mstore(add(_vk, 0xa0), 0x1948d2b7b800c7a93368e1987b7871e7df65246af142f6573596865684913cc9) // vk.Q1.y
-            mstore(add(_vk, 0xc0), 0x27c1cc0a26221de00043568e1ecb193fc349be273624f69a9c3e766470f936cb) // vk.Q2.x
-            mstore(add(_vk, 0xe0), 0x0ba47ff2bea29b40ad0a9c8650a8833662b461389862423a7a73bbb54c7d4f0e) // vk.Q2.y
-            mstore(add(_vk, 0x100), 0x11710dd57cc6fa657dccc53cc681de5fc97d324f1f0c72e20c08371ee4f65e5d) // vk.Q3.x
-            mstore(add(_vk, 0x120), 0x244fd7fe0b18f6ff86a773655b34fb76abcd5217877e7df959e3e0151000ccd5) // vk.Q3.y
-            mstore(add(_vk, 0x140), 0x065e12520e3b7419d0c3b557d7a26f24d68763744b74a922993992148cfef300) // vk.Q4.x
-            mstore(add(_vk, 0x160), 0x2d5af9ed366fe901d76b2d5dee01535679a6adec0d88d9192dfea384ad56b4ee) // vk.Q4.y
-            mstore(add(_vk, 0x180), 0x2c2cf9cdde5d02feb8f244b62f37d794c2dff66622a55c77188f30aa0eb96e2a) // vk.Q_M.x
-            mstore(add(_vk, 0x1a0), 0x188afa06880347bd0a7181a6ee022742889e5c325df2a46ed325816c20472717) // vk.Q_M.y
-            mstore(add(_vk, 0x1c0), 0x2db1866af181147eb2deb1a81a237a80ede43cd670afed4ed707f5452043e41c) // vk.Q_C.x
-            mstore(add(_vk, 0x1e0), 0x07686d5c3d3166a9f171b4c6ea91e787b0988b78a4bc833894f0ff38d9098ea7) // vk.Q_C.y
-            mstore(add(_vk, 0x200), 0x1a906065dbb4b466aa34c76b50d49941b8f0cc5257ae7825093eb555cf093ecd) // vk.Q_ARITHMETIC.x
-            mstore(add(_vk, 0x220), 0x16be7a84788d5d7f80fa66a850bd2a61977180e4caaaa44193458847e96f4df3) // vk.Q_ARITHMETIC.y
-            mstore(add(_vk, 0x240), 0x12c6fa152ae4645fab9ce20db8ea7dee0eac2cc6562840f4d025036e7a06cdd0) // vk.QSORT.x
-            mstore(add(_vk, 0x260), 0x26a190dcd1abf61afc12ff56da242a29c196d1f75b3c0c6922dc026702489c2b) // vk.QSORT.y
-            mstore(add(_vk, 0x280), 0x2d7d1a26c4e4d806f73753d6f33c887df548d60adfc903867189947022643261) // vk.Q_ELLIPTIC.x
-            mstore(add(_vk, 0x2a0), 0x2d71cb325bb3a6af51e99830f0700d4ccedacbfd21ec7dffebd7e9c8cab386a6) // vk.Q_ELLIPTIC.y
-            mstore(add(_vk, 0x2c0), 0x30094b60cb5228b662501dd1942ea7d989694530497df30d9158b3933773f7f0) // vk.Q_AUX.x
-            mstore(add(_vk, 0x2e0), 0x04f2cc1c0aa3a57dc8173be9dec656884aec11a6f154eb2d0a31e7a5b3bc65bb) // vk.Q_AUX.y
-            mstore(add(_vk, 0x300), 0x14c1d865fc203cf3248b978de69ca2cbed717176a83931dcaaabddf84aac1125) // vk.SIGMA1.x
-            mstore(add(_vk, 0x320), 0x03413319a4878452335c3c679a952a71dad895d92acd1db566a8b72726b34f34) // vk.SIGMA1.y
-            mstore(add(_vk, 0x340), 0x01130e52ac74730e6fad05aeb3ee1e3e14feba55956bd944159ce65e854f6bb4) // vk.SIGMA2.x
-            mstore(add(_vk, 0x360), 0x0ee3c32db6c8483ae225f58ffce0c57e757852401b26f666506619529e6d830e) // vk.SIGMA2.y
-            mstore(add(_vk, 0x380), 0x1cdf6e1d1a2c00394de5e91aeafd134cf75f2c8b8014bf21d6074c2f66aff68c) // vk.SIGMA3.x
-            mstore(add(_vk, 0x3a0), 0x24fe45588eeb54159a8e9aa2e3c3a3ab90f78f0e509dfcc3cf3c03fffd5e2693) // vk.SIGMA3.y
-            mstore(add(_vk, 0x3c0), 0x2294016984ba84bd39822af4780b4fa91f06ac9f46d081c6e4639dc0091eff2b) // vk.SIGMA4.x
-            mstore(add(_vk, 0x3e0), 0x2070976b4eb47279553017922492bd0e8b78e57dd8bf424309f133fd02e9f8b5) // vk.SIGMA4.y
+            mstore(add(_vk, 0x80), 0x02c6f00fd259ba9440c68d211969bbd81509b234882d65fc79ee90fdcb6ccfda) // vk.Q1.x
+            mstore(add(_vk, 0xa0), 0x07f4fc84032451c171ea7150385b54a383fb083cc0c93895e2ef931e8e448345) // vk.Q1.y
+            mstore(add(_vk, 0xc0), 0x02b407e4c824960a965b5193ad8c6ccf4baaa4c99da5d11b13a2d6af52973ef7) // vk.Q2.x
+            mstore(add(_vk, 0xe0), 0x021fe5c3dd23b42f696dcd08659b8aa403c8e927f8c6e7b1446f4e9205c0a1c2) // vk.Q2.y
+            mstore(add(_vk, 0x100), 0x14f63403b60fb3ccf8325ec20e463e1daa492faf4d0151a8e7366f07c68f1d83) // vk.Q3.x
+            mstore(add(_vk, 0x120), 0x207cbbaffb34a0fe5eba27fd30f67e5389b1de65b703ccb78726831208ab600d) // vk.Q3.y
+            mstore(add(_vk, 0x140), 0x00ef12b054f19d72f2a6d0e628c6387026afd8a8924eb144ccc9948d4f6c5549) // vk.Q4.x
+            mstore(add(_vk, 0x160), 0x0a1cbb57818ceec1d15878315046a7db1238d292307cabafbb97f569df6dcefa) // vk.Q4.y
+            mstore(add(_vk, 0x180), 0x0d098b0bcd6db60c47f8e7e9eb1c072972deb39b294907cbc353352ebc2bea85) // vk.Q_M.x
+            mstore(add(_vk, 0x1a0), 0x0ff57407d8b18914e30d8583a77f67732f8b2762429a712c55b0c00fb83fe1c2) // vk.Q_M.y
+            mstore(add(_vk, 0x1c0), 0x2b01c45f214633bfaea1589083ab9a3a0915a6da362baa3151b1a0e80fb79160) // vk.Q_C.x
+            mstore(add(_vk, 0x1e0), 0x0392d6380d2912befda8d98bcddd6050683a814bb84eb7f57e28176033783f11) // vk.Q_C.y
+            mstore(add(_vk, 0x200), 0x24a6e759b9d12a53f809367cb3cbd00d96dfaa3af623e984bd986886447b642d) // vk.Q_ARITHMETIC.x
+            mstore(add(_vk, 0x220), 0x265e4202aa18f537a514281d72aaea8ab10090da270d8f9901363b4f48bc0610) // vk.Q_ARITHMETIC.y
+            mstore(add(_vk, 0x240), 0x04e5e383b53cf0f3eb3e824dcbc95d7fbb2ca7770bf92a3e86b652a425534714) // vk.QSORT.x
+            mstore(add(_vk, 0x260), 0x1bb4418c97c423508baf8d7825f2f41066dc4769dc4c9643ebddca0a71b71a87) // vk.QSORT.y
+            mstore(add(_vk, 0x280), 0x00a2e0e8c69ad29b60904f91a9db016a32a3de05f6ccdf024b5f149e8388484c) // vk.Q_ELLIPTIC.x
+            mstore(add(_vk, 0x2a0), 0x24be2bffbba65b40f4eeabba7a3660511baad3936c4ec40a6f9e20d194ec3a07) // vk.Q_ELLIPTIC.y
+            mstore(add(_vk, 0x2c0), 0x28725b01fa9c481b39aef64f5f54f9f967fd976b7ff4be45a9ca50f7500fef4c) // vk.Q_AUX.x
+            mstore(add(_vk, 0x2e0), 0x264e3e4c4529b321c407f802c173d2fb73b03e8ce09fe3de3c11f84b87b99d32) // vk.Q_AUX.y
+            mstore(add(_vk, 0x300), 0x1ec8ec2e5a6f36a00042f1199bad7fb25e950c9ce97f59777fd1739f422ce750) // vk.SIGMA1.x
+            mstore(add(_vk, 0x320), 0x002526bd09111cbc4d6f6c6e200f627e7ae60fb59bd5f1357d82f386b1009dc9) // vk.SIGMA1.y
+            mstore(add(_vk, 0x340), 0x0cc83ed6a722c67efdd44d5b6de2490621fd59c7c1c7a1416c99a6dff933e5d9) // vk.SIGMA2.x
+            mstore(add(_vk, 0x360), 0x01eb69a024162e13bc58e174cef5c0d2c7a12bdf3619f78010cfe09cd165c19d) // vk.SIGMA2.y
+            mstore(add(_vk, 0x380), 0x257e795ed0c6598cb79a148110eb2ce1dfb2a6378267e0a33f3c1d4dd7aadbcc) // vk.SIGMA3.x
+            mstore(add(_vk, 0x3a0), 0x01d596a895131eb6dbf6c9a89ddd9321ec5ed272d921b4edfed20b8f8ddc80cb) // vk.SIGMA3.y
+            mstore(add(_vk, 0x3c0), 0x167af14f050f637263e94a86a2408a14178c7ea304ffaee2db4b2d20e173832b) // vk.SIGMA4.x
+            mstore(add(_vk, 0x3e0), 0x211fb82fbb784f81f12914fbdb876c4a4b1f3670bf7aa291f661f7541bc8779c) // vk.SIGMA4.y
             mstore(add(_vk, 0x400), 0x09796190fd3ba909c6530c89811df9b5b4f5f2fe6501ec21dd864b20673fc02c) // vk.TABLE1.x
             mstore(add(_vk, 0x420), 0x00b9c2423e310caa43e1eb83b55f53977fccbed85422df8935635d77d146bf39) // vk.TABLE1.y
             mstore(add(_vk, 0x440), 0x217dad26ccc0c543ec5750513e9365a5cae8164b08d364efcf4b5890ff05f334) // vk.TABLE2.x
@@ -50,16 +50,16 @@ library RecursiveUltraVerificationKey {
             mstore(add(_vk, 0x4a0), 0x3032b9ff096a43ce326cc63ffc6a86dcb913fb1f7700939f5304f6c6beb24574) // vk.TABLE3.y
             mstore(add(_vk, 0x4c0), 0x1f4c58502ca713ed0bffb4ff31ed55e557e83a37d31b8e703aa9219d6158e2d2) // vk.TABLE4.x
             mstore(add(_vk, 0x4e0), 0x0b0d5ed5432c5e7b56344c1d26ce0d9f632e8f8aa52505d6c89f6da89f357fa8) // vk.TABLE4.y
-            mstore(add(_vk, 0x500), 0x14cc772886149da159eca12ceab8b0cb5d63043e354699ef54bc13e2aff0a57f) // vk.TABLE_TYPE.x
-            mstore(add(_vk, 0x520), 0x1874e1387aa83a45cc599a97c8027a53c92c0934c1cb59537aeda8b9edf8b019) // vk.TABLE_TYPE.y
-            mstore(add(_vk, 0x540), 0x16fe41844f95ddeb66b8d54eae7dcb87df2fafe782e09dd0034e016474742136) // vk.ID1.x
-            mstore(add(_vk, 0x560), 0x05f0dbcfa556fd26f1563cbdf8f8d2d5f92ad46ff561655383bf5efa341b58af) // vk.ID1.y
-            mstore(add(_vk, 0x580), 0x0e2e21b159efae413496f5987159ae35a22970d2e81865c2d19ae32170305b4d) // vk.ID2.x
-            mstore(add(_vk, 0x5a0), 0x01c6cd8f3faba6127b043644fa40aec9bc07ef5f5dd1fc73abadc8ff25b28bb4) // vk.ID2.y
-            mstore(add(_vk, 0x5c0), 0x0cf6b92268f315107f92225c8c7853c89907f4f016727c8e038b74aad81585af) // vk.ID3.x
-            mstore(add(_vk, 0x5e0), 0x1955c97edb1cd3637736cce7a911b64626fcdd5eee63a882f9058d901713b5e3) // vk.ID3.y
-            mstore(add(_vk, 0x600), 0x012ef051b06db65bdf78950b5d3ca6b1bdfef77e381e9220aa25d72c63518f27) // vk.ID4.x
-            mstore(add(_vk, 0x620), 0x265f2a5c1d100fdc81183e099e52de0751578056574fb56fc4c7814a31e5fc86) // vk.ID4.y
+            mstore(add(_vk, 0x500), 0x0869d6ec86b39958a4a10ed67954dc8931a1e5ee901099071c3c0684dd0eddde) // vk.TABLE_TYPE.x
+            mstore(add(_vk, 0x520), 0x1fc9d5e1b18c601f367b9551c00f5e541a48aa562cd0adb4369b51a7e99395b6) // vk.TABLE_TYPE.y
+            mstore(add(_vk, 0x540), 0x205b387095b6e538a6169c93c9db7d85ec219e2f0304b449f8849f5fde2c659f) // vk.ID1.x
+            mstore(add(_vk, 0x560), 0x07d8d408db8702ba4db7fec434fdee2b944313f72b0f94a9dcec74e7b715b3f8) // vk.ID1.y
+            mstore(add(_vk, 0x580), 0x2c758668e1cbf0572b139911af3f553c7898f7f07ffdcc58484a1a0acd14a03e) // vk.ID2.x
+            mstore(add(_vk, 0x5a0), 0x159322db7ac7485c5be7ce811a773c5fda9e26b0c47139eda1af6103c5c21b1c) // vk.ID2.y
+            mstore(add(_vk, 0x5c0), 0x026ba63c8620f00298a42a356b18392228d92c4301e8c51e44a3a2e14a6ebc89) // vk.ID3.x
+            mstore(add(_vk, 0x5e0), 0x2a962181e6a7df5a05d1750e7a22b6ec21fc84d8de08524aa75c4ee8f646bd0c) // vk.ID3.y
+            mstore(add(_vk, 0x600), 0x2c81aa9e4f466e56d2a6f1a971d431a487379970bb892424e12a0c71c41479b0) // vk.ID4.x
+            mstore(add(_vk, 0x620), 0x2e662e641087ed19b9ff866748197ab8a871deded79d2835f32e4bbadef1a889) // vk.ID4.y
             mstore(add(_vk, 0x640), 0x01) // vk.contains_recursive_proof
             mstore(add(_vk, 0x660), 0) // vk.recursive_proof_public_input_indices
             mstore(add(_vk, 0x680), 0x260e01b251f6f1c7e7ff4e580791dee8ea51d87a358e038b4efe30fac09383c1) // vk.g2_x.X.c1