Skip to content

Commit

Permalink
fix: integration l1 publisher test
Browse files Browse the repository at this point in the history
  • Loading branch information
LHerskind committed May 5, 2023
1 parent 0c949d4 commit dfe0448
Show file tree
Hide file tree
Showing 3 changed files with 98 additions and 55 deletions.
5 changes: 4 additions & 1 deletion l1-contracts/test/Decoder.t.sol
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,10 @@ import {Decoder} from "@aztec3/core/Decoder.sol";
import {Rollup} from "@aztec3/core/Rollup.sol";
import {DecoderHelper} from "./DecoderHelper.sol";

// Blocks are generated using the `solo_block_builder.test.ts` tests: "builds an empty L2 block" and "builds a mixed L2 block"
/**
* Blocks are generated using the `integration_l1_publisher.test.ts` tests.
 * The main use of these tests is shorter cycles when updating the decoder contract.
*/
contract DecoderTest is Test {
Rollup internal rollup;
DecoderHelper internal helper;
Expand Down
130 changes: 94 additions & 36 deletions yarn-project/end-to-end/src/integration_l1_publisher.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,12 @@ import { EthAddress } from '@aztec/foundation/eth-address';
import { Fr } from '@aztec/foundation/fields';
import {
KERNEL_NEW_COMMITMENTS_LENGTH,
KERNEL_NEW_L2_TO_L1_MSGS_LENGTH,
KERNEL_NEW_NULLIFIERS_LENGTH,
KernelCircuitPublicInputs,
NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
PublicDataRead,
PublicDataTransition,
STATE_TRANSITIONS_LENGTH,
range,
} from '@aztec/circuits.js';
import { fr, makeNewContractData, makeProof } from '@aztec/circuits.js/factories';
Expand All @@ -26,7 +27,7 @@ import {
makePublicTx,
} from '@aztec/sequencer-client';
import { MerkleTreeOperations, MerkleTrees } from '@aztec/world-state';
import { beforeAll, describe, expect, it } from '@jest/globals';
import { beforeEach, describe, expect, it } from '@jest/globals';
import { default as levelup } from 'levelup';
import {
Address,
Expand All @@ -53,6 +54,9 @@ const logger = createDebugLogger('aztec:integration_l1_publisher');

const config = getConfigEnvVars();

// @todo @LHerskind Figure out why l1 -> l2 messages are breaking >1 consecutive blocks for mixed blocks.
const numberOfConsecutiveBlocks = 1;

describe('L1Publisher integration', () => {
let publicClient: PublicClient<HttpTransport, Chain>;

Expand All @@ -69,7 +73,7 @@ describe('L1Publisher integration', () => {
let builder: SoloBlockBuilder;
let builderDb: MerkleTreeOperations;

beforeAll(async () => {
beforeEach(async () => {
const deployerAccount = privateKeyToAccount(deployerPK);
const {
rollupAddress: rollupAddress_,
Expand Down Expand Up @@ -122,46 +126,104 @@ describe('L1Publisher integration', () => {
return makeEmptyProcessedTxFromHistoricTreeRoots(historicTreeRoots);
};

const makeContractDeployProcessedTx = async (seed = 0x1) => {
const tx = await makeEmptyProcessedTx();
tx.data.end.newContracts = [makeNewContractData(seed + 0x1000)];
return tx;
};
const makeBloatedProcessedTx = async (seed = 0x1) => {
const publicTx = makePublicTx(seed);
const kernelOutput = KernelCircuitPublicInputs.empty();
kernelOutput.constants.historicTreeRoots = await getCombinedHistoricTreeRoots(builderDb);
kernelOutput.end.stateTransitions = range(STATE_TRANSITIONS_LENGTH, seed + 0x500).map(
i => new PublicDataTransition(fr(i), fr(0), fr(i + 10)),
);

const tx = await makeProcessedTx(publicTx, kernelOutput, makeProof());

const makePrivateProcessedTx = async (seed = 0x1) => {
const tx = await makeEmptyProcessedTx();
tx.data.end.newCommitments = range(KERNEL_NEW_COMMITMENTS_LENGTH, seed + 0x100).map(fr);
tx.data.end.newNullifiers = range(KERNEL_NEW_NULLIFIERS_LENGTH, seed + 0x200).map(fr);
tx.data.end.newL2ToL1Msgs = range(KERNEL_NEW_L2_TO_L1_MSGS_LENGTH, seed + 0x300).map(fr);
tx.data.end.newContracts = [makeNewContractData(seed + 0x1000)];

return tx;
};

const makePublicCallProcessedTx = async (seed = 0x1) => {
const publicTx = makePublicTx(seed);
const kernelOutput = KernelCircuitPublicInputs.empty();
kernelOutput.end.stateReads[0] = new PublicDataRead(fr(1), fr(0));
kernelOutput.end.stateTransitions[0] = new PublicDataTransition(fr(2), fr(0), fr(12));
kernelOutput.constants.historicTreeRoots = await getCombinedHistoricTreeRoots(builderDb);
return await makeProcessedTx(publicTx, kernelOutput, makeProof());
};
it(`Build ${numberOfConsecutiveBlocks} blocks of 4 bloated txs building on each other`, async () => {
const stateInRollup_ = await rollup.read.rollupStateHash();
expect(hexStringToBuffer(stateInRollup_.toString())).toEqual(Buffer.alloc(32, 0));

const blockNumber = await publicClient.getBlockNumber();

for (let i = 0; i < numberOfConsecutiveBlocks; i++) {
const l1ToL2Messages = range(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, 128 * i + 1 + 0x400).map(fr);
const txs = [
await makeBloatedProcessedTx(128 * i + 32),
await makeBloatedProcessedTx(128 * i + 64),
await makeBloatedProcessedTx(128 * i + 96),
await makeBloatedProcessedTx(128 * i + 128),
];
const [block] = await builder.buildL2Block(1 + i, txs, l1ToL2Messages);

// Useful for sol generation
const encoded = block.encode();
console.log(`Size (${encoded.length}): ${encoded.toString('hex')}`);
console.log(`calldata hash: 0x${block.getCalldataHash().toString('hex')}`);
console.log(`l1 to l2 message hash: 0x${block.getL1ToL2MessagesHash().toString('hex')}`);
console.log(`start state hash: 0x${block.getStartStateHash().toString('hex')}`);
console.log(`end state hash: 0x${block.getEndStateHash().toString('hex')}`);
console.log(`public data hash: 0x${block.getPublicInputsHash().toBuffer().toString('hex')}`);

await publisher.processL2Block(block);

const logs = await publicClient.getLogs({
address: rollupAddress,
event: getAbiItem({
abi: RollupAbi,
name: 'L2BlockProcessed',
}),
fromBlock: blockNumber + 1n,
});
expect(logs).toHaveLength(i + 1);
expect(logs[i].args.blockNum).toEqual(BigInt(i + 1));

const ethTx = await publicClient.getTransaction({
hash: logs[i].transactionHash!,
});

it('Build 2 blocks of 4 txs building on each other', async () => {
const expectedData = encodeFunctionData({
abi: RollupAbi,
functionName: 'process',
args: [`0x${l2Proof.toString('hex')}`, `0x${block.encode().toString('hex')}`],
});
expect(ethTx.input).toEqual(expectedData);

const decoderArgs = [`0x${block.encode().toString('hex')}`] as const;
const decodedHashes = await decoderHelper.read.computeDiffRootAndMessagesHash(decoderArgs);
const decodedRes = await decoderHelper.read.decode(decoderArgs);
const stateInRollup = await rollup.read.rollupStateHash();

expect(block.number).toEqual(Number(decodedRes[0]));
expect(block.getStartStateHash()).toEqual(hexStringToBuffer(decodedRes[1].toString()));
expect(block.getEndStateHash()).toEqual(hexStringToBuffer(decodedRes[2].toString()));
expect(block.getEndStateHash()).toEqual(hexStringToBuffer(stateInRollup.toString()));
expect(block.getPublicInputsHash().toBuffer()).toEqual(hexStringToBuffer(decodedRes[3].toString()));
expect(block.getCalldataHash()).toEqual(hexStringToBuffer(decodedHashes[0].toString()));
expect(block.getL1ToL2MessagesHash()).toEqual(hexStringToBuffer(decodedHashes[1].toString()));
}
}, 60_000);

it(`Build ${numberOfConsecutiveBlocks} blocks of 4 empty txs building on each other`, async () => {
const stateInRollup_ = await rollup.read.rollupStateHash();
expect(hexStringToBuffer(stateInRollup_.toString())).toEqual(Buffer.alloc(32, 0));

for (let i = 0; i < 2; i++) {
// @todo Should have advanced txs as well instead of these simple transactions.
// @todo Should have messages l1 -> l2
const blockNumber = await publicClient.getBlockNumber();

const txsLeft = [await makePrivateProcessedTx(i + 1), await makePublicCallProcessedTx(i + 1)];
const txsRight = [await makeContractDeployProcessedTx(i + 1), await makeEmptyProcessedTx()];
for (let i = 0; i < numberOfConsecutiveBlocks; i++) {
const l1ToL2Messages = new Array(NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP).fill(new Fr(0n));

// Actually build a block!
const txs = [...txsLeft, ...txsRight];
const txs = [
await makeEmptyProcessedTx(),
await makeEmptyProcessedTx(),
await makeEmptyProcessedTx(),
await makeEmptyProcessedTx(),
];
const [block] = await builder.buildL2Block(1 + i, txs, l1ToL2Messages);

// Now we can use the block we built!
const blockNumber = await publicClient.getBlockNumber();
await publisher.processL2Block(block);

const logs = await publicClient.getLogs({
Expand All @@ -172,11 +234,11 @@ describe('L1Publisher integration', () => {
}),
fromBlock: blockNumber + 1n,
});
expect(logs).toHaveLength(1);
expect(logs[0].args.blockNum).toEqual(BigInt(i + 1));
expect(logs).toHaveLength(i + 1);
expect(logs[i].args.blockNum).toEqual(BigInt(i + 1));

const ethTx = await publicClient.getTransaction({
hash: logs[0].transactionHash!,
hash: logs[i].transactionHash!,
});

const expectedData = encodeFunctionData({
Expand All @@ -191,17 +253,13 @@ describe('L1Publisher integration', () => {
const decodedRes = await decoderHelper.read.decode(decoderArgs);
const stateInRollup = await rollup.read.rollupStateHash();

// @note There seems to be something wrong here. The Bytes32 returned are actually strings :(
expect(block.number).toEqual(Number(decodedRes[0]));
expect(block.getStartStateHash()).toEqual(hexStringToBuffer(decodedRes[1].toString()));
expect(block.getEndStateHash()).toEqual(hexStringToBuffer(decodedRes[2].toString()));
expect(block.getEndStateHash()).toEqual(hexStringToBuffer(stateInRollup.toString()));
expect(block.getPublicInputsHash().toBuffer()).toEqual(hexStringToBuffer(decodedRes[3].toString()));
expect(block.getCalldataHash()).toEqual(hexStringToBuffer(decodedHashes[0].toString()));
expect(block.getL1ToL2MessagesHash()).toEqual(hexStringToBuffer(decodedHashes[1].toString()));

// @todo Broken if making two blocks in a row...
return;
}
}, 60_000);
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -344,15 +344,6 @@ describe('sequencer/solo_block_builder', () => {

const [l2Block] = await builder.buildL2Block(1, txs, mockL1ToL2Messages);
expect(l2Block.number).toEqual(1);
/*
Useful for sol generation
const encoded = l2Block.encode();
console.log(`Size (${encoded.length}): ${encoded.toString('hex')}`);
console.log(`calldata hash: 0x${l2Block.getCalldataHash().toString('hex')}`);
console.log(`l1 to l2 message hash: 0x${l2Block.getL1ToL2MessagesHash().toString('hex')}`);
console.log(`start state hash: 0x${l2Block.getStartStateHash().toString('hex')}`);
console.log(`end state hash: 0x${l2Block.getEndStateHash().toString('hex')}`);
console.log(`public data hash: 0x${l2Block.getPublicInputsHash().toBuffer().toString('hex')}`);*/
}, 10_000);

it('builds a mixed L2 block', async () => {
Expand All @@ -367,15 +358,6 @@ describe('sequencer/solo_block_builder', () => {

const [l2Block] = await builder.buildL2Block(1, txs, l1ToL2Messages);
expect(l2Block.number).toEqual(1);
/*
Useful for sol generation
const encoded = l2Block.encode();
console.log(`Size (${encoded.length}): ${encoded.toString('hex')}`);
console.log(`calldata hash: 0x${l2Block.getCalldataHash().toString('hex')}`);
console.log(`l1 to l2 message hash: 0x${l2Block.getL1ToL2MessagesHash().toString('hex')}`);
console.log(`start state hash: 0x${l2Block.getStartStateHash().toString('hex')}`);
console.log(`end state hash: 0x${l2Block.getEndStateHash().toString('hex')}`);
console.log(`public data hash: 0x${l2Block.getPublicInputsHash().toBuffer().toString('hex')}`);*/
}, 20_000);

it('builds an L2 block with private and public txs', async () => {
Expand Down

0 comments on commit dfe0448

Please sign in to comment.