Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Synclayer Batch Aggregation #470

Open
wants to merge 18 commits into
base: dev
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions l1-contracts/contracts/common/interfaces/IBatchAggregator.sol
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
// SPDX-License-Identifier: MIT

pragma solidity 0.8.24;

/// @title Batch aggregator interface (L1 side)
/// @notice Mirror of the L2 system contract that accumulates hyperchain batch
/// pubdata so it can be forwarded to the lower layer in aggregate.
interface IBatchAggregator {
    /// @notice Records the pubdata of a single committed batch.
    /// @param _totalL2ToL1PubdataAndStateDiffs Packed logs, messages, bytecodes
    /// and (compressed + uncompressed) state diffs of the batch.
    /// @param chainId The hyperchain the batch belongs to.
    /// @param batchNumber The number of the committed batch.
    function commitBatch(
        bytes calldata _totalL2ToL1PubdataAndStateDiffs,
        uint256 chainId,
        uint256 batchNumber
    ) external;

    /// @notice Returns all accumulated data ABI-encoded and resets the aggregator.
    function returnBatchesAndClearState() external returns (bytes memory);
}
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,10 @@ import {UnsafeBytes} from "../../../common/libraries/UnsafeBytes.sol";
import {L2_BOOTLOADER_ADDRESS, L2_TO_L1_MESSENGER_SYSTEM_CONTRACT_ADDR, L2_SYSTEM_CONTEXT_SYSTEM_CONTRACT_ADDR, L2_PUBDATA_CHUNK_PUBLISHER_ADDR} from "../../../common/L2ContractAddresses.sol";
import {PubdataPricingMode} from "../ZkSyncHyperchainStorage.sol";
import {IStateTransitionManager} from "../../IStateTransitionManager.sol";
import {IBatchAggregator} from "../../../common/interfaces/IBatchAggregator.sol";

// TODO: replace with actual system address
address constant BATCH_AGGREGATOR_ADDRESS = 0x0000000000000000000000000000000000009012;

// While formally the following import is not used, it is needed to inherit documentation from it
import {IZkSyncHyperchainBase} from "../../chain-interfaces/IZkSyncHyperchainBase.sol";
Expand All @@ -32,7 +36,7 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor {
StoredBatchInfo memory _previousBatch,
CommitBatchInfo calldata _newBatch,
bytes32 _expectedSystemContractUpgradeTxHash
) internal view returns (StoredBatchInfo memory) {
) internal returns (StoredBatchInfo memory) {
require(_newBatch.batchNumber == _previousBatch.batchNumber + 1, "f"); // only commit next batch

uint8 pubdataSource = uint8(bytes1(_newBatch.pubdataCommitments[0]));
Expand Down Expand Up @@ -69,6 +73,14 @@ contract ExecutorFacet is ZkSyncHyperchainBase, IExecutor {
.pubdataCommitments
.length]
);
} else if (pubdataSource == uint8(PubdataSource.ForwardedMessage)) {
// Similar to Calldata, forwards packed pubdataCommitments of hyperchains to the lower layer
// Add batch to BatchAggregator
IBatchAggregator(BATCH_AGGREGATOR_ADDRESS).commitBatch(
_newBatch.pubdataCommitments,
0,
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

just in case, s.chainId is available in storage

_newBatch.batchNumber
);
}

require(_previousBatch.batchHash == logOutput.previousBatchHash, "l");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,8 @@ enum SystemLogKey {
/// @dev Enum used to determine the source of pubdata. At first we will support calldata and blobs but this can be extended.
enum PubdataSource {
    // Pubdata is posted as calldata.
    Calldata,
    // Pubdata is posted via data blobs.
    Blob,
    // Pubdata is forwarded to the lower layer for batch aggregation.
    ForwardedMessage
}

struct LogProcessingOutput {
Expand Down
202 changes: 202 additions & 0 deletions system-contracts/contracts/BatchAggregator.sol
Original file line number Diff line number Diff line change
@@ -0,0 +1,202 @@
// SPDX-License-Identifier: MIT

pragma solidity 0.8.20;

import {UnsafeBytesCalldata} from "./libraries/UnsafeBytesCalldata.sol";
import {IBatchAggregator, COMPRESSED_STATE_DIFF_SIZE} from "./interfaces/IBatchAggregator.sol";
import {OPERATION_BITMASK, LENGTH_BITS_OFFSET, MAX_ENUMERATION_INDEX_SIZE} from "./interfaces/ICompressor.sol";
import {ISystemContract} from "./interfaces/ISystemContract.sol";
import {L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH, L2_TO_L1_LOG_SERIALIZE_SIZE, STATE_DIFF_COMPRESSION_VERSION_NUMBER} from "./interfaces/IL1Messenger.sol";

Check failure on line 9 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH is not used

Check failure on line 9 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH is not used

Check failure on line 9 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name L2_L1_LOGS_TREE_DEFAULT_LEAF_HASH is not used
import {SystemLogKey, SYSTEM_CONTEXT_CONTRACT, KNOWN_CODE_STORAGE_CONTRACT, COMPRESSOR_CONTRACT, BATCH_AGGREGATOR, STATE_DIFF_ENTRY_SIZE, L2_TO_L1_LOGS_MERKLE_TREE_LEAVES, PUBDATA_CHUNK_PUBLISHER, COMPUTATIONAL_PRICE_FOR_PUBDATA} from "./Constants.sol";

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name SystemLogKey is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name SYSTEM_CONTEXT_CONTRACT is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name KNOWN_CODE_STORAGE_CONTRACT is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name COMPRESSOR_CONTRACT is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name BATCH_AGGREGATOR is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name PUBDATA_CHUNK_PUBLISHER is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name COMPUTATIONAL_PRICE_FOR_PUBDATA is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name SystemLogKey is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name SYSTEM_CONTEXT_CONTRACT is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name KNOWN_CODE_STORAGE_CONTRACT is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name COMPRESSOR_CONTRACT is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name BATCH_AGGREGATOR is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name PUBDATA_CHUNK_PUBLISHER is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name COMPUTATIONAL_PRICE_FOR_PUBDATA is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name SystemLogKey is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name SYSTEM_CONTEXT_CONTRACT is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name KNOWN_CODE_STORAGE_CONTRACT is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name COMPRESSOR_CONTRACT is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name BATCH_AGGREGATOR is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name PUBDATA_CHUNK_PUBLISHER is not used

Check failure on line 10 in system-contracts/contracts/BatchAggregator.sol

View workflow job for this annotation

GitHub Actions / lint

imported name COMPUTATIONAL_PRICE_FOR_PUBDATA is not used

contract BatchAggregator is IBatchAggregator, ISystemContract {
    using UnsafeBytesCalldata for bytes;

    /// @dev Unused; kept only to preserve the storage layout.
    /// NOTE(review): drop before deployment if the layout is not yet frozen.
    bytes[] batchStorage;
    /// @dev Scratch list of per-chain ABI-encoded payloads built by `returnBatchesAndClearState`.
    bytes[] chainData;
    /// @dev chainId => raw message section of every committed batch.
    mapping(uint256 => bytes[]) messageStorage;
    /// @dev chainId => raw L2->L1 log section of every committed batch.
    mapping(uint256 => bytes[]) logStorage;
    /// @dev chainId => derived key => latest value written to that key.
    mapping(uint256 => mapping(bytes32 => uint256)) stateDiffStorage;
    /// @dev chainId => derived key => whether the key is already listed in `touchedKeys`.
    mapping(uint256 => mapping(bytes32 => bool)) keyStatus;
    /// @dev chainId => keys written since the last flush, in first-touch order.
    mapping(uint256 => bytes32[]) touchedKeys;
    /// @dev chainId => raw bytecode section of every committed batch.
    mapping(uint256 => bytes[]) bytecodeStorage;
    /// @dev chainId => whether the chain is already listed in `chainList`.
    mapping(uint256 => bool) chainSet;
    /// @dev Chains that have pending data to flush.
    uint256[] chainList;

    /// @dev Registers `chainId` in `chainList` exactly once.
    function addChain(uint256 chainId) internal {
        if (!chainSet[chainId]) {
            chainList.push(chainId);
            chainSet[chainId] = true;
        }
    }

    /// @dev Marks `derivedKey` as touched for `chainId` exactly once.
    function _touchKey(uint256 chainId, bytes32 derivedKey) internal {
        if (!keyStatus[chainId][derivedKey]) {
            keyStatus[chainId][derivedKey] = true;
            touchedKeys[chainId].push(derivedKey);
        }
    }

    /// @notice Parses one hyperchain batch pubdata blob and accumulates its
    /// logs, messages, bytecodes and state diffs under `chainId`.
    /// @param _totalL2ToL1PubdataAndStateDiffs Packed pubdata: logs, messages,
    /// bytecodes, then compressed + uncompressed state diffs.
    /// @param chainId The hyperchain the batch belongs to.
    /// @dev The third parameter (`batchNumber`) is not consumed yet; its name
    /// is commented out to silence the unused-variable lint while keeping the
    /// interface signature intact.
    function commitBatch(
        bytes calldata _totalL2ToL1PubdataAndStateDiffs,
        uint256 chainId,
        uint256 /* batchNumber */
    ) external {
        addChain(chainId);

        uint256 calldataPtr = 0;

        /// Logs: 4-byte count followed by fixed-size serialized log entries.
        uint32 numberOfL2ToL1Logs = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4]));
        require(numberOfL2ToL1Logs <= L2_TO_L1_LOGS_MERKLE_TREE_LEAVES, "Too many L2->L1 logs");
        logStorage[chainId].push(
            _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr +
                4 +
                numberOfL2ToL1Logs *
                L2_TO_L1_LOG_SERIALIZE_SIZE]
        );
        calldataPtr += 4 + L2_TO_L1_LOG_SERIALIZE_SIZE * numberOfL2ToL1Logs;

        /// Messages: 4-byte count, then for each message a 4-byte length
        /// prefix and a variable-size body. The previous code sliced only
        /// `4 + numberOfMessages * 4` bytes, i.e. it captured the length
        /// prefixes and truncated every message body; walk the real lengths
        /// instead (same scheme the bytecode section below uses).
        uint32 numberOfMessages = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4]));
        uint256 messageSliceStart = calldataPtr;
        calldataPtr += 4;
        for (uint256 i = 0; i < numberOfMessages; ++i) {
            uint32 currentMessageLength = uint32(
                bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])
            );
            calldataPtr += 4 + currentMessageLength;
        }
        messageStorage[chainId].push(_totalL2ToL1PubdataAndStateDiffs[messageSliceStart:calldataPtr]);

        /// Bytecodes: 4-byte count, then length-prefixed bytecode blobs.
        uint32 numberOfBytecodes = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4]));
        uint256 bytecodeSliceStart = calldataPtr;
        calldataPtr += 4;
        for (uint256 i = 0; i < numberOfBytecodes; ++i) {
            uint32 currentBytecodeLength = uint32(
                bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4])
            );
            calldataPtr += 4 + currentBytecodeLength;
        }
        bytecodeStorage[chainId].push(_totalL2ToL1PubdataAndStateDiffs[bytecodeSliceStart:calldataPtr]);

        /// Check State Diffs
        /// encoding is as follows:
        /// header (1 byte version, 3 bytes total len of compressed, 1 byte enumeration index size)
        /// body (`compressedStateDiffSize` bytes, 4 bytes number of state diffs, `numberOfStateDiffs` * `STATE_DIFF_ENTRY_SIZE` bytes for the uncompressed state diffs)
        /// encoded state diffs: [20bytes address][32bytes key][32bytes derived key][8bytes enum index][32bytes initial value][32bytes final value]
        require(
            uint256(uint8(bytes1(_totalL2ToL1PubdataAndStateDiffs[calldataPtr]))) ==
                STATE_DIFF_COMPRESSION_VERSION_NUMBER,
            "state diff compression version mismatch"
        );
        calldataPtr++;

        uint24 compressedStateDiffSize = uint24(bytes3(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 3]));
        calldataPtr += 3;

        uint8 enumerationIndexSize = uint8(bytes1(_totalL2ToL1PubdataAndStateDiffs[calldataPtr]));
        calldataPtr++;
        require(enumerationIndexSize <= MAX_ENUMERATION_INDEX_SIZE, "enumeration index size is too large");

        bytes calldata compressedStateDiffs = _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr +
            compressedStateDiffSize];
        calldataPtr += compressedStateDiffSize;

        uint32 numberOfStateDiffs = uint32(bytes4(_totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr + 4]));
        calldataPtr += 4;

        bytes calldata stateDiffs = _totalL2ToL1PubdataAndStateDiffs[calldataPtr:calldataPtr +
            (numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE)];
        calldataPtr += numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE;

        uint256 numberOfInitialWrites = uint256(compressedStateDiffs.readUint16(0));
        bytes32[] memory derivedKeyTable = new bytes32[](numberOfInitialWrites);

        uint256 stateDiffPtr = 2;
        uint256 numInitialWritesProcessed = 0;
        // Process initial writes (enum index == 0 in the uncompressed entry).
        for (uint256 i = 0; i < numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE; i += STATE_DIFF_ENTRY_SIZE) {
            bytes calldata stateDiff = stateDiffs[i:i + STATE_DIFF_ENTRY_SIZE];
            uint64 enumIndex = stateDiff.readUint64(84);
            if (enumIndex != 0) {
                // It is a repeated write, so we skip it.
                continue;
            }

            bytes32 derivedKey = stateDiff.readBytes32(52);
            derivedKeyTable[numInitialWritesProcessed] = derivedKey;
            uint256 finalValue = stateDiff.readUint256(124);

            numInitialWritesProcessed++;
            require(derivedKey == compressedStateDiffs.readBytes32(stateDiffPtr), "iw: initial key mismatch");
            _touchKey(chainId, derivedKey);
            stateDiffStorage[chainId][derivedKey] = finalValue;

            stateDiffPtr += 32;

            // Skip the compressed value: 1 metadata byte + payload whose length
            // is encoded in the metadata (0 means a full 32-byte value).
            uint8 metadata = uint8(bytes1(compressedStateDiffs[stateDiffPtr]));
            stateDiffPtr++;
            uint8 operation = metadata & OPERATION_BITMASK;
            uint8 len = operation == 0 ? 32 : metadata >> LENGTH_BITS_OFFSET;

            stateDiffPtr += len;
        }

        require(numInitialWritesProcessed == numberOfInitialWrites, "Incorrect number of initial storage diffs");

        // Process repeated writes (non-zero enum index).
        for (uint256 i = 0; i < numberOfStateDiffs * STATE_DIFF_ENTRY_SIZE; i += STATE_DIFF_ENTRY_SIZE) {
            bytes calldata stateDiff = stateDiffs[i:i + STATE_DIFF_ENTRY_SIZE];
            uint64 enumIndex = stateDiff.readUint64(84);
            if (enumIndex == 0) {
                continue;
            }
            // NOTE(review): `enumIndex` is the chain-global enumeration index,
            // while `derivedKeyTable` only holds THIS batch's initial writes
            // indexed 0..numberOfInitialWrites-1, so this lookup is wrong (and
            // reverts out-of-bounds) for keys first written in earlier batches.
            // A persistent chainId => enumIndex => derivedKey map is needed;
            // preserved as-is pending a spec decision.
            bytes32 derivedKey = derivedKeyTable[enumIndex];
            uint256 finalValue = stateDiff.readUint256(124);

            _touchKey(chainId, derivedKey); // was missing: repeated-write keys never reached `touchedKeys`
            stateDiffStorage[chainId][derivedKey] = finalValue;
            stateDiffPtr += enumerationIndexSize;

            uint8 metadata = uint8(bytes1(compressedStateDiffs[stateDiffPtr]));
            stateDiffPtr += 1;
            uint8 operation = metadata & OPERATION_BITMASK;
            uint8 len = operation == 0 ? 32 : metadata >> LENGTH_BITS_OFFSET;

            stateDiffPtr += len;
        }

        require(stateDiffPtr == compressedStateDiffs.length, "Extra data in _compressedStateDiffs");
    }

    /// @notice ABI-encodes everything accumulated since the last flush and
    /// wipes the aggregator's state.
    /// @return batchInfo `abi.encode(bytes[])` where each element is
    /// `abi.encode(chainId, logs, messages, bytecodes, compressedStateDiff)`.
    function returnBatchesAndClearState() external returns (bytes memory batchInfo) {
        for (uint256 i = 0; i < chainList.length; i += 1) {
            uint256 chainId = chainList[i];
            bytes32[] storage keys = touchedKeys[chainId];
            // One (derivedKey, finalValue) pair per touched key.
            // NOTE(review): per reviewer feedback, repeated writes should
            // eventually be encoded as (enum_index, value) pairs instead.
            bytes memory compressedStateDiff = new bytes(keys.length * COMPRESSED_STATE_DIFF_SIZE);
            uint256 stateDiffPtr = 0;
            for (uint256 keyIndex = 0; keyIndex < keys.length; keyIndex += 1) {
                bytes32 derivedKey = keys[keyIndex];
                uint256 finalValue = stateDiffStorage[chainId][derivedKey];
                assembly {
                    // The first 32 bytes of a `bytes` value hold its length.
                    // The original wrote at `compressedStateDiff + stateDiffPtr`,
                    // which clobbered the length word for the first entry; the
                    // data area starts 0x20 bytes in.
                    let base := add(add(compressedStateDiff, 0x20), stateDiffPtr)
                    mstore(base, derivedKey)
                    mstore(add(base, 0x20), finalValue)
                }
                delete stateDiffStorage[chainId][derivedKey];
                delete keyStatus[chainId][derivedKey];
                stateDiffPtr += COMPRESSED_STATE_DIFF_SIZE;
            }
            chainData.push(
                abi.encode(
                    chainId,
                    logStorage[chainId],
                    messageStorage[chainId],
                    bytecodeStorage[chainId],
                    compressedStateDiff
                )
            );

            delete chainSet[chainId];
            delete logStorage[chainId];
            delete messageStorage[chainId];
            delete bytecodeStorage[chainId];
            delete touchedKeys[chainId];
        }
        delete chainList;
        batchInfo = abi.encode(chainData);
        delete chainData;
    }
}
6 changes: 5 additions & 1 deletion system-contracts/contracts/Constants.sol
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import {ICompressor} from "./interfaces/ICompressor.sol";
import {IComplexUpgrader} from "./interfaces/IComplexUpgrader.sol";
import {IBootloaderUtilities} from "./interfaces/IBootloaderUtilities.sol";
import {IPubdataChunkPublisher} from "./interfaces/IPubdataChunkPublisher.sol";
import {IBatchAggregator} from "./interfaces/IBatchAggregator.sol";

/// @dev All the system contracts introduced by zkSync have their addresses
/// started from 2^15 in order to avoid collision with Ethereum precompiles.
Expand Down Expand Up @@ -88,6 +89,8 @@ IPubdataChunkPublisher constant PUBDATA_CHUNK_PUBLISHER = IPubdataChunkPublisher
address(SYSTEM_CONTRACTS_OFFSET + 0x11)
);

IBatchAggregator constant BATCH_AGGREGATOR = IBatchAggregator(address(SYSTEM_CONTRACTS_OFFSET + 0x12));

/// @dev If the bitwise AND of the extraAbi[2] param when calling the MSG_VALUE_SIMULATOR
/// is non-zero, the call will be assumed to be a system one.
uint256 constant MSG_VALUE_SIMULATOR_IS_SYSTEM_BIT = 1;
Expand Down Expand Up @@ -119,7 +122,8 @@ enum SystemLogKey {
BLOB_FOUR_HASH_KEY,
BLOB_FIVE_HASH_KEY,
BLOB_SIX_HASH_KEY,
EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY
EXPECTED_SYSTEM_CONTRACT_UPGRADE_TX_HASH_KEY,
HYPERCHAIN_PUBDATA_KEY
}

/// @dev The number of leaves in the L2->L1 log Merkle tree.
Expand Down
10 changes: 9 additions & 1 deletion system-contracts/contracts/L1Messenger.sol
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import {ISystemContract} from "./interfaces/ISystemContract.sol";
import {SystemContractHelper} from "./libraries/SystemContractHelper.sol";
import {EfficientCall} from "./libraries/EfficientCall.sol";
import {Utils} from "./libraries/Utils.sol";
import {SystemLogKey, SYSTEM_CONTEXT_CONTRACT, KNOWN_CODE_STORAGE_CONTRACT, COMPRESSOR_CONTRACT, STATE_DIFF_ENTRY_SIZE, L2_TO_L1_LOGS_MERKLE_TREE_LEAVES, PUBDATA_CHUNK_PUBLISHER, COMPUTATIONAL_PRICE_FOR_PUBDATA} from "./Constants.sol";
import {SystemLogKey, SYSTEM_CONTEXT_CONTRACT, KNOWN_CODE_STORAGE_CONTRACT, COMPRESSOR_CONTRACT, BATCH_AGGREGATOR, STATE_DIFF_ENTRY_SIZE, L2_TO_L1_LOGS_MERKLE_TREE_LEAVES, PUBDATA_CHUNK_PUBLISHER, COMPUTATIONAL_PRICE_FOR_PUBDATA} from "./Constants.sol";

/**
* @author Matter Labs
Expand Down Expand Up @@ -194,6 +194,13 @@ contract L1Messenger is IL1Messenger, ISystemContract {
function publishPubdataAndClearState(
bytes calldata _totalL2ToL1PubdataAndStateDiffs
) external onlyCallFromBootloader {
// Send hyperchain batches
SystemContractHelper.toL1(
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

the pubdata that comes from the batch aggregator should become part of the current batches pubdata instead of being published via a separate log. this way we wont have to verify it separate from the main portion of pubdata

true,
bytes32(uint256(SystemLogKey.HYPERCHAIN_PUBDATA_KEY)),
keccak256(BATCH_AGGREGATOR.returnBatchesAndClearState())
);

uint256 calldataPtr = 0;

/// Check logs
Expand Down Expand Up @@ -308,6 +315,7 @@ contract L1Messenger is IL1Messenger, ISystemContract {
stateDiffs,
compressedStateDiffs
);
//

/// Check for calldata strict format
require(calldataPtr == _totalL2ToL1PubdataAndStateDiffs.length, "Extra data in the totalL2ToL1Pubdata array");
Expand Down
14 changes: 14 additions & 0 deletions system-contracts/contracts/interfaces/IBatchAggregator.sol
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
// SPDX-License-Identifier: MIT

pragma solidity 0.8.20;

/// @dev Size in bytes of one aggregated state-diff entry: a 32-byte derived
/// key followed by a 32-byte final value (see BatchAggregator's encoding).
uint256 constant COMPRESSED_STATE_DIFF_SIZE = 64;

/// @notice Interface of the system contract that accumulates hyperchain batch
/// pubdata until it is flushed by `returnBatchesAndClearState`.
interface IBatchAggregator {
    /// @notice Records the pubdata of one committed batch under `chainId`.
    /// @param _totalL2ToL1PubdataAndStateDiffs Packed logs, messages, bytecodes
    /// and state diffs of the batch.
    /// @param chainId The hyperchain the batch belongs to.
    /// @param batchNumber The number of the committed batch.
    function commitBatch(
        bytes calldata _totalL2ToL1PubdataAndStateDiffs,
        uint256 chainId,
        uint256 batchNumber
    ) external;
    /// @notice Returns all accumulated data ABI-encoded and clears the
    /// aggregator's state.
    function returnBatchesAndClearState() external returns (bytes memory);
}
Loading