refactor(store): optimize PackedCounter (#1231)
Co-authored-by: alvarius <[email protected]>
Co-authored-by: Kevin Ingersoll <[email protected]>
3 people authored Aug 18, 2023
1 parent 5135423 commit 433078c
Showing 47 changed files with 614 additions and 339 deletions.
14 changes: 14 additions & 0 deletions .changeset/little-ravens-yawn.md
@@ -0,0 +1,14 @@
---
"@latticexyz/cli": patch
"@latticexyz/protocol-parser": major
"@latticexyz/services": major
"@latticexyz/store-sync": major
"@latticexyz/store": major
"@latticexyz/world": patch
---

Reverse the PackedCounter encoding to optimize gas for bitshifts.
Since ints are right-aligned, shifting by an index is straightforward when the counters are indexed right-to-left.

- Previous encoding: (7 bytes | accumulator),(5 bytes | counter 1),...,(5 bytes | counter 5)
- New encoding: (5 bytes | counter 5),...,(5 bytes | counter 1),(7 bytes | accumulator)
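
As an illustration (not part of this commit), here is a minimal TypeScript sketch of why right-to-left indexing pairs well with bitshifts; the helper names `totalByteLength` and `atIndex` are hypothetical:

```ts
// New layout (right-aligned): bits 0-55 hold the 7-byte accumulator,
// bits 56-95 hold counter 1, bits 96-135 counter 2, and so on.
const ACC_BITS = 56n;     // 7-byte total-length accumulator
const COUNTER_BITS = 40n; // 5 bytes per counter

// Total byte length of all dynamic data (the accumulator).
function totalByteLength(packed: bigint): bigint {
  return packed & ((1n << ACC_BITS) - 1n);
}

// Byte length of the dynamic field at `index` (0-based): one shift, one mask.
function atIndex(packed: bigint, index: number): bigint {
  const shift = ACC_BITS + BigInt(index) * COUNTER_BITS;
  return (packed >> shift) & ((1n << COUNTER_BITS) - 1n);
}

// Using the packed counter from hexToPackedCounter.test.ts further down:
const packed = BigInt("0x0000000000000000000000000000400000000020000000002000000000000080");
console.log(totalByteLength(packed)); // 128n
console.log(atIndex(packed, 0), atIndex(packed, 1), atIndex(packed, 2)); // 32n 32n 64n
```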
8 changes: 5 additions & 3 deletions e2e/packages/contracts/src/codegen/tables/NumberList.sol
@@ -194,9 +194,11 @@ library NumberList {

/** Tightly pack full data using this table's schema */
function encode(uint32[] memory value) internal pure returns (bytes memory) {
uint40[] memory _counters = new uint40[](1);
_counters[0] = uint40(value.length * 4);
PackedCounter _encodedLengths = PackedCounterLib.pack(_counters);
PackedCounter _encodedLengths;
// Lengths are effectively checked during copy by 2**40 bytes exceeding gas limits
unchecked {
_encodedLengths = PackedCounterLib.pack(value.length * 4);
}

return abi.encodePacked(_encodedLengths.unwrap(), EncodeArray.encode((value)));
}
@@ -77,9 +77,11 @@ library MessageTable {

/** Tightly pack full data using this table's schema */
function encode(string memory value) internal pure returns (bytes memory) {
uint40[] memory _counters = new uint40[](1);
_counters[0] = uint40(bytes(value).length);
PackedCounter _encodedLengths = PackedCounterLib.pack(_counters);
PackedCounter _encodedLengths;
// Lengths are effectively checked during copy by 2**40 bytes exceeding gas limits
unchecked {
_encodedLengths = PackedCounterLib.pack(bytes(value).length);
}

return abi.encodePacked(_encodedLengths.unwrap(), bytes((value)));
}
18 changes: 11 additions & 7 deletions packages/cli/contracts/src/codegen/tables/Dynamics1.sol
@@ -998,13 +998,17 @@ library Dynamics1 {
address[4] memory staticAddrs,
bool[5] memory staticBools
) internal pure returns (bytes memory) {
uint40[] memory _counters = new uint40[](5);
_counters[0] = uint40(staticB32.length * 32);
_counters[1] = uint40(staticI32.length * 4);
_counters[2] = uint40(staticU128.length * 16);
_counters[3] = uint40(staticAddrs.length * 20);
_counters[4] = uint40(staticBools.length * 1);
PackedCounter _encodedLengths = PackedCounterLib.pack(_counters);
PackedCounter _encodedLengths;
// Lengths are effectively checked during copy by 2**40 bytes exceeding gas limits
unchecked {
_encodedLengths = PackedCounterLib.pack(
staticB32.length * 32,
staticI32.length * 4,
staticU128.length * 16,
staticAddrs.length * 20,
staticBools.length * 1
);
}

return
abi.encodePacked(
10 changes: 5 additions & 5 deletions packages/cli/contracts/src/codegen/tables/Dynamics2.sol
@@ -598,11 +598,11 @@ library Dynamics2 {

/** Tightly pack full data using this table's schema */
function encode(uint64[] memory u64, string memory str, bytes memory b) internal pure returns (bytes memory) {
uint40[] memory _counters = new uint40[](3);
_counters[0] = uint40(u64.length * 8);
_counters[1] = uint40(bytes(str).length);
_counters[2] = uint40(bytes(b).length);
PackedCounter _encodedLengths = PackedCounterLib.pack(_counters);
PackedCounter _encodedLengths;
// Lengths are effectively checked during copy by 2**40 bytes exceeding gas limits
unchecked {
_encodedLengths = PackedCounterLib.pack(u64.length * 8, bytes(str).length, bytes(b).length);
}

return abi.encodePacked(_encodedLengths.unwrap(), EncodeArray.encode((u64)), bytes((str)), bytes((b)));
}
10 changes: 5 additions & 5 deletions packages/cli/contracts/src/codegen/tables/Singleton.sol
@@ -577,11 +577,11 @@ library Singleton {
uint32[2] memory v3,
uint32[1] memory v4
) internal pure returns (bytes memory) {
uint40[] memory _counters = new uint40[](3);
_counters[0] = uint40(v2.length * 4);
_counters[1] = uint40(v3.length * 4);
_counters[2] = uint40(v4.length * 4);
PackedCounter _encodedLengths = PackedCounterLib.pack(_counters);
PackedCounter _encodedLengths;
// Lengths are effectively checked during copy by 2**40 bytes exceeding gas limits
unchecked {
_encodedLengths = PackedCounterLib.pack(v2.length * 4, v3.length * 4, v4.length * 4);
}

return
abi.encodePacked(
2 changes: 1 addition & 1 deletion packages/protocol-parser/src/decodeRecord.test.ts
@@ -6,7 +6,7 @@ describe("decodeRecord", () => {
const schema = { staticFields: ["uint32", "uint128"], dynamicFields: ["uint32[]", "string"] } as const;
const values = decodeRecord(
schema,
"0x0000000100000000000000000000000000000002000000000000130000000008000000000b0000000000000000000000000000000000000300000004736f6d6520737472696e67"
"0x0000000100000000000000000000000000000002000000000000000000000000000000000000000b0000000008000000000000130000000300000004736f6d6520737472696e67"
);
expect(values).toStrictEqual([1, 2n, [3, 4], "some string"]);
});
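
The updated record hex above can be split by hand to see the new counter position; a quick TypeScript sketch, with offsets assumed from the test's schema (uint32 + uint128 static fields, a 32-byte PackedCounter, then the dynamic data):

```ts
// Hand-splitting the updated test record (offsets assumed from the schema used in the test).
const record =
  "0x0000000100000000000000000000000000000002000000000000000000000000000000000000000b0000000008000000000000130000000300000004736f6d6520737472696e67";
const body = record.slice(2);
const staticData = body.slice(0, 40);      // uint32 = 1, uint128 = 2 (20 bytes)
const packedCounter = body.slice(40, 104); // 32 bytes; last 7 bytes = 0x13 = 19 total dynamic bytes
const dynamicData = body.slice(104);       // uint32[] [3, 4] (8 bytes) + "some string" (11 bytes)
console.log(packedCounter.slice(-14));     // "00000000000013"
console.log(dynamicData.length / 2);       // 19
```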
6 changes: 3 additions & 3 deletions packages/protocol-parser/src/hexToPackedCounter.test.ts
@@ -3,7 +3,7 @@ import { hexToPackedCounter } from "./hexToPackedCounter";

describe("hexToPackedCounter", () => {
it("decodes hex data to packed counter", () => {
expect(hexToPackedCounter("0x0000000000008000000000200000000020000000004000000000000000000000"))
expect(hexToPackedCounter("0x0000000000000000000000000000400000000020000000002000000000000080"))
.toMatchInlineSnapshot(`
{
"fieldByteLengths": [
@@ -26,9 +26,9 @@ describe("hexToPackedCounter", () => {

it("throws if packed counter total byte length doesn't match summed byte length of fields", () => {
expect(() =>
hexToPackedCounter("0x0000000000004000000000200000000020000000004000000000000000000000")
hexToPackedCounter("0x0000000000000000000000000000400000000020000000002000000000000040")
).toThrowErrorMatchingInlineSnapshot(
'"PackedCounter \\"0x0000000000004000000000200000000020000000004000000000000000000000\\" total bytes length (64) did not match the summed length of all field byte lengths (128)."'
'"PackedCounter \\"0x0000000000000000000000000000400000000020000000002000000000000040\\" total bytes length (64) did not match the summed length of all field byte lengths (128)."'
);
});
});
10 changes: 6 additions & 4 deletions packages/protocol-parser/src/hexToPackedCounter.ts
@@ -5,8 +5,8 @@ import { InvalidHexLengthForPackedCounterError, PackedCounterLengthMismatchError

// Keep this logic in sync with PackedCounter.sol

// - First 7 bytes (uint56) are used for the total byte length of the dynamic data
// - The next 5 byte (uint40) sections are used for the byte length of each field, in the same order as the schema's dynamic fields
// - Last 7 bytes (uint56) are used for the total byte length of the dynamic data
// - The next 5 byte (uint40) sections are used for the byte length of each field, indexed from right to left

// We use byte lengths rather than item counts so that, on chain, we can slice without having to get the schema first (and thus the field lengths of each dynamic type)

@@ -17,10 +17,12 @@ export function hexToPackedCounter(data: Hex): {
if (data.length !== 66) {
throw new InvalidHexLengthForPackedCounterError(data);
}
const totalByteLength = decodeStaticField("uint56", sliceHex(data, 0, 7));

const totalByteLength = decodeStaticField("uint56", sliceHex(data, 32 - 7, 32));
// TODO: use schema to make sure we only parse as many as we need (rather than zeroes at the end)?
const fieldByteLengths = decodeDynamicField("uint40[]", sliceHex(data, 7));
const reversedFieldByteLengths = decodeDynamicField("uint40[]", sliceHex(data, 0, 32 - 7));
// Reverse the lengths
const fieldByteLengths = Object.freeze([...reversedFieldByteLengths].reverse());

const summedLength = BigInt(fieldByteLengths.reduce((total, length) => total + length, 0));
if (summedLength !== totalByteLength) {
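
For reference, the same slicing as hexToPackedCounter above, applied to a concrete value with plain string slicing instead of viem's sliceHex (offsets assumed from the new layout):

```ts
// Last 7 bytes = total length; first 25 bytes = per-field lengths, counter 5 ... counter 1.
const hex = "0x0000000000000000000000000000400000000020000000002000000000000080";
const body = hex.slice(2); // 64 hex chars = 32 bytes
const totalByteLength = BigInt("0x" + body.slice(2 * (32 - 7))); // last 7 bytes -> 128n
const reversed = Array.from({ length: 5 }, (_, i) => Number("0x" + body.slice(i * 10, i * 10 + 10)));
const fieldByteLengths = [...reversed].reverse(); // [32, 32, 64, 0, 0]
console.log(totalByteLength === BigInt(fieldByteLengths.reduce((a, b) => a + b, 0))); // true
```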
2 changes: 1 addition & 1 deletion packages/services/pkg/mode/storecore/encoding.go
@@ -524,7 +524,7 @@ func (schema *Schema) DecodeFieldData(encoding []byte) *DecodedData {
// since all uints are handled the same to handle those > Go uint64.
dataLengthString, _ := packedCounterCounterType.DecodeStaticField(
dynamicDataSlice,
packedCounterAccumulatorType.StaticByteLength()+uint64(i)*packedCounterCounterType.StaticByteLength(),
32-packedCounterAccumulatorType.StaticByteLength()-uint64(i+1)*packedCounterCounterType.StaticByteLength(),
).(string)

// Convert the length to a uint64.
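
The new offset expression in encoding.go can be sanity-checked outside Go; a tiny TypeScript sketch with the byte lengths assumed from the layout (7-byte accumulator, 5-byte counters, 32-byte word):

```ts
// Byte offset of counter i (0-based) in the 32-byte PackedCounter, per the new expression.
const WORD = 32, ACC = 7, CTR = 5; // assumed byte lengths
const offset = (i: number) => WORD - ACC - (i + 1) * CTR;
console.log([0, 1, 2, 3, 4].map(offset)); // [20, 15, 10, 5, 0] — counter 1 sits just left of the accumulator
```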
2 changes: 1 addition & 1 deletion packages/services/pkg/mode/storecore/encoding_test.go
@@ -9,7 +9,7 @@ import (

func TestDecodeData(t *testing.T) {
//nolint:lll // test
encoding := "0x000000000000ac000000000c00000000a00000000000000000000000000000004d6573736167655461626c65000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c7565000000000000000000000000000000000000000000000000000000"
encoding := "0x00000000000000000000000000000000000000a0000000000c000000000000ac4d6573736167655461626c65000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c7565000000000000000000000000000000000000000000000000000000"
schema := &Schema{
Static: []SchemaType{},
Dynamic: []SchemaType{
2 changes: 1 addition & 1 deletion packages/services/pkg/protocol-parser/decodeRecord_test.go
@@ -22,7 +22,7 @@ func TestDecodeRecord(t *testing.T) {
},
}

hex := "0x0000000100000000000000000000000000000002000000000000130000000008000000000b0000000000000000000000000000000000000300000004736f6d6520737472696e67"
hex := "0x0000000100000000000000000000000000000002000000000000000000000000000000000000000b0000000008000000000000130000000300000004736f6d6520737472696e67"

expectedDecodedData := []interface{}{
uint32(1),
8 changes: 6 additions & 2 deletions packages/services/pkg/protocol-parser/hexToPackedCounter.go
@@ -17,8 +17,12 @@ func HexToPackedCounter(data string) PackedCounter {
panic(ErrInvalidHexLengthForPackedCounter)
}

totalByteLength := DecodeStaticField(schematype.UINT56, HexSlice(data, 0, 7)).(uint64)
fieldByteLengths := convert.ToUint64Array(DecodeDynamicField(schematype.UINT40_ARRAY, HexSliceFrom(data, 7)))
totalByteLength := DecodeStaticField(schematype.UINT56, HexSlice(data, 32-7, 32)).(uint64)
fieldByteLengths := convert.ToUint64Array(DecodeDynamicField(schematype.UINT40_ARRAY, HexSlice(data, 0, 32-7)))
// Reverse the lengths
for i, j := 0, len(fieldByteLengths)-1; i < j; i, j = i+1, j-1 {
fieldByteLengths[i], fieldByteLengths[j] = fieldByteLengths[j], fieldByteLengths[i]
}

summedLength := new(big.Int)
for _, length := range fieldByteLengths {
@@ -7,7 +7,7 @@ import (
)

func TestHexToPackedCounter(t *testing.T) {
hex := "0x0000000000008000000000200000000020000000004000000000000000000000"
hex := "0x0000000000000000000000000000400000000020000000002000000000000080"
expectedCounter := protocolparser.PackedCounter{
TotalByteLength: uint64(128),
FieldByteLengths: []uint64{
@@ -47,7 +47,7 @@ func TestHexToPackedCounterLengthMismatch(t *testing.T) {
}
}()

hex := "0x0000000000004000000000200000000020000000004000000000000000000000"
hex := "0x0000000000000000000000000000400000000020000000002000000000000040"
protocolparser.HexToPackedCounter(hex)
t.Error("expected panic")
}
4 changes: 2 additions & 2 deletions packages/store-sync/src/blockLogsToStorage.test.ts
@@ -73,7 +73,7 @@ describe("blockLogsToStorage", () => {
{
address: "0x5fbdb2315678afecb367f032d93f642f64180aa3",
topics: ["0x912af873e852235aae78a1d25ae9bb28b616a67c36898c53a14fd8184504ee32"],
data: "0x6d756473746f7265000000000000000053746f72654d65746164617461000000000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000496e76656e746f72790000000000000000000000000000000000000000000000000000000000000000000000000000c9000000000000a9000000000900000000a0000000000000000000000000000000496e76656e746f7279000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c75650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
data: "0x6d756473746f7265000000000000000053746f72654d65746164617461000000000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000496e76656e746f72790000000000000000000000000000000000000000000000000000000000000000000000000000c900000000000000000000000000000000000000a00000000009000000000000a9496e76656e746f7279000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c75650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
blockNumber: 5448n,
transactionHash: "0x80d6650fdd6656461e6639988d7baa8d6d228297df505d8bbd0a4efc273b382b",
transactionIndex: 44,
@@ -83,7 +83,7 @@
args: {
table: "0x6d756473746f7265000000000000000053746f72654d65746164617461000000",
key: ["0x00000000000000000000000000000000496e76656e746f727900000000000000"],
data: "0x000000000000a9000000000900000000a0000000000000000000000000000000496e76656e746f7279000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c7565000000000000000000000000000000000000000000000000000000",
data: "0x00000000000000000000000000000000000000a00000000009000000000000a9496e76656e746f7279000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000576616c7565000000000000000000000000000000000000000000000000000000",
},
eventName: "StoreSetRecord",
},