diff --git a/consensus/src/main/java/org/ethereum/beacon/consensus/hasher/SSZObjectHasher.java b/consensus/src/main/java/org/ethereum/beacon/consensus/hasher/SSZObjectHasher.java
index b2f0eb951..f4e5897fb 100644
--- a/consensus/src/main/java/org/ethereum/beacon/consensus/hasher/SSZObjectHasher.java
+++ b/consensus/src/main/java/org/ethereum/beacon/consensus/hasher/SSZObjectHasher.java
@@ -19,26 +19,20 @@ public class SSZObjectHasher implements ObjectHasher {
 
   private final SSZHashSerializer sszHashSerializer;
 
-  private final Function hashFunction;
-  SSZObjectHasher(SSZHashSerializer sszHashSerializer, Function hashFunction) {
+  SSZObjectHasher(SSZHashSerializer sszHashSerializer) {
     this.sszHashSerializer = sszHashSerializer;
-    this.hashFunction = hashFunction;
   }
 
   public static SSZObjectHasher create(Function hashFunction) {
     SSZHashSerializer sszHashSerializer =
         SSZHashSerializers.createWithBeaconChainTypes(hashFunction, true);
-    return new SSZObjectHasher(sszHashSerializer, hashFunction);
+    return new SSZObjectHasher(sszHashSerializer);
  }
 
   @Override
   public Hash32 getHash(Object input) {
-    if (input instanceof List) {
-      return Hash32.wrap(Bytes32.wrap(sszHashSerializer.hash(input)));
-    } else {
-      return hashFunction.apply(BytesValue.wrap(sszHashSerializer.hash(input)));
-    }
+    return Hash32.wrap(Bytes32.wrap(sszHashSerializer.hash(input)));
   }
 
   @Override
@@ -46,8 +40,7 @@ public Hash32 getHashTruncate(Object input, String field) {
     if (input instanceof List) {
       throw new RuntimeException("Lists are not supported in truncated hash");
     } else {
-      return hashFunction.apply(
-          BytesValue.wrap(sszHashSerializer.hashTruncate(input, input.getClass(), field)));
+      return Hash32.wrap(Bytes32.wrap(sszHashSerializer.hashTruncate(input, input.getClass(), field)));
     }
   }
 }
diff --git a/consensus/src/test/java/org/ethereum/beacon/consensus/SpecHelpersTest.java b/consensus/src/test/java/org/ethereum/beacon/consensus/SpecHelpersTest.java
index 376241ccb..1b1dfc5eb 100644
--- a/consensus/src/test/java/org/ethereum/beacon/consensus/SpecHelpersTest.java
+++ b/consensus/src/test/java/org/ethereum/beacon/consensus/SpecHelpersTest.java
@@ -107,7 +107,7 @@ private DepositInput createDepositInput() {
   public void testHashTreeRoot1() {
     SpecHelpers specHelpers = SpecHelpers.createWithSSZHasher(SpecConstants.DEFAULT, () -> 0L);
     Hash32 expected =
-        Hash32.fromHexString("0x8fc89d0f1f435b07543b15fdf687e7fce4a754ecd9e5afbf8f0e83928a7f798f");
+        Hash32.fromHexString("0x1a2017aea008e5bb8b3eb79d031f14347018353f1c58fc3a54e9fc7af7ab2fe1");
     Hash32 actual = specHelpers.hash_tree_root(createDepositInput());
     assertEquals(expected, actual);
   }
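With this change the hash function is consumed only by the serializer factory, so the hasher is obtained through SSZObjectHasher.create() and getHash() always returns the 32-byte root produced by SSZHashSerializer. A minimal usage sketch, not part of the change set, assuming Hashes::keccak256 as the hash function (as in the tests) and an arbitrary @SSZSerializable container:

    // Illustrative only; depositInput stands for any @SSZSerializable object.
    SSZObjectHasher hasher = SSZObjectHasher.create(Hashes::keccak256);
    Hash32 root = hasher.getHash(depositInput);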
"A0B1BE2F50398CA7FE11EA48E5AFE9F89F758EC815E5C12BE21315AF6D34FA1D"), + "0x02000000abcd0000000000000000000000000000000000000000000000000000"), hash); } @@ -55,7 +55,7 @@ public void SignatureTest() { BytesValue hash = sszHasher.getHash(DEFAULT_SIG); assertEquals( BytesValue.fromHexString( - "D75724A07F4EFB3B456408DD6C36C70A6DF189FAE6A09F7AD0C848F0D3341290"), + "0x3d15cc04a0a366f8e0bc034db6df008f9eaf30d7bd0b1b40c4bd7bd141bd42f7"), hash); } @@ -75,7 +75,7 @@ public void simpleTest() { BytesValue hash = sszHasher.getHash(attestationRecord); assertEquals( BytesValue.fromHexString( - "740620beb3f42033473a7adf01b5f115ec0a72bf8c97eb36f732a6158ff8775d"), + "0xbfde5860f2d9e9d7e8b2a0d5d3630a09b1330197d09a359470601bae5b3839ae"), hash); } @@ -96,14 +96,14 @@ public void simpleTruncateTest() { BytesValue hash1 = sszHasher.getHashTruncate(attestationRecord, "justifiedBlockHash"); assertEquals( BytesValue.fromHexString( - "0x8d5fc215a3e8c2a67c44e8c43711ce1396315366f013892cce63ad88b8e8eb9e"), + "0x945b6a8eac7bd3611f6fb452fd7f63d77ce3672752df45443beb0e0169bf33cb"), hash1); // Sig only removed BytesValue hash2 = sszHasher.getHashTruncate(attestationRecord, "aggregateSig"); assertEquals( BytesValue.fromHexString( - "0x5df5425a3581f24ec3f8508c44820d2c70c89299cf217a3a5d8e126e51b6e4ed"), + "0xae3f28da5903192bff0472fc12baf3acb8c2554606c2449f833d2079188eb871"), hash2); boolean fired = false; @@ -143,7 +143,7 @@ public void list32Test() { BytesValue hash = sszHasher.getHash(attestationRecord); assertEquals( BytesValue.fromHexString( - "740620beb3f42033473a7adf01b5f115ec0a72bf8c97eb36f732a6158ff8775d"), + "0xbfde5860f2d9e9d7e8b2a0d5d3630a09b1330197d09a359470601bae5b3839ae"), hash); } @@ -159,7 +159,7 @@ public void smallItemsListTest() { BytesValue hash = sszHasher.getHash(someObject); assertEquals( BytesValue.fromHexString( - "BD4AB28F883B78BF4C5B3652AFCF272EAD9026C3361821A0420777A9B3C20425"), + "0xb1a18810e9b465f89b07c45716aef51cb243892a9ca24b37a4c322752fb905d6"), hash); } @@ -172,11 +172,11 @@ public void smallItemTest() { BytesValue hash2 = sszHasher.getHash(anotherObject2); assertEquals( BytesValue.fromHexString( - "FB5BAAECAB62C516763CEA2DFBA17FBBC24907E4E3B0BE426BDE71BE89AF495F"), + "0x0100000000000000000000000000000000000000000000000000000000000000"), hash1); assertEquals( BytesValue.fromHexString( - "B7047395B0D5A9C70336FDE7E40DE2BB369FE67C8E762A35641E209B7338FDD9"), + "0x0200000000000000000000000000000000000000000000000000000000000000"), hash2); } @@ -190,7 +190,7 @@ public void listTest() { BytesValue hash = sszHasher.getHash(anotherObjects); assertEquals( BytesValue.fromHexString( - "a9bb69cad9fb0d9a9963bf9a32f09b9c306bed6f6c95fff3e5d625fd9370646e"), + "0x6d3a1eb14c6b37eb4645044d0c1bf38284b408eab24e89238a8058f3b921e5d9"), hash); } diff --git a/core/src/main/java/org/ethereum/beacon/core/operations/deposit/DepositInput.java b/core/src/main/java/org/ethereum/beacon/core/operations/deposit/DepositInput.java index 99ba20161..d36b52550 100644 --- a/core/src/main/java/org/ethereum/beacon/core/operations/deposit/DepositInput.java +++ b/core/src/main/java/org/ethereum/beacon/core/operations/deposit/DepositInput.java @@ -7,8 +7,6 @@ import org.ethereum.beacon.ssz.annotation.SSZ; import org.ethereum.beacon.ssz.annotation.SSZSerializable; import tech.pegasys.artemis.ethereum.core.Hash32; -import tech.pegasys.artemis.util.bytes.Bytes48; -import tech.pegasys.artemis.util.bytes.Bytes96; /** * An input parameters of deposit contract. 
diff --git a/core/src/main/java/org/ethereum/beacon/core/operations/deposit/DepositInput.java b/core/src/main/java/org/ethereum/beacon/core/operations/deposit/DepositInput.java
index 99ba20161..d36b52550 100644
--- a/core/src/main/java/org/ethereum/beacon/core/operations/deposit/DepositInput.java
+++ b/core/src/main/java/org/ethereum/beacon/core/operations/deposit/DepositInput.java
@@ -7,8 +7,6 @@
 import org.ethereum.beacon.ssz.annotation.SSZ;
 import org.ethereum.beacon.ssz.annotation.SSZSerializable;
 import tech.pegasys.artemis.ethereum.core.Hash32;
-import tech.pegasys.artemis.util.bytes.Bytes48;
-import tech.pegasys.artemis.util.bytes.Bytes96;
 
 /**
  * An input parameters of deposit contract.
@@ -30,9 +28,7 @@ public class DepositInput {
   @SSZ private final BLSSignature proofOfPossession;
 
   public DepositInput(
-      BLSPubkey pubKey,
-      Hash32 withdrawalCredentials,
-      BLSSignature proofOfPossession) {
+      BLSPubkey pubKey, Hash32 withdrawalCredentials, BLSSignature proofOfPossession) {
     this.pubKey = pubKey;
     this.withdrawalCredentials = withdrawalCredentials;
     this.proofOfPossession = proofOfPossession;
diff --git a/ssz/src/main/java/org/ethereum/beacon/ssz/SSZCodecHasher.java b/ssz/src/main/java/org/ethereum/beacon/ssz/SSZCodecHasher.java
index b729e032e..5d1ebd9ac 100644
--- a/ssz/src/main/java/org/ethereum/beacon/ssz/SSZCodecHasher.java
+++ b/ssz/src/main/java/org/ethereum/beacon/ssz/SSZCodecHasher.java
@@ -1,5 +1,16 @@
 package org.ethereum.beacon.ssz;
 
+import net.consensys.cava.bytes.Bytes;
+import net.consensys.cava.ssz.BytesSSZReaderProxy;
+import net.consensys.cava.ssz.SSZ;
+import net.consensys.cava.ssz.SSZException;
+import net.consensys.cava.units.bigints.UInt256;
+import org.ethereum.beacon.ssz.type.SSZCodec;
+import org.ethereum.beacon.ssz.type.SubclassCodec;
+import org.javatuples.Pair;
+import org.javatuples.Triplet;
+import tech.pegasys.artemis.util.bytes.BytesValue;
+
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -12,15 +23,6 @@
 import java.util.Set;
 import java.util.function.Consumer;
 import java.util.function.Function;
-import net.consensys.cava.bytes.Bytes;
-import net.consensys.cava.ssz.BytesSSZReaderProxy;
-import net.consensys.cava.ssz.SSZ;
-import net.consensys.cava.ssz.SSZException;
-import org.ethereum.beacon.ssz.type.SSZCodec;
-import org.ethereum.beacon.ssz.type.SubclassCodec;
-import org.javatuples.Pair;
-import org.javatuples.Triplet;
-import tech.pegasys.artemis.util.bytes.BytesValue;
 
 /**
  * Implementation of {@link SSZCodecResolver} which implements SSZ Hash function
@@ -31,9 +33,9 @@
  */
 public class SSZCodecHasher implements SSZCodecResolver {
 
-  private static final int SSZ_CHUNK_SIZE = 32;
+  static final int BYTES_PER_CHUNK = 32;
 
-  private static final Bytes EMPTY_CHUNK = Bytes.of(new byte[SSZ_CHUNK_SIZE]);
+  static final Bytes EMPTY_CHUNK = Bytes.of(new byte[BYTES_PER_CHUNK]);
 
   private Function hashFunction;
 
@@ -230,36 +232,49 @@ public void registerCodec(Set classes, Set types, SSZCodec codec)
   }
 
   /**
-   * Merkle tree hash of a list of homogenous, non-empty items
+   * Given ordered objects of the same basic type, serialize them, pack them into
+   * BYTES_PER_CHUNK-byte chunks, right-pad the last chunk with zero bytes, and return the chunks.
    *
-   * @param lst
-   * @return
    */
-  private Bytes merkle_hash(Bytes[] lst) {
-    // Store length of list (to compensate for non-bijectiveness of padding)
-    Bytes dataLen = SSZ.encodeInt32(lst.length);
-
+  List pack(Bytes[] lst) {
     List chunkz = new ArrayList<>();
     // Handle empty list case
-    if (dataLen.isZero()) {
+    if (lst.length == 0) {
       chunkz.add(EMPTY_CHUNK);
-    } else if (lst[0].size() < SSZ_CHUNK_SIZE) {
-      // See how many items fit in a chunk
-      int itemsPerChunk = SSZ_CHUNK_SIZE / lst[0].size();
-      // Build a list of chunks based on the number of items in the chunk
-      for (int i = 0; i < lst.length; i += itemsPerChunk) {
-        int chunkLen = Math.min(itemsPerChunk, lst.length - i);
-        Bytes[] lstSlice = new Bytes[chunkLen];
-        System.arraycopy(lst, i, lstSlice, 0, chunkLen);
-        Bytes chunkBeforePad = Bytes.concatenate(lstSlice);
-        chunkz.add(zpad(chunkBeforePad, SSZ_CHUNK_SIZE));
-      }
     } else {
-      // Leave large items alone
-      chunkz.addAll(Arrays.asList(lst));
+      int currentItem = 0;
+      int itemPosition = 0;
+      while (currentItem < lst.length) {
+        int chunkPosition = 0;
+        byte[] currentChunk = new byte[BYTES_PER_CHUNK];
+        while (chunkPosition < BYTES_PER_CHUNK) {
+          int len =
+              Math.min(BYTES_PER_CHUNK - chunkPosition, lst[currentItem].size() - itemPosition);
+          System.arraycopy(
+              lst[currentItem].toArray(), itemPosition, currentChunk, chunkPosition, len);
+          chunkPosition += len;
+          itemPosition += len;
+          if (itemPosition == lst[currentItem].size()) {
+            ++currentItem;
+            itemPosition = 0;
+          }
+          if (currentItem == lst.length || chunkPosition == BYTES_PER_CHUNK) {
+            chunkz.add(Bytes.wrap(currentChunk));
+            chunkPosition = BYTES_PER_CHUNK;
+          }
+        }
+        ++currentItem;
+      }
     }
-    // Merkleise
+    return chunkz;
+  }
+
+  /**
+   * Given ordered BYTES_PER_CHUNK-byte chunks, if necessary append zero chunks so that the number
+   * of chunks is a power of two, Merkleize the chunks, and return the root.
+   */
+  Bytes merkleize(List chunkz) {
     for (int i = chunkz.size(); i < next_power_of_2(chunkz.size()); ++i) {
       chunkz.add(EMPTY_CHUNK);
     }
@@ -272,7 +287,7 @@
       chunkz = tempChunkz;
     }
 
-    return hashFunction.apply(Bytes.concatenate(chunkz.get(0), dataLen));
+    return chunkz.get(0);
   }
 
   private long next_power_of_2(int x) {
@@ -294,24 +309,41 @@
     return 0;
   }
 
-  private Bytes zpad(Bytes input, int length) {
+  /**
+   * Given a Merkle root and a length (uint256 little-endian serialization) return
+   * hash(root + length).
+   */
+  Bytes mix_in_length(Bytes root, int length) {
+    Bytes len = SSZ.encodeUInt256(UInt256.valueOf(length));
+    return hashFunction.apply(Bytes.concatenate(root, len));
+  }
+
+  Bytes zpad(Bytes input, int length) {
     return Bytes.concatenate(input, Bytes.wrap(new byte[length - input.size()]));
   }
 
   private Bytes hash_tree_root_list(Bytes[] lst) {
-    Bytes[] res = new Bytes[lst.length];
+    return mix_in_length(merkleize(pack(lst)), lst.length);
+  }
+
+  private Bytes hash_tree_root_container(Bytes[] lst) {
+    List values = new ArrayList<>();
     for (int i = 0; i < lst.length; ++i) {
-      res[i] = hash_tree_root_element(lst[i]);
+      values.add(hash_tree_root_element(lst[i]));
     }
-    return merkle_hash(res);
+    return merkleize(values);
   }
 
-  private Bytes hash_tree_root_element(Bytes el) {
-    if (el.size() <= SSZ_CHUNK_SIZE) {
-      return el;
-    } else {
-      return hashFunction.apply(el);
+  Bytes hash_tree_root_element(Bytes el) {
+    return merkleize(pack(new Bytes[]{el}));
+  }
+
+  private Bytes hash_tree_root_containers_list(Bytes[] lst) {
+    List values = new ArrayList<>();
+    for (int i = 0; i < lst.length; ++i) {
+      values.add(hash_tree_root_element(lst[i]));
     }
+    return mix_in_length(merkleize(values), lst.length);
   }
 
   class CodecEntry {
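The pack/merkleize pair above mirrors the hash_tree_root scheme: serialized values are packed into 32-byte chunks, the chunk list is padded with zero chunks up to a power of two, and sibling chunks are hashed pairwise until a single root remains. A standalone sketch of that reduction, not the production code; the hash parameter stands in for the configured hashFunction (e.g. keccak256):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.function.Function;
    import net.consensys.cava.bytes.Bytes;

    class MerkleizeSketch {
      // Pad to a power of two with zero chunks, then hash sibling pairs level by level
      // until a single 32-byte root remains; the same reduction merkleize() performs.
      static Bytes merkleize(List<Bytes> chunks, Function<Bytes, Bytes> hash) {
        List<Bytes> layer = new ArrayList<>(chunks);
        if (layer.isEmpty()) {
          layer.add(Bytes.of(new byte[32]));   // empty input collapses to one zero chunk
        }
        while (Integer.bitCount(layer.size()) != 1) {
          layer.add(Bytes.of(new byte[32]));   // EMPTY_CHUNK padding
        }
        while (layer.size() > 1) {
          List<Bytes> next = new ArrayList<>();
          for (int i = 0; i < layer.size(); i += 2) {
            next.add(hash.apply(Bytes.concatenate(layer.get(i), layer.get(i + 1))));
          }
          layer = next;
        }
        return layer.get(0);
      }
    }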
diff --git a/ssz/src/main/java/org/ethereum/beacon/ssz/SSZHashSerializer.java b/ssz/src/main/java/org/ethereum/beacon/ssz/SSZHashSerializer.java
index 9cb32d47c..2b2c4ae04 100644
--- a/ssz/src/main/java/org/ethereum/beacon/ssz/SSZHashSerializer.java
+++ b/ssz/src/main/java/org/ethereum/beacon/ssz/SSZHashSerializer.java
@@ -1,5 +1,6 @@
 package org.ethereum.beacon.ssz;
 
+import net.consensys.cava.bytes.Bytes;
 import org.javatuples.Triplet;
 
 import javax.annotation.Nullable;
@@ -8,10 +9,12 @@
 import java.beans.PropertyDescriptor;
 import java.io.ByteArrayOutputStream;
 import java.lang.reflect.Method;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import static org.ethereum.beacon.ssz.SSZCodecHasher.EMPTY_CHUNK;
 import static org.ethereum.beacon.ssz.SSZSerializer.checkSSZSerializableAnnotation;
 
 /**
@@ -45,31 +48,20 @@ public SSZHashSerializer(SSZSchemeBuilder schemeBuilder, SSZCodecResolver codecR
   /** Calculates hash of the input object */
   @Override
   public byte[] hash(@Nullable Object input, Class clazz) {
-    byte[] preBakedHash;
+    byte[] hash;
     if (input instanceof List) {
-      preBakedHash = hashList((List) input);
+      hash = hashList((List) input);
     } else {
-      preBakedHash = hashImpl(input, clazz, null);
+      hash = hashImpl(input, clazz, null);
     }
-    // For the final output only (ie. not intermediate outputs), if the output is less than 32
-    // bytes, right-zero-pad it to 32 bytes.
-    byte[] res;
-    if (preBakedHash.length < HASH_LENGTH) {
-      res = new byte[HASH_LENGTH];
-      System.arraycopy(preBakedHash, 0, res, 0, preBakedHash.length);
-    } else {
-      res = preBakedHash;
-    }
-
-    return res;
+    return hash;
   }
 
   private byte[] hashImpl(@Nullable Object input, Class clazz, @Nullable String truncateField) {
     checkSSZSerializableAnnotation(clazz);
-    // Null check
     if (input == null) {
-      return EMPTY_PREFIX;
+      return EMPTY_CHUNK.toArray();
     }
 
     // Fill up map with all available method getters
@@ -103,9 +95,12 @@ private byte[] hashImpl(@Nullable Object input, Class clazz, @Nullable String tr
             String.format("Field %s doesn't exist in object %s", truncateField, input));
       }
     }
-    ByteArrayOutputStream res = new ByteArrayOutputStream();
+
+    SSZCodecHasher codecHasher = (SSZCodecHasher) codecResolver;
+    List containerValues = new ArrayList<>();
     for (SSZSchemeBuilder.SSZScheme.SSZField field : scheme.fields) {
       Object value;
+      ByteArrayOutputStream res = new ByteArrayOutputStream();
       Method getter = getters.get(field.getter);
       try {
         if (getter != null) { // We have getter
@@ -123,35 +118,24 @@
       }
 
       codecResolver.resolveEncodeFunction(field).accept(new Triplet<>(value, res, this));
+      containerValues.add(codecHasher.hash_tree_root_element(Bytes.wrap(res.toByteArray())));
     }
 
-    return res.toByteArray();
+    return codecHasher.merkleize(containerValues).toArray();
   }
 
   @Override
   public byte[] hashTruncate(@Nullable Object input, Class clazz, String field) {
-    byte[] preBakedHash;
     if (input instanceof List) {
       throw new RuntimeException("hashTruncate doesn't support lists");
     } else {
-      preBakedHash = hashImpl(input, clazz, field);
-    }
-    // For the final output only (ie. not intermediate outputs), if the output is less than 32
-    // bytes, right-zero-pad it to 32 bytes.
-    byte[] res;
-    if (preBakedHash.length < HASH_LENGTH) {
-      res = new byte[HASH_LENGTH];
-      System.arraycopy(preBakedHash, 0, res, 0, preBakedHash.length);
-    } else {
-      res = preBakedHash;
+      return hashImpl(input, clazz, field);
     }
-
-    return res;
   }
 
   private byte[] hashList(List input) {
     if (input.isEmpty()) {
-      return new byte[0];
+      return EMPTY_CHUNK.toArray();
     }
     Class internalClass = input.get(0).getClass();
     checkSSZSerializableAnnotation(internalClass);
@@ -164,8 +148,9 @@
 
     ByteArrayOutputStream res = new ByteArrayOutputStream();
     codecResolver.resolveEncodeFunction(field).accept(new Triplet<>(input, res, this));
+    SSZCodecHasher codecHasher = (SSZCodecHasher) codecResolver;
 
-    return res.toByteArray();
+    return codecHasher.mix_in_length(Bytes.wrap(res.toByteArray()), input.size()).toArray();
  }
 
   @Override
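For lists, the serializer now mixes the element count into the final hash, so two lists with identical packed contents but different lengths produce different roots. A standalone restatement of that last step, using the same cava calls as mix_in_length() above (the hash parameter is a stand-in for the configured hash function):

    import java.util.function.Function;
    import net.consensys.cava.bytes.Bytes;
    import net.consensys.cava.ssz.SSZ;
    import net.consensys.cava.units.bigints.UInt256;

    class MixInLengthSketch {
      // hash(root || uint256-little-endian(length)), as in SSZCodecHasher.mix_in_length().
      static Bytes mixInLength(Bytes root, int length, Function<Bytes, Bytes> hash) {
        Bytes len = SSZ.encodeUInt256(UInt256.valueOf(length));
        return hash.apply(Bytes.concatenate(root, len));
      }
    }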
diff --git a/ssz/src/main/java/org/ethereum/beacon/ssz/type/BytesCodec.java b/ssz/src/main/java/org/ethereum/beacon/ssz/type/BytesCodec.java
index a376293c4..85bb02dfa 100644
--- a/ssz/src/main/java/org/ethereum/beacon/ssz/type/BytesCodec.java
+++ b/ssz/src/main/java/org/ethereum/beacon/ssz/type/BytesCodec.java
@@ -71,7 +71,12 @@ public Set getSupportedClasses() {
   public void encode(Object value, SSZSchemeBuilder.SSZScheme.SSZField field, OutputStream result) {
     Bytes res = null;
     BytesValue data = (BytesValue) value;
-    res = SSZ.encodeBytes(Bytes.of(data.getArrayUnsafe()));
+    BytesType bytesType = parseFieldType(field);
+    if (bytesType.size == null) {
+      res = SSZ.encodeBytes(Bytes.of(data.getArrayUnsafe()));
+    } else {
+      res = SSZ.encodeHash(Bytes.of(data.getArrayUnsafe()));
+    }
 
     try {
       result.write(res.toArrayUnsafe());
@@ -87,7 +92,14 @@ public void encodeList(
     Bytes[] data = repackBytesList((List) (List) value);
 
     try {
-      result.write(SSZ.encodeBytesList(data).toArrayUnsafe());
+      Bytes res;
+      BytesType bytesType = parseFieldType(field);
+      if (bytesType.size == null) {
+        res = SSZ.encodeBytesList(data);
+      } else {
+        res = SSZ.encodeHashList(data);
+      }
+      result.write(res.toArrayUnsafe());
     } catch (IOException ex) {
       String error = String.format("Failed to write data from field \"%s\" to stream", field.name);
       throw new SSZException(error, ex);
@@ -105,19 +117,19 @@ public Object decode(SSZSchemeBuilder.SSZScheme.SSZField field, BytesSSZReaderPr
       switch (bytesType.size) {
         case 1:
           {
-            return Bytes1.wrap(reader.readBytes(bytesType.size).toArrayUnsafe());
+            return Bytes1.wrap(reader.readHash(bytesType.size).toArrayUnsafe());
           }
         case 20:
           {
-            return Address.wrap(BytesValue.of(reader.readBytes(bytesType.size).toArrayUnsafe()));
+            return Address.wrap(BytesValue.of(reader.readHash(bytesType.size).toArrayUnsafe()));
           }
         case 48:
           {
-            return Bytes48.wrap(reader.readBytes(bytesType.size).toArrayUnsafe());
+            return Bytes48.wrap(reader.readHash(bytesType.size).toArrayUnsafe());
           }
         case 96:
           {
-            return Bytes96.wrap(reader.readBytes(bytesType.size).toArrayUnsafe());
+            return Bytes96.wrap(reader.readHash(bytesType.size).toArrayUnsafe());
           }
       }
     } catch (Exception ex) {
@@ -133,15 +145,15 @@ public List decodeList(
       SSZSchemeBuilder.SSZScheme.SSZField field, BytesSSZReaderProxy reader) {
 
     BytesType bytesType = parseFieldType(field);
-    List bytesList = reader.readHashList(bytesType.size);
     if (bytesType.size == null) {
-      return bytesList.stream()
+      return reader.readBytesList().stream()
           .map(Bytes::toArrayUnsafe)
           .map(BytesValue::wrap)
          .collect(Collectors.toList());
     }
     List res = null;
     try {
+      List bytesList = reader.readHashList(bytesType.size);
       switch (bytesType.size) {
         case 1:
           {
diff --git a/ssz/src/main/java/org/ethereum/beacon/ssz/type/BytesPrimitive.java b/ssz/src/main/java/org/ethereum/beacon/ssz/type/BytesPrimitive.java
index bbf68509e..b65dfc11b 100644
--- a/ssz/src/main/java/org/ethereum/beacon/ssz/type/BytesPrimitive.java
+++ b/ssz/src/main/java/org/ethereum/beacon/ssz/type/BytesPrimitive.java
@@ -6,6 +6,7 @@
 import net.consensys.cava.ssz.SSZException;
 import org.ethereum.beacon.ssz.SSZSchemeBuilder;
 import org.ethereum.beacon.ssz.SSZSchemeException;
+
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.HashSet;
@@ -85,7 +86,11 @@ public void encode(Object value, SSZSchemeBuilder.SSZScheme.SSZField field, Outp
         }
       case BYTES:
         {
-          res = SSZ.encodeByteArray(data);
+          if (byteType.size == null) {
+            res = SSZ.encodeByteArray(data);
+          } else {
+            res = SSZ.encodeHash(Bytes.wrap(data)); // w/o length prefix
+          }
           break;
         }
       default:
@@ -122,7 +127,11 @@ public void encodeList(
         }
       case ADDRESS:
         {
-          result.write(SSZ.encodeAddressList(data).toArrayUnsafe());
+          if (bytesType.size == null) {
+            result.write(SSZ.encodeAddressList(data).toArrayUnsafe());
+          } else {
+            result.write(SSZ.encodeHashList(data).toArrayUnsafe());
+          }
           break;
         }
       default:
@@ -144,7 +153,7 @@ public Object decode(SSZSchemeBuilder.SSZScheme.SSZField field, BytesSSZReaderPr
         {
           return (bytesType.size == null)
               ? reader.readBytes().toArrayUnsafe()
-              : reader.readBytes(bytesType.size).toArrayUnsafe();
+              : reader.readHash(bytesType.size).toArrayUnsafe();
         }
       case HASH:
         {
@@ -167,7 +176,11 @@ public List decodeList(
     switch (bytesType.type) {
       case BYTES:
         {
-          return (List) (List) reader.readByteArrayList();
+          if (bytesType.size == null) {
+            return (List) (List) reader.readByteArrayList();
+          } else {
+            return (List) (List) reader.readHashList(bytesType.size);
+          }
         }
       case HASH:
         {
diff --git a/ssz/src/test/java/org/ethereum/beacon/ssz/SSZSerializerTest.java b/ssz/src/test/java/org/ethereum/beacon/ssz/SSZSerializerTest.java
index 1029341dd..4aac2e809 100644
--- a/ssz/src/test/java/org/ethereum/beacon/ssz/SSZSerializerTest.java
+++ b/ssz/src/test/java/org/ethereum/beacon/ssz/SSZSerializerTest.java
@@ -207,7 +207,7 @@ public void nullListTest() {
   public void shouldWorkLikeCavaWithObjects() {
     Bytes bytes =
         fromHexString(
-            "0x00000003426F62040000000000000000000000000000000000000000000000000000011F71B70768");
+            "0x03000000426F62046807B7711F010000000000000000000000000000000000000000000000000000");
     SomeObject readObject =
         SSZ.decode(bytes, r -> new SomeObject(r.readString(), r.readInt8(), r.readBigInteger(256)));
diff --git a/versions.gradle b/versions.gradle
index 05b429281..529a087ee 100644
--- a/versions.gradle
+++ b/versions.gradle
@@ -1,6 +1,5 @@
 ext {
-    // After that version snapshots are compiled with new JDK w/o full backward compatibility
-    cavaVersion = '0.6.0-5618C0-snapshot'
+    cavaVersion = '1.0.0-876579-snapshot'
     jacksonVersion = '2.9.8'
     log4j2Version = '2.11.2'
 }
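The remaining changes track the cava 1.0.0 encoding conventions: fixed-size byte fields are written and read with the hash codecs (encodeHash/readHash, no length prefix), while variable-size values keep a length prefix that is now little-endian. The updated shouldWorkLikeCavaWithObjects vector above decomposes accordingly (byte offsets added for illustration):

    // 0x03000000              -> uint32 length prefix of the string, little-endian: 3
    // 0x426F62                -> "Bob"
    // 0x04                    -> int8 value 4
    // 0x6807B7711F010000...00 -> 256-bit integer 0x011F71B70768, little-endian (32 bytes)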