Skip to content

Commit

Permalink
Merge pull request #95 from harmony-dev/fix/ssz-updates
Browse files Browse the repository at this point in the history
Ssz updates
  • Loading branch information
mkalinin authored Mar 6, 2019
2 parents 77490de + b2d340b commit b000690
Show file tree
Hide file tree
Showing 10 changed files with 149 additions and 119 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,35 +19,28 @@
public class SSZObjectHasher implements ObjectHasher<Hash32> {

private final SSZHashSerializer sszHashSerializer;
private final Function<BytesValue, Hash32> hashFunction;

SSZObjectHasher(SSZHashSerializer sszHashSerializer, Function<BytesValue, Hash32> hashFunction) {
SSZObjectHasher(SSZHashSerializer sszHashSerializer) {
this.sszHashSerializer = sszHashSerializer;
this.hashFunction = hashFunction;
}

public static SSZObjectHasher create(Function<BytesValue, Hash32> hashFunction) {
SSZHashSerializer sszHashSerializer =
SSZHashSerializers.createWithBeaconChainTypes(hashFunction, true);
return new SSZObjectHasher(sszHashSerializer, hashFunction);
return new SSZObjectHasher(sszHashSerializer);
}

@Override
public Hash32 getHash(Object input) {
if (input instanceof List) {
return Hash32.wrap(Bytes32.wrap(sszHashSerializer.hash(input)));
} else {
return hashFunction.apply(BytesValue.wrap(sszHashSerializer.hash(input)));
}
return Hash32.wrap(Bytes32.wrap(sszHashSerializer.hash(input)));
}

@Override
public Hash32 getHashTruncate(Object input, String field) {
if (input instanceof List) {
throw new RuntimeException("Lists are not supported in truncated hash");
} else {
return hashFunction.apply(
BytesValue.wrap(sszHashSerializer.hashTruncate(input, input.getClass(), field)));
return Hash32.wrap(Bytes32.wrap(sszHashSerializer.hashTruncate(input, input.getClass(), field)));
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ private DepositInput createDepositInput() {
public void testHashTreeRoot1() {
  SpecHelpers specHelpers = SpecHelpers.createWithSSZHasher(SpecConstants.DEFAULT, () -> 0L);
  // Expected root for the mix-in-length hashing scheme (stale pre-update value removed).
  Hash32 expected =
      Hash32.fromHexString("0x1a2017aea008e5bb8b3eb79d031f14347018353f1c58fc3a54e9fc7af7ab2fe1");
  Hash32 actual = specHelpers.hash_tree_root(createDepositInput());
  assertEquals(expected, actual);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ public class SSZObjectHasherTest {
public void setup() {
  SSZHashSerializer sszHashSerializer =
      SSZHashSerializers.createWithBeaconChainTypes(Hashes::keccak256, false);
  // Hasher no longer takes a hash function of its own — duplicated old-ctor line removed.
  sszHasher = new SSZObjectHasher(sszHashSerializer);
}

@Test
Expand All @@ -46,7 +46,7 @@ public void bitfieldTest() {
BytesValue hash = sszHasher.getHash(bitfield);
assertEquals(
BytesValue.fromHexString(
"A0B1BE2F50398CA7FE11EA48E5AFE9F89F758EC815E5C12BE21315AF6D34FA1D"),
"0x02000000abcd0000000000000000000000000000000000000000000000000000"),
hash);
}

Expand All @@ -55,7 +55,7 @@ public void SignatureTest() {
BytesValue hash = sszHasher.getHash(DEFAULT_SIG);
assertEquals(
BytesValue.fromHexString(
"D75724A07F4EFB3B456408DD6C36C70A6DF189FAE6A09F7AD0C848F0D3341290"),
"0x3d15cc04a0a366f8e0bc034db6df008f9eaf30d7bd0b1b40c4bd7bd141bd42f7"),
hash);
}

Expand All @@ -75,7 +75,7 @@ public void simpleTest() {
BytesValue hash = sszHasher.getHash(attestationRecord);
assertEquals(
BytesValue.fromHexString(
"740620beb3f42033473a7adf01b5f115ec0a72bf8c97eb36f732a6158ff8775d"),
"0xbfde5860f2d9e9d7e8b2a0d5d3630a09b1330197d09a359470601bae5b3839ae"),
hash);
}

Expand All @@ -96,14 +96,14 @@ public void simpleTruncateTest() {
BytesValue hash1 = sszHasher.getHashTruncate(attestationRecord, "justifiedBlockHash");
assertEquals(
BytesValue.fromHexString(
"0x8d5fc215a3e8c2a67c44e8c43711ce1396315366f013892cce63ad88b8e8eb9e"),
"0x945b6a8eac7bd3611f6fb452fd7f63d77ce3672752df45443beb0e0169bf33cb"),
hash1);

// Sig only removed
BytesValue hash2 = sszHasher.getHashTruncate(attestationRecord, "aggregateSig");
assertEquals(
BytesValue.fromHexString(
"0x5df5425a3581f24ec3f8508c44820d2c70c89299cf217a3a5d8e126e51b6e4ed"),
"0xae3f28da5903192bff0472fc12baf3acb8c2554606c2449f833d2079188eb871"),
hash2);

boolean fired = false;
Expand Down Expand Up @@ -143,7 +143,7 @@ public void list32Test() {
BytesValue hash = sszHasher.getHash(attestationRecord);
assertEquals(
BytesValue.fromHexString(
"740620beb3f42033473a7adf01b5f115ec0a72bf8c97eb36f732a6158ff8775d"),
"0xbfde5860f2d9e9d7e8b2a0d5d3630a09b1330197d09a359470601bae5b3839ae"),
hash);
}

Expand All @@ -159,7 +159,7 @@ public void smallItemsListTest() {
BytesValue hash = sszHasher.getHash(someObject);
assertEquals(
BytesValue.fromHexString(
"BD4AB28F883B78BF4C5B3652AFCF272EAD9026C3361821A0420777A9B3C20425"),
"0xb1a18810e9b465f89b07c45716aef51cb243892a9ca24b37a4c322752fb905d6"),
hash);
}

Expand All @@ -172,11 +172,11 @@ public void smallItemTest() {
BytesValue hash2 = sszHasher.getHash(anotherObject2);
assertEquals(
BytesValue.fromHexString(
"FB5BAAECAB62C516763CEA2DFBA17FBBC24907E4E3B0BE426BDE71BE89AF495F"),
"0x0100000000000000000000000000000000000000000000000000000000000000"),
hash1);
assertEquals(
BytesValue.fromHexString(
"B7047395B0D5A9C70336FDE7E40DE2BB369FE67C8E762A35641E209B7338FDD9"),
"0x0200000000000000000000000000000000000000000000000000000000000000"),
hash2);
}

Expand All @@ -190,7 +190,7 @@ public void listTest() {
BytesValue hash = sszHasher.getHash(anotherObjects);
assertEquals(
BytesValue.fromHexString(
"a9bb69cad9fb0d9a9963bf9a32f09b9c306bed6f6c95fff3e5d625fd9370646e"),
"0x6d3a1eb14c6b37eb4645044d0c1bf38284b408eab24e89238a8058f3b921e5d9"),
hash);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@
import org.ethereum.beacon.ssz.annotation.SSZ;
import org.ethereum.beacon.ssz.annotation.SSZSerializable;
import tech.pegasys.artemis.ethereum.core.Hash32;
import tech.pegasys.artemis.util.bytes.Bytes48;
import tech.pegasys.artemis.util.bytes.Bytes96;

/**
* An input parameters of deposit contract.
Expand All @@ -30,9 +28,7 @@ public class DepositInput {
@SSZ private final BLSSignature proofOfPossession;

public DepositInput(
BLSPubkey pubKey,
Hash32 withdrawalCredentials,
BLSSignature proofOfPossession) {
BLSPubkey pubKey, Hash32 withdrawalCredentials, BLSSignature proofOfPossession) {
this.pubKey = pubKey;
this.withdrawalCredentials = withdrawalCredentials;
this.proofOfPossession = proofOfPossession;
Expand Down
118 changes: 75 additions & 43 deletions ssz/src/main/java/org/ethereum/beacon/ssz/SSZCodecHasher.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,16 @@
package org.ethereum.beacon.ssz;

import net.consensys.cava.bytes.Bytes;
import net.consensys.cava.ssz.BytesSSZReaderProxy;
import net.consensys.cava.ssz.SSZ;
import net.consensys.cava.ssz.SSZException;
import net.consensys.cava.units.bigints.UInt256;
import org.ethereum.beacon.ssz.type.SSZCodec;
import org.ethereum.beacon.ssz.type.SubclassCodec;
import org.javatuples.Pair;
import org.javatuples.Triplet;
import tech.pegasys.artemis.util.bytes.BytesValue;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
Expand All @@ -12,15 +23,6 @@
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import net.consensys.cava.bytes.Bytes;
import net.consensys.cava.ssz.BytesSSZReaderProxy;
import net.consensys.cava.ssz.SSZ;
import net.consensys.cava.ssz.SSZException;
import org.ethereum.beacon.ssz.type.SSZCodec;
import org.ethereum.beacon.ssz.type.SubclassCodec;
import org.javatuples.Pair;
import org.javatuples.Triplet;
import tech.pegasys.artemis.util.bytes.BytesValue;

/**
* Implementation of {@link SSZCodecResolver} which implements SSZ Hash function
Expand All @@ -31,9 +33,9 @@
*/
public class SSZCodecHasher implements SSZCodecResolver {

private static final int SSZ_CHUNK_SIZE = 32;
static final int BYTES_PER_CHUNK = 32;

private static final Bytes EMPTY_CHUNK = Bytes.of(new byte[SSZ_CHUNK_SIZE]);
static final Bytes EMPTY_CHUNK = Bytes.of(new byte[BYTES_PER_CHUNK]);

private Function<Bytes, Bytes> hashFunction;

Expand Down Expand Up @@ -230,36 +232,49 @@ public void registerCodec(Set<Class> classes, Set<String> types, SSZCodec codec)
}

/**
* Merkle tree hash of a list of homogenous, non-empty items
* Given ordered objects of the same basic type, serialize them, pack them into
* BYTES_PER_CHUNK-byte chunks, right-pad the last chunk with zero bytes, and return the chunks.
*
* @param lst
* @return
*/
private Bytes merkle_hash(Bytes[] lst) {
// Store length of list (to compensate for non-bijectiveness of padding)
Bytes dataLen = SSZ.encodeInt32(lst.length);

List<Bytes> pack(Bytes[] lst) {
List<Bytes> chunkz = new ArrayList<>();
// Handle empty list case
if (dataLen.isZero()) {
if (lst.length == 0) {
chunkz.add(EMPTY_CHUNK);
} else if (lst[0].size() < SSZ_CHUNK_SIZE) {
// See how many items fit in a chunk
int itemsPerChunk = SSZ_CHUNK_SIZE / lst[0].size();
// Build a list of chunks based on the number of items in the chunk
for (int i = 0; i < lst.length; i += itemsPerChunk) {
int chunkLen = Math.min(itemsPerChunk, lst.length - i);
Bytes[] lstSlice = new Bytes[chunkLen];
System.arraycopy(lst, i, lstSlice, 0, chunkLen);
Bytes chunkBeforePad = Bytes.concatenate(lstSlice);
chunkz.add(zpad(chunkBeforePad, SSZ_CHUNK_SIZE));
}
} else {
// Leave large items alone
chunkz.addAll(Arrays.asList(lst));
int currentItem = 0;
int itemPosition = 0;
while (currentItem < lst.length) {
int chunkPosition = 0;
byte[] currentChunk = new byte[BYTES_PER_CHUNK];
while (chunkPosition < BYTES_PER_CHUNK) {
int len =
Math.min(BYTES_PER_CHUNK - chunkPosition, lst[currentItem].size() - itemPosition);
System.arraycopy(
lst[currentItem].toArray(), itemPosition, currentChunk, chunkPosition, len);
chunkPosition += len;
itemPosition += len;
if (itemPosition == lst[currentItem].size()) {
++currentItem;
itemPosition = 0;
}
if (currentItem == lst.length || chunkPosition == BYTES_PER_CHUNK) {
chunkz.add(Bytes.wrap(currentChunk));
chunkPosition = BYTES_PER_CHUNK;
}
}
++currentItem;
}
}

// Merkleise
return chunkz;
}

/**
* Given ordered BYTES_PER_CHUNK-byte chunks, if necessary append zero chunks so that the number
* of chunks is a power of two, Merkleize the chunks, and return the root.
*/
Bytes merkleize(List<Bytes> chunkz) {
for (int i = chunkz.size(); i < next_power_of_2(chunkz.size()); ++i) {
chunkz.add(EMPTY_CHUNK);
}
Expand All @@ -272,7 +287,7 @@ private Bytes merkle_hash(Bytes[] lst) {
chunkz = tempChunkz;
}

return hashFunction.apply(Bytes.concatenate(chunkz.get(0), dataLen));
return chunkz.get(0);
}

private long next_power_of_2(int x) {
Expand All @@ -294,24 +309,41 @@ private int bit_length(int val) {
return 0;
}

private Bytes zpad(Bytes input, int length) {
/**
* Given a Merkle root and a length (uint256 little-endian serialization) return
* hash(root + length).
*/
Bytes mix_in_length(Bytes root, int length) {
Bytes len = SSZ.encodeUInt256(UInt256.valueOf(length));
return hashFunction.apply(Bytes.concatenate(root, len));
}

/** Right-pads {@code input} with zero bytes so the result is exactly {@code length} bytes. */
Bytes zpad(Bytes input, int length) {
  byte[] zeros = new byte[length - input.size()];
  return Bytes.concatenate(input, Bytes.wrap(zeros));
}

/**
 * hash_tree_root of a list of basic values: merkleize the packed chunks, then mix in the
 * list length. (Dead leftover `Bytes[] res` allocation from the old implementation removed.)
 */
private Bytes hash_tree_root_list(Bytes[] lst) {
  return mix_in_length(merkleize(pack(lst)), lst.length);
}

/**
 * hash_tree_root of a container: merkleize the roots of its fields.
 * (Interleaved pre-merge `res[i] = ...` / `merkle_hash` lines removed.)
 */
private Bytes hash_tree_root_container(Bytes[] lst) {
  List<Bytes> values = new ArrayList<>();
  for (int i = 0; i < lst.length; ++i) {
    values.add(hash_tree_root_element(lst[i]));
  }
  return merkleize(values);
}

private Bytes hash_tree_root_element(Bytes el) {
if (el.size() <= SSZ_CHUNK_SIZE) {
return el;
} else {
return hashFunction.apply(el);
Bytes hash_tree_root_element(Bytes el) {
return merkleize(pack(new Bytes[]{el}));
}

/**
 * hash_tree_root of a list of containers: merkleize each container's root,
 * then mix the list length into the resulting root.
 */
private Bytes hash_tree_root_containers_list(Bytes[] lst) {
  List<Bytes> roots = new ArrayList<>();
  for (Bytes container : lst) {
    roots.add(hash_tree_root_element(container));
  }
  return mix_in_length(merkleize(roots), lst.length);
}

class CodecEntry {
Expand Down
Loading

0 comments on commit b000690

Please sign in to comment.