Use hash_tree_root to generate ShardBlockHeader.body_root
Include the length mix-in in the proof.
hwwhww committed Jul 23, 2020
1 parent 819bb6d commit e7f070d
Showing 5 changed files with 40 additions and 21 deletions.
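
Background for the diff below: SSZ computes `hash_tree_root` of a `List`/`ByteList` by mixing the list length into the root of the data chunks, i.e. `hash(chunks_root, length)`. The removed helper `get_block_data_merkle_root` deliberately returned the root without that length mix-in; switching to plain `hash_tree_root` means a chunk proof against the new root must climb one extra layer, with the serialized length as the final sibling, hence `CUSTODY_RESPONSE_DEPTH + 1`. A minimal sketch of the mechanics (illustrative only; it uses SHA-256 directly and simplified signatures rather than the spec's typed `hash`, `mix_in_length`, and `is_valid_merkle_branch`):

```python
from hashlib import sha256
from typing import Sequence


def hash_pair(a: bytes, b: bytes) -> bytes:
    # SSZ merkleization: concatenate two 32-byte nodes and hash them.
    return sha256(a + b).digest()


def mix_in_length(chunks_root: bytes, length: int) -> bytes:
    # Root of an SSZ List: hash(root_of_data_chunks, length),
    # with the length serialized as a 32-byte little-endian integer.
    return hash_pair(chunks_root, length.to_bytes(32, 'little'))


def is_valid_merkle_branch(leaf: bytes, branch: Sequence[bytes],
                           depth: int, index: int, root: bytes) -> bool:
    # Same fold as the phase 0 helper: hash the leaf with each sibling
    # from the bottom layer upwards and compare against the expected root.
    value = leaf
    for i in range(depth):
        if (index >> i) & 1:
            value = hash_pair(branch[i], value)
        else:
            value = hash_pair(value, branch[i])
    return value == root
```

Because `hash_tree_root(data)` sits one layer above the bare chunks root, a branch proven against it is the in-tree branch plus one extra sibling, `len(data).to_bytes(32, 'little')`, verified with `depth=CUSTODY_RESPONSE_DEPTH + 1` while `index` stays the chunk index (the length leaf is always the right-hand sibling at the top).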
6 changes: 0 additions & 6 deletions setup.py
@@ -225,12 +225,6 @@ def wrapper(*args, **kw): # type: ignore

PHASE1_SUNDRY_FUNCTIONS = '''
-def get_block_data_merkle_root(data: ByteList) -> Root:
-    # To get the Merkle root of the block data, we need the Merkle root without the length mix-in
-    # The below implements this in the Remerkleable framework
-    return Root(data.get_backing().get_left().merkle_root())
_get_start_shard = get_start_shard
get_start_shard = cache_this(
lambda state, slot: (state.validators.hash_tree_root(), slot),
12 changes: 3 additions & 9 deletions specs/phase1/custody-game.md
@@ -25,7 +25,6 @@
- [`CustodyKeyReveal`](#custodykeyreveal)
- [`EarlyDerivedSecretReveal`](#earlyderivedsecretreveal)
- [Helpers](#helpers)
-  - [`get_block_data_merkle_root`](#get_block_data_merkle_root)
- [`replace_empty_or_append`](#replace_empty_or_append)
- [`legendre_bit`](#legendre_bit)
- [`get_custody_atoms`](#get_custody_atoms)
@@ -127,7 +126,7 @@ class CustodyChunkResponse(Container):
    challenge_index: uint64
    chunk_index: uint64
    chunk: ByteVector[BYTES_PER_CUSTODY_CHUNK]
-    branch: Vector[Root, CUSTODY_RESPONSE_DEPTH]
+    branch: Vector[Root, CUSTODY_RESPONSE_DEPTH + 1]
```

#### `CustodySlashing`
@@ -180,13 +179,8 @@ class EarlyDerivedSecretReveal(Container):
    mask: Bytes32
```


## Helpers

-### `get_block_data_merkle_root`
-
-`get_block_data_merkle_root(data: ByteList) -> Root` is the function that returns the Merkle root of the block data without the length mix-in.

### `replace_empty_or_append`

```python
@@ -381,7 +375,7 @@ def process_chunk_challenge_response(state: BeaconState,
    assert is_valid_merkle_branch(
        leaf=hash_tree_root(response.chunk),
        branch=response.branch,
-        depth=CUSTODY_RESPONSE_DEPTH,
+        depth=CUSTODY_RESPONSE_DEPTH + 1,  # Add 1 for the List length mix-in
        index=response.chunk_index,
        root=challenge.data_root,
    )
@@ -523,7 +517,7 @@ def process_custody_slashing(state: BeaconState, signed_custody_slashing: Signed
    assert hash_tree_root(shard_transition) == attestation.data.shard_transition_root
    # Verify that the provided data matches the shard-transition
    assert (
-        get_block_data_merkle_root(custody_slashing.data)
+        hash_tree_root(custody_slashing.data)
        == shard_transition.shard_data_roots[custody_slashing.data_index]
    )
    assert len(custody_slashing.data) == shard_transition.shard_block_lengths[custody_slashing.data_index]
2 changes: 1 addition & 1 deletion specs/phase1/validator.md
@@ -294,7 +294,7 @@ def get_shard_transition_fields(
    for slot in offset_slots:
        if slot in shard_block_slots:
            shard_block = shard_blocks[shard_block_slots.index(slot)]
-            shard_data_roots.append(get_block_data_merkle_root(shard_block.message.body))
+            shard_data_roots.append(hash_tree_root(shard_block.message.body))
        else:
            shard_block = SignedShardBlock(message=ShardBlock(slot=slot, shard=shard))
            shard_data_roots.append(Root())
12 changes: 7 additions & 5 deletions tests/core/pyspec/eth2spec/test/helpers/custody.py
@@ -113,7 +113,7 @@ def get_valid_chunk_challenge(spec, state, attestation, shard_transition, data_i
def custody_chunkify(spec, x):
    chunks = [bytes(x[i:i + spec.BYTES_PER_CUSTODY_CHUNK]) for i in range(0, len(x), spec.BYTES_PER_CUSTODY_CHUNK)]
    chunks[-1] = chunks[-1].ljust(spec.BYTES_PER_CUSTODY_CHUNK, b"\0")
-    return chunks
+    return [ByteVector[spec.BYTES_PER_CUSTODY_CHUNK](c) for c in chunks]


def build_proof(anchor, leaf_index):
@@ -149,12 +149,14 @@ def get_valid_custody_chunk_response(spec, state, chunk_challenge, challenge_ind

    chunk_index = chunk_challenge.chunk_index

-    data_branch = build_proof(custody_data_block.get_backing().get_left(), chunk_index + 2**spec.CUSTODY_RESPONSE_DEPTH)
+    leaf_index = chunk_index + 2**spec.CUSTODY_RESPONSE_DEPTH
+    serialized_length = (len(custody_data_block)).to_bytes(32, 'little')
+    data_branch = build_proof(custody_data_block.get_backing().get_left(), leaf_index) + [serialized_length]

    return spec.CustodyChunkResponse(
        challenge_index=challenge_index,
        chunk_index=chunk_index,
-        chunk=ByteVector[spec.BYTES_PER_CUSTODY_CHUNK](chunks[chunk_index]),
+        chunk=chunks[chunk_index],
        branch=data_branch,
    )

@@ -165,7 +167,7 @@ def get_custody_test_vector(bytelength, offset=0):


def get_sample_shard_transition(spec, start_slot, block_lengths):
-    b = [spec.get_block_data_merkle_root(ByteList[spec.MAX_SHARD_BLOCK_SIZE](get_custody_test_vector(x)))
+    b = [spec.hash_tree_root(ByteList[spec.MAX_SHARD_BLOCK_SIZE](get_custody_test_vector(x)))
         for x in block_lengths]
    shard_transition = spec.ShardTransition(
        start_slot=start_slot,
@@ -200,5 +202,5 @@ def get_custody_slashable_shard_transition(spec, start_slot, block_lengths, cust
    slashable_test_vector = get_custody_slashable_test_vector(spec, custody_secret,
                                                              block_lengths[0], slashable=slashable)
    block_data = ByteList[spec.MAX_SHARD_BLOCK_SIZE](slashable_test_vector)
-    shard_transition.shard_data_roots[0] = spec.get_block_data_merkle_root(block_data)
+    shard_transition.shard_data_roots[0] = spec.hash_tree_root(block_data)
    return shard_transition, slashable_test_vector
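
As a sanity check on the helper construction above (the branch inside the chunks subtree plus the serialized length appended as the last sibling), here is a self-contained toy example with made-up sizes (depth 2, four 32-byte chunks); it is not part of the commit and only loosely mirrors `build_proof(...) + [serialized_length]`:

```python
from hashlib import sha256


def h(a: bytes, b: bytes) -> bytes:
    return sha256(a + b).digest()


DEPTH = 2                                   # stand-in for CUSTODY_RESPONSE_DEPTH
chunks = [bytes([i]) * 32 for i in range(2 ** DEPTH)]
length_leaf = (3).to_bytes(32, 'little')    # pretend the list holds 3 items

# Merkleize the chunks subtree, keeping every layer so siblings can be looked up.
layers = [chunks]
while len(layers[-1]) > 1:
    prev = layers[-1]
    layers.append([h(prev[i], prev[i + 1]) for i in range(0, len(prev), 2)])
chunks_root = layers[-1][0]
root = h(chunks_root, length_leaf)          # SSZ length mix-in

chunk_index = 1
# Siblings inside the chunks subtree, bottom-up, then the length leaf on top.
branch = [layers[d][(chunk_index >> d) ^ 1] for d in range(DEPTH)] + [length_leaf]

# Fold back up, as is_valid_merkle_branch does with depth = DEPTH + 1.
value = chunks[chunk_index]
for d, sibling in enumerate(branch):
    value = h(sibling, value) if (chunk_index >> d) & 1 else h(value, sibling)
assert value == root
```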
@@ -248,6 +248,35 @@ def test_custody_response(spec, state):
    yield from run_custody_chunk_response_processing(spec, state, custody_response)


+@with_all_phases_except([PHASE0])
+@spec_state_test
+def test_custody_response_chunk_index_2(spec, state):
+    transition_to(spec, state, state.slot + spec.SLOTS_PER_EPOCH)
+
+    shard = 0
+    offset_slots = spec.get_offset_slots(state, shard)
+    shard_transition = get_sample_shard_transition(spec, state.slot, [2**15 // 3] * len(offset_slots))
+    attestation = get_valid_on_time_attestation(spec, state, index=shard, signed=True,
+                                                shard_transition=shard_transition)
+
+    transition_to(spec, state, state.slot + spec.MIN_ATTESTATION_INCLUSION_DELAY)
+
+    _, _, _ = run_attestation_processing(spec, state, attestation)
+
+    transition_to(spec, state, state.slot + spec.SLOTS_PER_EPOCH * (spec.EPOCHS_PER_CUSTODY_PERIOD - 1))
+
+    challenge = get_valid_chunk_challenge(spec, state, attestation, shard_transition, chunk_index=2)
+
+    _, _, _ = run_chunk_challenge_processing(spec, state, challenge)
+
+    chunk_challenge_index = state.custody_chunk_challenge_index - 1
+
+    custody_response = get_valid_custody_chunk_response(
+        spec, state, challenge, chunk_challenge_index, block_length_or_custody_data=2**15 // 3)
+
+    yield from run_custody_chunk_response_processing(spec, state, custody_response)


@with_all_phases_except([PHASE0])
@spec_state_test
def test_custody_response_multiple_epochs(spec, state):
