Use hash_tree_root to generate shard block body root #1980

Merged · 3 commits · Jul 29, 2020
6 changes: 0 additions & 6 deletions setup.py
@@ -225,12 +225,6 @@ def wrapper(*args, **kw): # type: ignore

PHASE1_SUNDRY_FUNCTIONS = '''

def get_block_data_merkle_root(data: ByteList) -> Root:
# To get the Merkle root of the block data, we need the Merkle root without the length mix-in
# The below implements this in the Remerkleable framework
return Root(data.get_backing().get_left().merkle_root())


_get_start_shard = get_start_shard
get_start_shard = cache_this(
lambda state, slot: (state.validators.hash_tree_root(), slot),
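The comment in the removed helper is the crux of this PR: for an SSZ `ByteList`, `hash_tree_root` is the root of the data chunks with the list length mixed in, whereas `get_block_data_merkle_root` returned the chunk root alone. A minimal sketch of that relationship, assuming standard SSZ merkleization and the spec's SHA-256 hash (the `mix_in_length` name here is illustrative, not part of the PR):

```python
from hashlib import sha256


def mix_in_length(chunks_root: bytes, length: int) -> bytes:
    # SSZ merkleization of a List/ByteList: hash the chunk root together with
    # the length serialized as a 32-byte little-endian integer.
    return sha256(chunks_root + length.to_bytes(32, 'little')).digest()


# Expected relationship for a ByteList `data`:
#   hash_tree_root(data) == mix_in_length(get_block_data_merkle_root(data), len(data))
# A proof against hash_tree_root(data) therefore needs exactly one extra branch
# node (the serialized length), which is where the CUSTODY_RESPONSE_DEPTH + 1
# changes below come from.
```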
12 changes: 3 additions & 9 deletions specs/phase1/custody-game.md
@@ -25,7 +25,6 @@
- [`CustodyKeyReveal`](#custodykeyreveal)
- [`EarlyDerivedSecretReveal`](#earlyderivedsecretreveal)
- [Helpers](#helpers)
- [`get_block_data_merkle_root`](#get_block_data_merkle_root)
- [`replace_empty_or_append`](#replace_empty_or_append)
- [`legendre_bit`](#legendre_bit)
- [`get_custody_atoms`](#get_custody_atoms)
@@ -127,7 +126,7 @@ class CustodyChunkResponse(Container):
challenge_index: uint64
chunk_index: uint64
chunk: ByteVector[BYTES_PER_CUSTODY_CHUNK]
branch: Vector[Root, CUSTODY_RESPONSE_DEPTH]
branch: Vector[Root, CUSTODY_RESPONSE_DEPTH + 1]
```

#### `CustodySlashing`
@@ -180,13 +179,8 @@ class EarlyDerivedSecretReveal(Container):
mask: Bytes32
```


## Helpers

### `get_block_data_merkle_root`

`get_block_data_merkle_root(data: ByteList) -> Root` is the function that returns the Merkle root of the block data without the length mix-in.

### `replace_empty_or_append`

```python
@@ -381,7 +375,7 @@ def process_chunk_challenge_response(state: BeaconState,
assert is_valid_merkle_branch(
leaf=hash_tree_root(response.chunk),
branch=response.branch,
depth=CUSTODY_RESPONSE_DEPTH,
depth=CUSTODY_RESPONSE_DEPTH + 1, # Add 1 for the List length mix-in
index=response.chunk_index,
root=challenge.data_root,
)
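For context, the phase0 `is_valid_merkle_branch` helper that this call relies on is essentially the following (reproduced here as a sketch using the spec's SSZ types and `hash`; the phase0 spec is authoritative). With `depth=CUSTODY_RESPONSE_DEPTH + 1`, the loop consumes one more sibling than before:

```python
def is_valid_merkle_branch(leaf: Bytes32, branch: Sequence[Bytes32], depth: uint64, index: uint64, root: Root) -> bool:
    value = leaf
    for i in range(depth):
        if index // (2**i) % 2:
            value = hash(branch[i] + value)
        else:
            value = hash(value + branch[i])
    # In the custody response case, the sibling at i == depth - 1 is the serialized
    # list length, so the result is compared against hash_tree_root(data) rather
    # than the bare chunk root.
    return value == root
```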
@@ -523,7 +517,7 @@ def process_custody_slashing(state: BeaconState, signed_custody_slashing: Signed
assert hash_tree_root(shard_transition) == attestation.data.shard_transition_root
# Verify that the provided data matches the shard-transition
assert (
get_block_data_merkle_root(custody_slashing.data)
hash_tree_root(custody_slashing.data)
== shard_transition.shard_data_roots[custody_slashing.data_index]
)
assert len(custody_slashing.data) == shard_transition.shard_block_lengths[custody_slashing.data_index]
2 changes: 1 addition & 1 deletion specs/phase1/validator.md
@@ -294,7 +294,7 @@ def get_shard_transition_fields(
for slot in offset_slots:
if slot in shard_block_slots:
shard_block = shard_blocks[shard_block_slots.index(slot)]
shard_data_roots.append(get_block_data_merkle_root(shard_block.message.body))
shard_data_roots.append(hash_tree_root(shard_block.message.body))
else:
shard_block = SignedShardBlock(message=ShardBlock(slot=slot, shard=shard))
shard_data_roots.append(Root())
12 changes: 7 additions & 5 deletions tests/core/pyspec/eth2spec/test/helpers/custody.py
@@ -113,7 +113,7 @@ def get_valid_chunk_challenge(spec, state, attestation, shard_transition, data_i
def custody_chunkify(spec, x):
chunks = [bytes(x[i:i + spec.BYTES_PER_CUSTODY_CHUNK]) for i in range(0, len(x), spec.BYTES_PER_CUSTODY_CHUNK)]
chunks[-1] = chunks[-1].ljust(spec.BYTES_PER_CUSTODY_CHUNK, b"\0")
return chunks
return [ByteVector[spec.BYTES_PER_CUSTODY_CHUNK](c) for c in chunks]


def build_proof(anchor, leaf_index):
@@ -149,12 +149,14 @@ def get_valid_custody_chunk_response(spec, state, chunk_challenge, challenge_ind

chunk_index = chunk_challenge.chunk_index

data_branch = build_proof(custody_data_block.get_backing().get_left(), chunk_index + 2**spec.CUSTODY_RESPONSE_DEPTH)
leaf_index = chunk_index + 2**spec.CUSTODY_RESPONSE_DEPTH
serialized_length = (len(custody_data_block)).to_bytes(32, 'little')
data_branch = build_proof(custody_data_block.get_backing().get_left(), leaf_index) + [serialized_length]
Contributor:

Do we still need the backing from remerkleable? What exactly does get_left() give you here?

Contributor Author:

AFAIK get_right() is the mix-in length and the left side is the block data; see @protolambda's example here: https://gist.github.com/protolambda/8384b1305613d064ba962214d08396f0

We could also use eth2spec.utils.merkle_minimal.calc_merkle_tree_from_leaves and get_merkle_proof to create the proof. If we want to include proof generation in the validator guide, using the get_merkle_proof helper would be simpler. What do you think?

Contributor:

In the validator guide, I think we should be explicit and not use underlying library structure functions. As for the spec tests, I'm happy to use the remerkleable backings because they are probably much more efficient.

return spec.CustodyChunkResponse(
challenge_index=challenge_index,
chunk_index=chunk_index,
chunk=ByteVector[spec.BYTES_PER_CUSTODY_CHUNK](chunks[chunk_index]),
chunk=chunks[chunk_index],
branch=data_branch,
)
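The reviewer's suggestion above (building the proof with `eth2spec.utils.merkle_minimal` instead of the remerkleable backing) could look roughly like the sketch below. This is an illustration only: it assumes `calc_merkle_tree_from_leaves(leaves, layer_count)` returns one list per tree layer with the leaves first, and the function name, padding, and indexing details are not part of this PR.

```python
from eth2spec.utils.merkle_minimal import calc_merkle_tree_from_leaves


def build_chunk_proof_with_merkle_minimal(spec, data: bytes, chunk_index: int):
    # 32-byte SSZ chunks of the data, zero-padded to the ByteList limit.
    limit_chunks = spec.MAX_SHARD_BLOCK_SIZE // 32
    padded = data.ljust(limit_chunks * 32, b"\x00")
    leaves = [padded[i:i + 32] for i in range(0, len(padded), 32)]

    full_depth = limit_chunks.bit_length() - 1  # log2 of the (power-of-two) leaf count
    tree = calc_merkle_tree_from_leaves(leaves, full_depth)

    # The proof starts at the layer whose nodes are the roots of the
    # BYTES_PER_CUSTODY_CHUNK-sized subtrees, i.e. hash_tree_root of each custody chunk.
    subtree_depth = (spec.BYTES_PER_CUSTODY_CHUNK // 32).bit_length() - 1
    branch = [
        tree[subtree_depth + i][(chunk_index >> i) ^ 1]
        for i in range(spec.CUSTODY_RESPONSE_DEPTH)
    ]
    # Final sibling is the List length mix-in, matching the `+ [serialized_length]`
    # in the diff above, so the branch verifies against hash_tree_root(data)
    # at depth CUSTODY_RESPONSE_DEPTH + 1.
    branch.append(len(data).to_bytes(32, 'little'))
    return branch
```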

@@ -165,7 +167,7 @@ def get_custody_test_vector(bytelength, offset=0):


def get_sample_shard_transition(spec, start_slot, block_lengths):
b = [spec.get_block_data_merkle_root(ByteList[spec.MAX_SHARD_BLOCK_SIZE](get_custody_test_vector(x)))
b = [spec.hash_tree_root(ByteList[spec.MAX_SHARD_BLOCK_SIZE](get_custody_test_vector(x)))
for x in block_lengths]
shard_transition = spec.ShardTransition(
start_slot=start_slot,
Expand Down Expand Up @@ -200,5 +202,5 @@ def get_custody_slashable_shard_transition(spec, start_slot, block_lengths, cust
slashable_test_vector = get_custody_slashable_test_vector(spec, custody_secret,
block_lengths[0], slashable=slashable)
block_data = ByteList[spec.MAX_SHARD_BLOCK_SIZE](slashable_test_vector)
shard_transition.shard_data_roots[0] = spec.get_block_data_merkle_root(block_data)
shard_transition.shard_data_roots[0] = spec.hash_tree_root(block_data)
return shard_transition, slashable_test_vector
@@ -248,6 +248,35 @@ def test_custody_response(spec, state):
yield from run_custody_chunk_response_processing(spec, state, custody_response)


@with_all_phases_except([PHASE0])
@spec_state_test
def test_custody_response_chunk_index_2(spec, state):
transition_to(spec, state, state.slot + spec.SLOTS_PER_EPOCH)

shard = 0
offset_slots = spec.get_offset_slots(state, shard)
shard_transition = get_sample_shard_transition(spec, state.slot, [2**15 // 3] * len(offset_slots))
attestation = get_valid_on_time_attestation(spec, state, index=shard, signed=True,
shard_transition=shard_transition)

transition_to(spec, state, state.slot + spec.MIN_ATTESTATION_INCLUSION_DELAY)

_, _, _ = run_attestation_processing(spec, state, attestation)

transition_to(spec, state, state.slot + spec.SLOTS_PER_EPOCH * (spec.EPOCHS_PER_CUSTODY_PERIOD - 1))

challenge = get_valid_chunk_challenge(spec, state, attestation, shard_transition, chunk_index=2)

_, _, _ = run_chunk_challenge_processing(spec, state, challenge)

chunk_challenge_index = state.custody_chunk_challenge_index - 1

custody_response = get_valid_custody_chunk_response(
spec, state, challenge, chunk_challenge_index, block_length_or_custody_data=2**15 // 3)

yield from run_custody_chunk_response_processing(spec, state, custody_response)


@with_all_phases_except([PHASE0])
@spec_state_test
def test_custody_response_multiple_epochs(spec, state):