bump nim-ssz-serialization to 763968076821a0f59532442c7c0c97181999a6cc
This updates `nim-ssz-serialization` to
`763968076821a0f59532442c7c0c97181999a6cc`.

Notable changes:
- Integrate merkle proof code from `nimbus-eth2`
- Add support for building merkle multiproofs
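
With `proofs` now imported and re-exported from `beacon_chain/spec/eth2_merkleization` (see the diff below), callers obtain the multiproof helpers from the library instead of the local copies removed from `beacon_chain/spec/helpers.nim`. A minimal verification sketch, assuming the moved functions keep the signatures shown in the removed `helpers.nim` code; the `demoVerify` wrapper and its inputs are illustrative, not part of this commit:

```nim
# Sketch only: assumes the library's proofs module keeps the signatures of the
# functions removed from helpers.nim in this commit.
import
  ./spec/[digest, eth2_merkleization]  # eth2_merkleization now re-exports proofs

proc demoVerify(root: Eth2Digest,
                leaves: openArray[Eth2Digest],
                indices: openArray[GeneralizedIndex],
                proof: openArray[Eth2Digest]): bool =
  # The helper indices are the extra sibling chunks a multiproof must carry;
  # they are fully determined by the leaf indices being proven.
  let helper_indices = get_helper_indices(indices)
  if proof.len != helper_indices.len:
    return false
  # Recompute the root from leaves + proof and compare with the expected root.
  verify_merkle_multiproof(leaves, proof, indices, helper_indices, root)
```

The four-argument `verify_merkle_multiproof` overload recomputes the helper indices internally; passing them explicitly, as above, lets a caller reuse them when assembling the proof.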
etan-status committed Jan 3, 2022
1 parent 35b4b61 commit 93d8af6
Showing 8 changed files with 22 additions and 498 deletions.
4 changes: 2 additions & 2 deletions .gitmodules
@@ -211,9 +211,9 @@
url = https://github.com/status-im/nim-taskpools
[submodule "vendor/nim-ssz-serialization"]
path = vendor/nim-ssz-serialization
url = https://github.com/status-im/nim-ssz-serialization.git
url = https://github.com/etan-status/nim-ssz-serialization.git
ignore = untracked
branch = master
branch = merkle-multiproof
[submodule "vendor/nim-websock"]
path = vendor/nim-websock
url = https://github.com/status-im/nim-websock.git
9 changes: 2 additions & 7 deletions AllTests-mainnet.md
@@ -298,14 +298,9 @@ OK: 1/1 Fail: 0/1 Skip: 0/1
## Spec helpers
```diff
+ build_proof - BeaconState OK
+ get_branch_indices OK
+ get_helper_indices OK
+ get_path_indices OK
+ integer_squareroot OK
+ is_valid_merkle_branch OK
+ verify_merkle_multiproof OK
```
OK: 7/7 Fail: 0/7 Skip: 0/7
OK: 2/2 Fail: 0/2 Skip: 0/2
## Specific field types
```diff
+ root update OK
@@ -401,4 +396,4 @@ OK: 1/1 Fail: 0/1 Skip: 0/1
OK: 1/1 Fail: 0/1 Skip: 0/1

---TOTAL---
OK: 217/219 Fail: 0/219 Skip: 2/219
OK: 212/214 Fail: 0/214 Skip: 2/214
6 changes: 3 additions & 3 deletions beacon_chain/spec/eth2_merkleization.nim
@@ -1,5 +1,5 @@
# beacon_chain
# Copyright (c) 2018-2021 Status Research & Development GmbH
# Copyright (c) 2018-2022 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -10,11 +10,11 @@
# Import this module to get access to `hash_tree_root` for spec types

import
ssz_serialization/merkleization,
ssz_serialization/[merkleization, proofs],
./ssz_codec,
./datatypes/[phase0, altair]

export ssz_codec, merkleization
export ssz_codec, merkleization, proofs

func hash_tree_root*(x: phase0.HashedBeaconState | altair.HashedBeaconState) {.
error: "HashedBeaconState should not be hashed".}
313 changes: 2 additions & 311 deletions beacon_chain/spec/helpers.nim
@@ -1,5 +1,5 @@
# beacon_chain
# Copyright (c) 2018-2021 Status Research & Development GmbH
# Copyright (c) 2018-2022 Status Research & Development GmbH
# Licensed and distributed under either of
# * MIT license (license terms in the root directory or at https://opensource.org/licenses/MIT).
# * Apache v2 license (license terms in the root directory or at https://www.apache.org/licenses/LICENSE-2.0).
@@ -11,7 +11,7 @@

import
# Standard lib
std/[algorithm, math, sequtils, sets, tables],
std/[algorithm, math, sets, tables],
# Status libraries
stew/[byteutils, endians2, bitops2],
chronicles,
@@ -48,310 +48,6 @@ template epoch*(slot: Slot): Epoch =
template isEpoch*(slot: Slot): bool =
(slot mod SLOTS_PER_EPOCH) == 0

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/ssz/merkle-proofs.md#generalized_index_sibling
template generalized_index_sibling*(
index: GeneralizedIndex): GeneralizedIndex =
index xor 1.GeneralizedIndex

template generalized_index_sibling_left(
index: GeneralizedIndex): GeneralizedIndex =
index and not 1.GeneralizedIndex

template generalized_index_sibling_right(
index: GeneralizedIndex): GeneralizedIndex =
index or 1.GeneralizedIndex

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/ssz/merkle-proofs.md#generalized_index_parent
template generalized_index_parent*(
index: GeneralizedIndex): GeneralizedIndex =
index shr 1

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/ssz/merkle-proofs.md#merkle-multiproofs
iterator get_branch_indices*(
tree_index: GeneralizedIndex): GeneralizedIndex =
## Get the generalized indices of the sister chunks along the path
## from the chunk with the given tree index to the root.
var index = tree_index
while index > 1.GeneralizedIndex:
yield generalized_index_sibling(index)
index = generalized_index_parent(index)

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/ssz/merkle-proofs.md#merkle-multiproofs
iterator get_path_indices*(
tree_index: GeneralizedIndex): GeneralizedIndex =
## Get the generalized indices of the chunks along the path
## from the chunk with the given tree index to the root.
var index = tree_index
while index > 1.GeneralizedIndex:
yield index
index = generalized_index_parent(index)

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/ssz/merkle-proofs.md#merkle-multiproofs
func get_helper_indices*(
indices: openArray[GeneralizedIndex]): seq[GeneralizedIndex] =
## Get the generalized indices of all "extra" chunks in the tree needed
## to prove the chunks with the given generalized indices. Note that the
## decreasing order is chosen deliberately to ensure equivalence to the order
## of hashes in a regular single-item Merkle proof in the single-item case.
var all_helper_indices = initHashSet[GeneralizedIndex]()
for index in indices:
for idx in get_branch_indices(index):
all_helper_indices.incl idx
for index in indices:
for idx in get_path_indices(index):
all_helper_indices.excl idx

var res = newSeqOfCap[GeneralizedIndex](all_helper_indices.len)
for idx in all_helper_indices:
res.add idx
res.sort(SortOrder.Descending)
res

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/ssz/merkle-proofs.md#merkle-multiproofs
func check_multiproof_acceptable*(
indices: openArray[GeneralizedIndex]): Result[void, string] =
# Check that proof verification won't allocate excessive amounts of memory.
const max_multiproof_complexity = nextPowerOfTwo(256)
if indices.len > max_multiproof_complexity:
trace "Max multiproof complexity exceeded",
num_indices=indices.len, max_multiproof_complexity
return err("Unsupported multiproof complexity (" & $indices.len & ")")

if indices.len == 0:
return err("No indices specified")
if indices.anyIt(it == 0.GeneralizedIndex):
return err("Invalid index specified")
ok()

func calculate_multi_merkle_root_impl(
leaves: openArray[Eth2Digest],
proof: openArray[Eth2Digest],
indices: openArray[GeneralizedIndex],
helper_indices: openArray[GeneralizedIndex]): Result[Eth2Digest, string] =
# All callers have already verified the checks in check_multiproof_acceptable,
# as well as whether lengths of leaves/indices and proof/helper_indices match.

# Helper to retrieve a value from a table that is statically known to exist.
template getExisting[A, B](t: var Table[A, B], key: A): var B =
try: t[key]
except KeyError: raiseAssert "Unreachable"

# Populate data structure with all leaves.
# This data structure only scales with the number of `leaves`,
# in contrast to the spec one that also scales with the number of `proof`
# items and the number of all intermediate roots, potentially the entire tree.
let capacity = nextPowerOfTwo(leaves.len)
var objects = initTable[GeneralizedIndex, Eth2Digest](capacity)
for i, index in indices:
if objects.mgetOrPut(index, leaves[i]) != leaves[i]:
return err("Conflicting roots for same index")

# Create list with keys of all active nodes that need to be visited.
# This list is sorted in descending order, same as `helper_indices`.
# Pulling from `objects` instead of from `indices` deduplicates the list.
var keys = newSeqOfCap[GeneralizedIndex](objects.len)
for index in objects.keys:
if index > 1.GeneralizedIndex: # For the root, no work needs to be done.
keys.add index
keys.sort(SortOrder.Descending)

# The merkle tree is processed from bottom to top, pulling in helper
# indices from `proof` as needed. During processing, the `keys` list
# may temporarily end up being split into two parts, sorted individually.
# An additional index tracks the current maximum element of the list.
var
completed = 0 # All key indices before this are fully processed.
maxIndex = completed # Index of the list's largest key.
helper = 0 # Helper index from `proof` to be pulled next.

# Processing is done when there are no more keys to process.
while completed < keys.len:
let
k = keys[maxIndex]
sibling = generalized_index_sibling(k)
left = generalized_index_sibling_left(k)
right = generalized_index_sibling_right(k)
parent = generalized_index_parent(k)
parentRight = generalized_index_sibling_right(parent)

# Keys need to be processed in descending order to ensure that intermediate
# roots remain available until they are no longer needed. This ensures that
# conflicting roots are detected in all cases.
keys[maxIndex] =
if not objects.hasKey(k):
# A previous computation did already merge this key with its sibling.
0.GeneralizedIndex
else:
# Compute expected root for parent. This deletes child roots.
# Because the list is sorted in descending order, they are not needed.
let root = withEth2Hash:
if helper < helper_indices.len and helper_indices[helper] == sibling:
# The next proof item is required to form the parent hash.
if sibling == left:
h.update proof[helper].data
h.update objects.getExisting(right).data; objects.del right
else:
h.update objects.getExisting(left).data; objects.del left
h.update proof[helper].data
inc helper
else:
# Both siblings are already known.
h.update objects.getExisting(left).data; objects.del left
h.update objects.getExisting(right).data; objects.del right

# Store parent root, and replace the current list entry with its parent.
if objects.hasKeyOrPut(parent, root):
if objects.getExisting(parent) != root:
return err("Conflicting roots for same index")
0.GeneralizedIndex
elif parent > 1.GeneralizedIndex:
# Note that the list may contain further nodes that are on a layer
# beneath the parent, so this may break the strictly descending order
# of the list. For example, given [12, 9], this will lead to [6, 9].
# This will resolve itself after the additional nodes are processed,
# i.e., [6, 9] -> [6, 4] -> [3, 4] -> [3, 2] -> [1].
parent
else:
0.GeneralizedIndex
if keys[maxIndex] != 0.GeneralizedIndex:
# The list may have been temporarily split up into two parts that are
# individually sorted in descending order. Have to first process further
# nodes until the list is sorted once more.
inc maxIndex

# Determine whether descending sort order has been restored.
let isSorted =
if maxIndex == completed: true
else:
while maxIndex < keys.len and keys[maxIndex] == 0.GeneralizedIndex:
inc maxIndex
maxIndex >= keys.len or keys[maxIndex] <= parentRight
if isSorted:
# List is sorted once more. Reset `maxIndex` to its start.
while completed < keys.len and keys[completed] == 0.GeneralizedIndex:
inc completed
maxIndex = completed

# Proof is guaranteed to provide all info needed to reach the root.
doAssert helper == helper_indices.len
doAssert objects.len == 1
ok(objects.getExisting(1.GeneralizedIndex))

func calculate_multi_merkle_root*(
leaves: openArray[Eth2Digest],
proof: openArray[Eth2Digest],
indices: openArray[GeneralizedIndex],
helper_indices: openArray[GeneralizedIndex]): Result[Eth2Digest, string] =
doAssert proof.len == helper_indices.len
if leaves.len != indices.len:
return err("Length mismatch for leaves and indices")
? check_multiproof_acceptable(indices)
calculate_multi_merkle_root_impl(
leaves, proof, indices, helper_indices)

func calculate_multi_merkle_root*(
leaves: openArray[Eth2Digest],
proof: openArray[Eth2Digest],
indices: openArray[GeneralizedIndex]): Result[Eth2Digest, string] =
if leaves.len != indices.len:
return err("Length mismatch for leaves and indices")
? check_multiproof_acceptable(indices)
calculate_multi_merkle_root_impl(
leaves, proof, indices, get_helper_indices(indices))

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/ssz/merkle-proofs.md#merkle-multiproofs
func verify_merkle_multiproof*(
leaves: openArray[Eth2Digest],
proof: openArray[Eth2Digest],
indices: openArray[GeneralizedIndex],
helper_indices: openArray[GeneralizedIndex],
root: Eth2Digest): bool =
let calc = calculate_multi_merkle_root(leaves, proof, indices, helper_indices)
if calc.isErr: return false
calc.get == root

func verify_merkle_multiproof*(
leaves: openArray[Eth2Digest],
proof: openArray[Eth2Digest],
indices: openArray[GeneralizedIndex],
root: Eth2Digest): bool =
let calc = calculate_multi_merkle_root(leaves, proof, indices)
if calc.isErr: return false
calc.get == root

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/specs/phase0/beacon-chain.md#is_valid_merkle_branch
func is_valid_merkle_branch*(leaf: Eth2Digest, branch: openArray[Eth2Digest],
depth: int, index: uint64,
root: Eth2Digest): bool =
## Check if ``leaf`` at ``index`` verifies against the Merkle ``root`` and
## ``branch``.
var
value = leaf
buf: array[64, byte]

for i in 0 ..< depth:
if (index div (1'u64 shl i)) mod 2 != 0:
buf[0..31] = branch[i].data
buf[32..63] = value.data
else:
buf[0..31] = value.data
buf[32..63] = branch[i].data
value = eth2digest(buf)
value == root

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/tests/core/pyspec/eth2spec/test/helpers/merkle.py#L4-L21
func build_proof_impl(anchor: object, leaf_index: uint64,
proof: var openArray[Eth2Digest]) =
let
bottom_length = nextPow2(typeof(anchor).totalSerializedFields.uint64)
tree_depth = log2trunc(bottom_length)
parent_index =
if leaf_index < bottom_length shl 1:
0'u64
else:
var i = leaf_index
while i >= bottom_length shl 1:
i = i shr 1
i

var
prefix_len = 0
proof_len = log2trunc(leaf_index)
cache = newSeq[Eth2Digest](bottom_length shl 1)
block:
var i = bottom_length
anchor.enumInstanceSerializedFields(fieldNameVar, fieldVar):
if i == parent_index:
when fieldVar is object:
prefix_len = log2trunc(leaf_index) - tree_depth
proof_len -= prefix_len
let
bottom_bits = leaf_index and not (uint64.high shl prefix_len)
prefix_leaf_index = (1'u64 shl prefix_len) + bottom_bits
build_proof_impl(fieldVar, prefix_leaf_index, proof)
else: raiseAssert "Invalid leaf_index"
cache[i] = hash_tree_root(fieldVar)
i += 1
for i in countdown(bottom_length - 1, 1):
cache[i] = withEth2Hash:
h.update cache[i shl 1].data
h.update cache[i shl 1 + 1].data

var i = if parent_index != 0: parent_index
else: leaf_index
doAssert i > 0 and i < bottom_length shl 1
for proof_index in prefix_len ..< prefix_len + proof_len:
let b = (i and 1) != 0
i = i shr 1
proof[proof_index] = if b: cache[i shl 1]
else: cache[i shl 1 + 1]

func build_proof*(anchor: object, leaf_index: uint64,
proof: var openArray[Eth2Digest]) =
doAssert leaf_index > 0
doAssert proof.len == log2trunc(leaf_index)
build_proof_impl(anchor, leaf_index, proof)

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/specs/altair/validator.md#sync-committee
template sync_committee_period*(epoch: Epoch): SyncCommitteePeriod =
(epoch div EPOCHS_PER_SYNC_COMMITTEE_PERIOD).SyncCommitteePeriod
@@ -508,11 +204,6 @@ func has_flag*(flags: ParticipationFlags, flag_index: int): bool =
let flag = ParticipationFlags(1'u8 shl flag_index)
(flags and flag) == flag

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/specs/altair/sync-protocol.md#get_subtree_index
func get_subtree_index*(idx: GeneralizedIndex): uint64 =
doAssert idx > 0
uint64(idx mod (type(idx)(1) shl log2trunc(idx)))

# https://github.com/ethereum/consensus-specs/blob/v1.1.6/specs/merge/beacon-chain.md#is_merge_transition_complete
func is_merge_transition_complete*(state: merge.BeaconState): bool =
state.latest_execution_payload_header != default(ExecutionPayloadHeader)
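
For orientation, the helper-index computation removed from `helpers.nim` above (and now expected from the library) can be pictured on a small tree. A worked example, assuming `get_helper_indices` remains available with the signature shown in the removed code; the tree and indices are illustrative:

```nim
import ./spec/eth2_merkleization  # assumed to re-export get_helper_indices

# Generalized indices of a depth-2 tree: root = 1, inner nodes = 2..3,
# leaves = 4..7. We want to prove the chunks at indices 4 and 6.
#
#            1
#         /     \
#        2       3
#       / \     / \
#      4   5   6   7
#
# Branch (sibling) indices: 4 -> {5, 3}, 6 -> {7, 2}; union = {2, 3, 5, 7}.
# Path indices:             4 -> {4, 2}, 6 -> {6, 3}; union = {2, 3, 4, 6}.
# Helpers = branch indices minus path indices = {5, 7}, in descending order.
let helpers = get_helper_indices([4.GeneralizedIndex, 6.GeneralizedIndex])
doAssert helpers == @[7.GeneralizedIndex, 5.GeneralizedIndex]
```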