[FAB-1700] Deterministic BlockData hashing
https://jira.hyperledger.org/browse/FAB-1700

This is a companion item to

https://jira.hyperledger.org/browse/FAB-1699

It removes the protobuf serialization of the BlockData in favor of a simple
concatenation of the byte contents.  Eventually, this should be encoded
as a Merkle tree at the configured width but, as with our hashing
algorithm, it is fixed for v1 and will be made configurable at a later date, see:

https://jira.hyperledger.org/browse/FAB-1983

Change-Id: I8a09e84f1c35d8fe1269339a443c44ba01b841a8
Signed-off-by: Jason Yellick <[email protected]>
Jason Yellick committed Feb 1, 2017
1 parent 7104614 commit 25c888d
Showing 2 changed files with 12 additions and 9 deletions.
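As an aside, the degradation described in the commit message (a Merkle tree of width MaxUint32 collapsing to a single flat hash over the concatenated entries) can be illustrated with a small, hypothetical Go sketch. Nothing below is part of this change: merkleHash is an invented helper, and SHA-256 stands in for whatever hash util.ComputeCryptoHash is configured to use.

package main

import (
	"bytes"
	"crypto/sha256"
	"fmt"
	"math"
)

// merkleHash is a hypothetical illustration, not code from this commit.
// It hashes the leaves as a Merkle tree of the given width. When width is
// at least len(leaves), e.g. math.MaxUint32, every leaf falls into a single
// node and the result is just a flat hash over the concatenated leaves,
// which is what the new BlockData.Hash() computes.
func merkleHash(leaves [][]byte, width uint32) []byte {
	if width < 2 {
		width = 2 // avoid non-terminating recursion on degenerate widths
	}
	if uint64(len(leaves)) <= uint64(width) {
		sum := sha256.Sum256(bytes.Join(leaves, nil))
		return sum[:]
	}
	var parents [][]byte
	for i := 0; i < len(leaves); i += int(width) {
		end := i + int(width)
		if end > len(leaves) {
			end = len(leaves)
		}
		sum := sha256.Sum256(bytes.Join(leaves[i:end], nil))
		parents = append(parents, sum[:])
	}
	return merkleHash(parents, width)
}

func main() {
	data := [][]byte{[]byte("tx1"), []byte("tx2"), []byte("tx3")}
	// With the v1 width of MaxUint32 this is simply SHA-256 over "tx1tx2tx3".
	fmt.Printf("%x\n", merkleHash(data, math.MaxUint32))
}

FAB-1983 tracks making the width actually configurable; until then the check in chainconfig.go below pins it to MaxUint32.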
5 changes: 3 additions & 2 deletions common/chainconfig/chainconfig.go
@@ -18,6 +18,7 @@ package chainconfig
 
 import (
 	"fmt"
+	"math"
 
 	"github.com/hyperledger/fabric/common/util"
 	cb "github.com/hyperledger/fabric/protos/common"
@@ -144,8 +145,8 @@ func (pm *DescriptorImpl) ProposeConfig(configItem *cb.ConfigurationItem) error
 		return fmt.Errorf("Unmarshaling error for BlockDataHashingStructure: %s", err)
 	}
 
-	if blockDataHashingStructure.Width == 0 {
-		return fmt.Errorf("BlockDataHashStructure width must not be zero")
+	if blockDataHashingStructure.Width != math.MaxUint32 {
+		return fmt.Errorf("BlockDataHashStructure width only supported at MaxUint32 in this version")
 	}
 
 	pm.pendingConfig.blockDataHashingStructureWidth = blockDataHashingStructure.Width
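For reference, the only configuration value that passes the check above in v1 looks like the following sketch. Only the BlockDataHashingStructure message and its Width field are taken from the diff; how the marshaled bytes are wrapped into a ConfigurationItem is assumed and elided here.

package main

import (
	"math"

	"github.com/golang/protobuf/proto"
	cb "github.com/hyperledger/fabric/protos/common"
)

func main() {
	// The only width the v1 validation accepts; anything else is rejected
	// by ProposeConfig with the error shown above.
	hashingStructure := &cb.BlockDataHashingStructure{Width: math.MaxUint32}

	// The configuration value is carried as the marshaled proto bytes
	// (wrapping it into a ConfigurationItem is omitted in this sketch).
	value, err := proto.Marshal(hashingStructure)
	if err != nil {
		panic(err)
	}
	_ = value
}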
16 changes: 9 additions & 7 deletions protos/common/block.go
@@ -21,7 +21,6 @@ import (
 	"fmt"
 	"math"
 
-	"github.com/golang/protobuf/proto"
 	"github.com/hyperledger/fabric/common/util"
 )
 
@@ -75,12 +74,15 @@ func (b *BlockHeader) Hash() []byte {
 	return util.ComputeCryptoHash(b.Bytes())
 }
 
+// Bytes returns a deterministically serialized version of the BlockData
+// eventually, this should be replaced with a true Merkle tree construction,
+// but for the moment, we assume a Merkle tree of infinite width (uint32_max)
+// which degrades to a flat hash
+func (b *BlockData) Bytes() []byte {
+	return util.ConcatenateBytes(b.Data...)
+}
+
 // Hash returns the hash of the marshaled representation of the block data.
 func (b *BlockData) Hash() []byte {
-	data, err := proto.Marshal(b) // XXX this is wrong, protobuf is not the right mechanism to serialize for a hash, AND, it is not a MerkleTree hash
-	if err != nil {
-		panic("This should never fail and is generally irrecoverable")
-	}
-
-	return util.ComputeCryptoHash(data)
+	return util.ComputeCryptoHash(b.Bytes())
 }
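To make the new behavior concrete, here is a hedged usage sketch (not part of the commit): two BlockData values holding the same payload bytes now hash identically, and the hash is nothing more than the configured crypto hash over the concatenated entries. The helper signatures are assumed to behave as their use in the diff suggests.

package main

import (
	"bytes"
	"fmt"

	"github.com/hyperledger/fabric/common/util"
	cb "github.com/hyperledger/fabric/protos/common"
)

func main() {
	// Identical payloads now produce identical hashes, because the hash no
	// longer depends on protobuf serialization of the BlockData wrapper.
	a := &cb.BlockData{Data: [][]byte{[]byte("envelope-1"), []byte("envelope-2")}}
	b := &cb.BlockData{Data: [][]byte{[]byte("envelope-1"), []byte("envelope-2")}}
	fmt.Println(bytes.Equal(a.Hash(), b.Hash())) // true

	// The hash is just the crypto hash of the flat concatenation.
	expected := util.ComputeCryptoHash(util.ConcatenateBytes(a.Data...))
	fmt.Println(bytes.Equal(a.Hash(), expected)) // true
}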
