From 9890339d8a8d48fecf0ffaee9bc47f98dc17cf3d Mon Sep 17 00:00:00 2001 From: james Date: Tue, 28 Nov 2023 13:19:21 +0800 Subject: [PATCH 01/15] add diff manager module --- consensus/difficultymanager/blockwindow.go | 102 +++++ .../difficultymanager/difficultymanager.go | 213 +++++++++++ .../difficultymanager_test.go | 358 ++++++++++++++++++ consensus/difficultymanager/hashrate.go | 74 ++++ 4 files changed, 747 insertions(+) create mode 100644 consensus/difficultymanager/blockwindow.go create mode 100644 consensus/difficultymanager/difficultymanager.go create mode 100644 consensus/difficultymanager/difficultymanager_test.go create mode 100644 consensus/difficultymanager/hashrate.go diff --git a/consensus/difficultymanager/blockwindow.go b/consensus/difficultymanager/blockwindow.go new file mode 100644 index 00000000..8c66eb7a --- /dev/null +++ b/consensus/difficultymanager/blockwindow.go @@ -0,0 +1,102 @@ +package difficultymanager + +import ( + "github.com/kaspanet/kaspad/domain/consensus/model" + "github.com/kaspanet/kaspad/domain/consensus/model/externalapi" + "github.com/kaspanet/kaspad/util/difficulty" + "math" + "math/big" +) + +type difficultyBlock struct { + timeInMilliseconds int64 + Bits uint32 + hash *externalapi.DomainHash + blueWork *big.Int +} + +type blockWindow []difficultyBlock + +func (dm *difficultyManager) getDifficultyBlock( + stagingArea *model.StagingArea, blockHash *externalapi.DomainHash) (difficultyBlock, error) { + + header, err := dm.headerStore.BlockHeader(dm.databaseContext, stagingArea, blockHash) + if err != nil { + return difficultyBlock{}, err + } + return difficultyBlock{ + timeInMilliseconds: header.TimeInMilliseconds(), + Bits: header.Bits(), + hash: blockHash, + blueWork: header.BlueWork(), + }, nil +} + +// blockWindow returns a blockWindow of the given size that contains the +// blocks in the past of startingNode, the sorting is unspecified. +// If the number of blocks in the past of startingNode is less then windowSize, +// the window will be padded by genesis blocks to achieve a size of windowSize. 
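+// The same window (via the returned hashes) is later reused to stage DAA data,
+// so difficulty calculation and DAA scoring share a single window traversal.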
+func (dm *difficultyManager) blockWindow(stagingArea *model.StagingArea, startingNode *externalapi.DomainHash, windowSize int) (blockWindow, + []*externalapi.DomainHash, error) { + + window := make(blockWindow, 0, windowSize) + windowHashes, err := dm.dagTraversalManager.BlockWindow(stagingArea, startingNode, windowSize) + if err != nil { + return nil, nil, err + } + + for _, hash := range windowHashes { + block, err := dm.getDifficultyBlock(stagingArea, hash) + if err != nil { + return nil, nil, err + } + window = append(window, block) + } + return window, windowHashes, nil +} + +func ghostdagLess(blockA *difficultyBlock, blockB *difficultyBlock) bool { + switch blockA.blueWork.Cmp(blockB.blueWork) { + case -1: + return true + case 1: + return false + case 0: + return blockA.hash.Less(blockB.hash) + default: + panic("big.Int.Cmp is defined to always return -1/1/0 and nothing else") + } +} + +func (window blockWindow) minMaxTimestamps() (min, max int64, minIndex int) { + min = math.MaxInt64 + minIndex = 0 + max = 0 + for i, block := range window { + // If timestamps are equal we ghostdag compare in order to reach consensus on `minIndex` + if block.timeInMilliseconds < min || + (block.timeInMilliseconds == min && ghostdagLess(&block, &window[minIndex])) { + min = block.timeInMilliseconds + minIndex = i + } + if block.timeInMilliseconds > max { + max = block.timeInMilliseconds + } + } + return +} + +func (window *blockWindow) remove(n int) { + (*window)[n] = (*window)[len(*window)-1] + *window = (*window)[:len(*window)-1] +} + +func (window blockWindow) averageTarget() *big.Int { + averageTarget := new(big.Int) + targetTmp := new(big.Int) + for _, block := range window { + difficulty.CompactToBigWithDestination(block.Bits, targetTmp) + averageTarget.Add(averageTarget, targetTmp) + } + return averageTarget.Div(averageTarget, big.NewInt(int64(len(window)))) +} diff --git a/consensus/difficultymanager/difficultymanager.go b/consensus/difficultymanager/difficultymanager.go new file mode 100644 index 00000000..1838729f --- /dev/null +++ b/consensus/difficultymanager/difficultymanager.go @@ -0,0 +1,213 @@ +package difficultymanager + +import ( + "math/big" + "time" + + "github.com/kaspanet/kaspad/infrastructure/logger" + "github.com/kaspanet/kaspad/util/math" + + "github.com/kaspanet/kaspad/util/difficulty" + + "github.com/kaspanet/kaspad/domain/consensus/model" + "github.com/kaspanet/kaspad/domain/consensus/model/externalapi" +) + +// DifficultyManager provides a method to resolve the +// difficulty value of a block +type difficultyManager struct { + databaseContext model.DBReader + ghostdagManager model.GHOSTDAGManager + ghostdagStore model.GHOSTDAGDataStore + headerStore model.BlockHeaderStore + daaBlocksStore model.DAABlocksStore + dagTopologyManager model.DAGTopologyManager + dagTraversalManager model.DAGTraversalManager + genesisHash *externalapi.DomainHash + powMax *big.Int + difficultyAdjustmentWindowSize int + disableDifficultyAdjustment bool + targetTimePerBlock time.Duration + genesisBits uint32 +} + +// New instantiates a new DifficultyManager +func New(databaseContext model.DBReader, + ghostdagManager model.GHOSTDAGManager, + ghostdagStore model.GHOSTDAGDataStore, + headerStore model.BlockHeaderStore, + daaBlocksStore model.DAABlocksStore, + dagTopologyManager model.DAGTopologyManager, + dagTraversalManager model.DAGTraversalManager, + powMax *big.Int, + difficultyAdjustmentWindowSize int, + disableDifficultyAdjustment bool, + targetTimePerBlock time.Duration, + genesisHash 
*externalapi.DomainHash, + genesisBits uint32) model.DifficultyManager { + return &difficultyManager{ + databaseContext: databaseContext, + ghostdagManager: ghostdagManager, + ghostdagStore: ghostdagStore, + headerStore: headerStore, + daaBlocksStore: daaBlocksStore, + dagTopologyManager: dagTopologyManager, + dagTraversalManager: dagTraversalManager, + powMax: powMax, + difficultyAdjustmentWindowSize: difficultyAdjustmentWindowSize, + disableDifficultyAdjustment: disableDifficultyAdjustment, + targetTimePerBlock: targetTimePerBlock, + genesisHash: genesisHash, + genesisBits: genesisBits, + } +} + +// StageDAADataAndReturnRequiredDifficulty calculates the DAA window, stages the DAA score and DAA added +// blocks, and returns the required difficulty for the given block. +// The reason this function both stages DAA data and returns the difficulty is because in order to calculate +// both of them we need to calculate the DAA window, which is a relatively heavy operation, so we reuse the +// block window instead of recalculating it for the two purposes. +// For cases where no staging should happen and the caller only needs to know the difficulty he should +// use RequiredDifficulty. +func (dm *difficultyManager) StageDAADataAndReturnRequiredDifficulty( + stagingArea *model.StagingArea, + blockHash *externalapi.DomainHash, + isBlockWithTrustedData bool) (uint32, error) { + + onEnd := logger.LogAndMeasureExecutionTime(log, "StageDAADataAndReturnRequiredDifficulty") + defer onEnd() + + targetsWindow, windowHashes, err := dm.blockWindow(stagingArea, blockHash, dm.difficultyAdjustmentWindowSize) + if err != nil { + return 0, err + } + + err = dm.stageDAAScoreAndAddedBlocks(stagingArea, blockHash, windowHashes, isBlockWithTrustedData) + if err != nil { + return 0, err + } + + return dm.requiredDifficultyFromTargetsWindow(targetsWindow) +} + +// RequiredDifficulty returns the difficulty required for some block +func (dm *difficultyManager) RequiredDifficulty(stagingArea *model.StagingArea, blockHash *externalapi.DomainHash) (uint32, error) { + targetsWindow, _, err := dm.blockWindow(stagingArea, blockHash, dm.difficultyAdjustmentWindowSize) + if err != nil { + return 0, err + } + + return dm.requiredDifficultyFromTargetsWindow(targetsWindow) +} + +func (dm *difficultyManager) requiredDifficultyFromTargetsWindow(targetsWindow blockWindow) (uint32, error) { + if dm.disableDifficultyAdjustment { + return dm.genesisBits, nil + } + + // in the past this was < 2 as the comment explains, we changed it to under the window size to + // make the hashrate(which is ~1.5GH/s) constant in the first 2641 blocks so that we won't have a lot of tips + + // We need at least 2 blocks to get a timestamp interval + // We could instead clamp the timestamp difference to `targetTimePerBlock`, + // but then everything will cancel out and we'll get the target from the last block, which will be the same as genesis. 
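+	// Until the window has accumulated difficultyAdjustmentWindowSize blocks we therefore
+	// keep returning genesisBits instead of adjusting.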
+ // We add 64 as a safety margin + if len(targetsWindow) < 2 || len(targetsWindow) < dm.difficultyAdjustmentWindowSize { + return dm.genesisBits, nil + } + + windowMinTimestamp, windowMaxTimeStamp, windowMinIndex := targetsWindow.minMaxTimestamps() + // Remove the last block from the window so to calculate the average target of dag.difficultyAdjustmentWindowSize blocks + targetsWindow.remove(windowMinIndex) + + // Calculate new target difficulty as: + // averageWindowTarget * (windowMinTimestamp / (targetTimePerBlock * windowSize)) + // The result uses integer division which means it will be slightly + // rounded down. + div := new(big.Int) + newTarget := targetsWindow.averageTarget() + newTarget. + // We need to clamp the timestamp difference to 1 so that we'll never get a 0 target. + Mul(newTarget, div.SetInt64(math.MaxInt64(windowMaxTimeStamp-windowMinTimestamp, 1))). + Div(newTarget, div.SetInt64(dm.targetTimePerBlock.Milliseconds())). + Div(newTarget, div.SetUint64(uint64(len(targetsWindow)))) + if newTarget.Cmp(dm.powMax) > 0 { + return difficulty.BigToCompact(dm.powMax), nil + } + newTargetBits := difficulty.BigToCompact(newTarget) + return newTargetBits, nil +} + +func (dm *difficultyManager) stageDAAScoreAndAddedBlocks(stagingArea *model.StagingArea, + blockHash *externalapi.DomainHash, + windowHashes []*externalapi.DomainHash, + isBlockWithTrustedData bool) error { + + onEnd := logger.LogAndMeasureExecutionTime(log, "stageDAAScoreAndAddedBlocks") + defer onEnd() + + daaScore, addedBlocks, err := dm.calculateDaaScoreAndAddedBlocks(stagingArea, blockHash, windowHashes, isBlockWithTrustedData) + if err != nil { + return err + } + + dm.daaBlocksStore.StageDAAScore(stagingArea, blockHash, daaScore) + dm.daaBlocksStore.StageBlockDAAAddedBlocks(stagingArea, blockHash, addedBlocks) + return nil +} + +func (dm *difficultyManager) calculateDaaScoreAndAddedBlocks(stagingArea *model.StagingArea, + blockHash *externalapi.DomainHash, + windowHashes []*externalapi.DomainHash, + isBlockWithTrustedData bool) (uint64, []*externalapi.DomainHash, error) { + + if blockHash.Equal(dm.genesisHash) { + genesisHeader, err := dm.headerStore.BlockHeader(dm.databaseContext, stagingArea, dm.genesisHash) + if err != nil { + return 0, nil, err + } + return genesisHeader.DAAScore(), nil, nil + } + + ghostdagData, err := dm.ghostdagStore.Get(dm.databaseContext, stagingArea, blockHash, false) + if err != nil { + return 0, nil, err + } + mergeSetLength := len(ghostdagData.MergeSetBlues()) + len(ghostdagData.MergeSetReds()) + mergeSet := make(map[externalapi.DomainHash]struct{}, mergeSetLength) + for _, hash := range ghostdagData.MergeSetBlues() { + mergeSet[*hash] = struct{}{} + } + + for _, hash := range ghostdagData.MergeSetReds() { + mergeSet[*hash] = struct{}{} + } + + // TODO: Consider optimizing by breaking the loop once you arrive to the + // window block with blue work higher than all non-added merge set blocks. 
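+	// daaAddedBlocks collects the merge-set blocks that also appear in the DAA window;
+	// the block's DAA score (below) is its selected parent's DAA score plus the number of such blocks.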
+ daaAddedBlocks := make([]*externalapi.DomainHash, 0, len(mergeSet)) + for _, hash := range windowHashes { + if _, exists := mergeSet[*hash]; exists { + daaAddedBlocks = append(daaAddedBlocks, hash) + if len(daaAddedBlocks) == len(mergeSet) { + break + } + } + } + + var daaScore uint64 + if isBlockWithTrustedData { + daaScore, err = dm.daaBlocksStore.DAAScore(dm.databaseContext, stagingArea, blockHash) + if err != nil { + return 0, nil, err + } + } else { + selectedParentDAAScore, err := dm.daaBlocksStore.DAAScore(dm.databaseContext, stagingArea, ghostdagData.SelectedParent()) + if err != nil { + return 0, nil, err + } + daaScore = selectedParentDAAScore + uint64(len(daaAddedBlocks)) + } + + return daaScore, daaAddedBlocks, nil +} diff --git a/consensus/difficultymanager/difficultymanager_test.go b/consensus/difficultymanager/difficultymanager_test.go new file mode 100644 index 00000000..fe6cb588 --- /dev/null +++ b/consensus/difficultymanager/difficultymanager_test.go @@ -0,0 +1,358 @@ +package difficultymanager_test + +import ( + "testing" + "time" + + "github.com/kaspanet/kaspad/util/difficulty" + + "github.com/kaspanet/kaspad/util/mstime" + + "github.com/kaspanet/kaspad/domain/consensus/utils/consensushashing" + + "github.com/kaspanet/kaspad/domain/consensus" + "github.com/kaspanet/kaspad/domain/consensus/model" + "github.com/kaspanet/kaspad/domain/consensus/model/externalapi" + "github.com/kaspanet/kaspad/domain/consensus/utils/testutils" + "github.com/kaspanet/kaspad/domain/dagconfig" +) + +func TestDifficulty(t *testing.T) { + testutils.ForAllNets(t, true, func(t *testing.T, consensusConfig *consensus.Config) { + if consensusConfig.DisableDifficultyAdjustment { + return + } + // This test generates 3066 blocks above genesis with at least 1 second between each block, amounting to + // a bit less then an hour of timestamps. + // To prevent rejected blocks due to timestamps in the future, the following safeguard makes sure + // the genesis block is at least 1 hour in the past. + if consensusConfig.GenesisBlock.Header.TimeInMilliseconds() > mstime.ToMSTime(time.Now().Add(-time.Hour)).UnixMilliseconds() { + t.Fatalf("TestDifficulty requires the GenesisBlock to be at least 1 hour old to pass") + } + + consensusConfig.K = 1 + consensusConfig.DifficultyAdjustmentWindowSize = 140 + + factory := consensus.NewFactory() + tc, teardown, err := factory.NewTestConsensus(consensusConfig, "TestDifficulty") + if err != nil { + t.Fatalf("Error setting up consensus: %+v", err) + } + defer teardown(false) + + stagingArea := model.NewStagingArea() + + addBlock := func(blockTime int64, parents ...*externalapi.DomainHash) (*externalapi.DomainBlock, *externalapi.DomainHash) { + bluestParent, err := tc.GHOSTDAGManager().ChooseSelectedParent(stagingArea, parents...) 
+ if err != nil { + t.Fatalf("ChooseSelectedParent: %+v", err) + } + + if blockTime == 0 { + header, err := tc.BlockHeaderStore().BlockHeader(tc.DatabaseContext(), stagingArea, bluestParent) + if err != nil { + t.Fatalf("BlockHeader: %+v", err) + } + + blockTime = header.TimeInMilliseconds() + consensusConfig.TargetTimePerBlock.Milliseconds() + } + + block, _, err := tc.BuildBlockWithParents(parents, nil, nil) + if err != nil { + t.Fatalf("BuildBlockWithParents: %+v", err) + } + + newHeader := block.Header.ToMutable() + newHeader.SetTimeInMilliseconds(blockTime) + block.Header = newHeader.ToImmutable() + err = tc.ValidateAndInsertBlock(block, true) + if err != nil { + t.Fatalf("ValidateAndInsertBlock: %+v", err) + } + + return block, consensushashing.BlockHash(block) + } + + minimumTime := func(parents ...*externalapi.DomainHash) int64 { + var tempHash externalapi.DomainHash + stagingArea := model.NewStagingArea() + tc.BlockRelationStore().StageBlockRelation(stagingArea, &tempHash, &model.BlockRelations{ + Parents: parents, + Children: nil, + }) + + err = tc.GHOSTDAGManager().GHOSTDAG(stagingArea, &tempHash) + if err != nil { + t.Fatalf("GHOSTDAG: %+v", err) + } + + pastMedianTime, err := tc.PastMedianTimeManager().PastMedianTime(stagingArea, &tempHash) + if err != nil { + t.Fatalf("PastMedianTime: %+v", err) + } + + return pastMedianTime + 1 + } + + addBlockWithMinimumTime := func(parents ...*externalapi.DomainHash) (*externalapi.DomainBlock, *externalapi.DomainHash) { + minTime := minimumTime(parents...) + return addBlock(minTime, parents...) + } + + tipHash := consensusConfig.GenesisHash + tip := consensusConfig.GenesisBlock + for i := 0; i < consensusConfig.DifficultyAdjustmentWindowSize; i++ { + tip, tipHash = addBlock(0, tipHash) + if tip.Header.Bits() != consensusConfig.GenesisBlock.Header.Bits() { + t.Fatalf("As long as the block blue score is less then the difficulty adjustment " + + "window size, the difficulty should be the same as genesis'") + } + } + for i := 0; i < consensusConfig.DifficultyAdjustmentWindowSize+10; i++ { + tip, tipHash = addBlock(0, tipHash) + if tip.Header.Bits() != consensusConfig.GenesisBlock.Header.Bits() { + t.Fatalf("As long as the block rate remains the same, the difficulty shouldn't change") + } + } + + blockInThePast, tipHash := addBlockWithMinimumTime(tipHash) + if blockInThePast.Header.Bits() != tip.Header.Bits() { + t.Fatalf("The difficulty should only change when blockInThePast is in the past of a block") + } + tip = blockInThePast + + tip, tipHash = addBlock(0, tipHash) + if compareBits(tip.Header.Bits(), blockInThePast.Header.Bits()) >= 0 { + t.Fatalf("tip.bits should be smaller than blockInThePast.bits because blockInThePast increased the " + + "block rate, so the difficulty should increase as well") + } + + var expectedBits uint32 + switch consensusConfig.Name { + case dagconfig.TestnetParams.Name: + expectedBits = uint32(0x1e7f1441) + case dagconfig.DevnetParams.Name: + expectedBits = uint32(0x1f4e54ab) + case dagconfig.MainnetParams.Name: + expectedBits = uint32(0x1d02c50f) + } + + if tip.Header.Bits() != expectedBits { + t.Errorf("tip.bits was expected to be %x but got %x", expectedBits, tip.Header.Bits()) + } + + // Increase block rate to increase difficulty + for i := 0; i < consensusConfig.DifficultyAdjustmentWindowSize; i++ { + tip, tipHash = addBlockWithMinimumTime(tipHash) + tipGHOSTDAGData, err := tc.GHOSTDAGDataStore().Get(tc.DatabaseContext(), stagingArea, tipHash, false) + if err != nil { + t.Fatalf("GHOSTDAGDataStore: %+v", err) 
+ } + + selectedParentHeader, err := + tc.BlockHeaderStore().BlockHeader(tc.DatabaseContext(), stagingArea, tipGHOSTDAGData.SelectedParent()) + if err != nil { + t.Fatalf("BlockHeader: %+v", err) + } + + if compareBits(tip.Header.Bits(), selectedParentHeader.Bits()) > 0 { + t.Fatalf("Because we're increasing the block rate, the difficulty can't decrease") + } + } + + // Add blocks until difficulty stabilizes + lastBits := tip.Header.Bits() + sameBitsCount := 0 + for sameBitsCount < consensusConfig.DifficultyAdjustmentWindowSize+1 { + tip, tipHash = addBlock(0, tipHash) + if tip.Header.Bits() == lastBits { + sameBitsCount++ + } else { + lastBits = tip.Header.Bits() + sameBitsCount = 0 + } + } + + slowBlockTime := tip.Header.TimeInMilliseconds() + consensusConfig.TargetTimePerBlock.Milliseconds() + 1000 + slowBlock, tipHash := addBlock(slowBlockTime, tipHash) + if slowBlock.Header.Bits() != tip.Header.Bits() { + t.Fatalf("The difficulty should only change when slowBlock is in the past of a block") + } + + tip = slowBlock + + tip, tipHash = addBlock(0, tipHash) + if compareBits(tip.Header.Bits(), slowBlock.Header.Bits()) <= 0 { + t.Fatalf("tip.bits should be smaller than slowBlock.bits because slowBlock decreased the block" + + " rate, so the difficulty should decrease as well") + } + + // Here we create two chains: a chain of blue blocks, and a chain of red blocks with + // very low timestamps. Because the red blocks should be part of the difficulty + // window, their low timestamps should lower the difficulty, and we check it by + // comparing the bits of two blocks with the same blue score, one with the red + // blocks in its past and one without. + splitBlockHash := tipHash + blueTipHash := splitBlockHash + for i := 0; i < consensusConfig.DifficultyAdjustmentWindowSize; i++ { + _, blueTipHash = addBlock(0, blueTipHash) + } + + redChainTipHash := splitBlockHash + const redChainLength = 10 + for i := 0; i < redChainLength; i++ { + _, redChainTipHash = addBlockWithMinimumTime(redChainTipHash) + } + tipWithRedPast, _ := addBlock(0, redChainTipHash, blueTipHash) + tipWithoutRedPast, _ := addBlock(0, blueTipHash) + if tipWithRedPast.Header.Bits() <= tipWithoutRedPast.Header.Bits() { + t.Fatalf("tipWithRedPast.bits should be greater than tipWithoutRedPast.bits because the red blocks" + + " blocks have very low timestamp and should lower the difficulty") + } + + // We repeat the test, but now we make the blue chain longer in order to filter + // out the red blocks from the window, and check that the red blocks don't + // affect the difficulty. 
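+		// DifficultyAdjustmentWindowSize+redChainLength+1 blue blocks above the split are enough to
+		// fill the whole difficulty window from the blue chain alone, pushing the red blocks out of it.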
+ blueTipHash = splitBlockHash + for i := 0; i < consensusConfig.DifficultyAdjustmentWindowSize+redChainLength+1; i++ { + _, blueTipHash = addBlock(0, blueTipHash) + } + + redChainTipHash = splitBlockHash + for i := 0; i < redChainLength; i++ { + _, redChainTipHash = addBlockWithMinimumTime(redChainTipHash) + } + tipWithRedPast, _ = addBlock(0, redChainTipHash, blueTipHash) + tipWithoutRedPast, _ = addBlock(0, blueTipHash) + if tipWithRedPast.Header.Bits() != tipWithoutRedPast.Header.Bits() { + t.Fatalf("tipWithoutRedPast.bits should be the same as tipWithRedPast.bits because the red blocks" + + " are not part of the difficulty window") + } + }) +} + +func TestDAAScore(t *testing.T) { + testutils.ForAllNets(t, true, func(t *testing.T, consensusConfig *consensus.Config) { + consensusConfig.DifficultyAdjustmentWindowSize = 86 + + stagingArea := model.NewStagingArea() + + factory := consensus.NewFactory() + tc, teardown, err := factory.NewTestConsensus(consensusConfig, "TestDAAScore") + if err != nil { + t.Fatalf("Error setting up consensus: %+v", err) + } + defer teardown(false) + + // We create a small DAG in order to skip from block with blue score of 1 directly to 3 + split1Hash, _, err := tc.AddBlock([]*externalapi.DomainHash{consensusConfig.GenesisHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + block, _, err := tc.AddBlock([]*externalapi.DomainHash{consensusConfig.GenesisHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + blockBlueScore3, _, err := tc.AddBlock([]*externalapi.DomainHash{split1Hash, block}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + tipHash := blockBlueScore3 + blockBlueScore3DAAScore, err := tc.DAABlocksStore().DAAScore(tc.DatabaseContext(), stagingArea, tipHash) + if err != nil { + t.Fatalf("DAAScore: %+v", err) + } + + blockBlueScore3ExpectedDAAScore := uint64(2) + consensusConfig.GenesisBlock.Header.DAAScore() + if blockBlueScore3DAAScore != blockBlueScore3ExpectedDAAScore { + t.Fatalf("DAA score is expected to be %d but got %d", blockBlueScore3ExpectedDAAScore, blockBlueScore3DAAScore) + } + tipDAAScore := blockBlueScore3ExpectedDAAScore + + for i := uint64(0); i < 10; i++ { + tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{tipHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + tipDAAScore, err = tc.DAABlocksStore().DAAScore(tc.DatabaseContext(), stagingArea, tipHash) + if err != nil { + t.Fatalf("DAAScore: %+v", err) + } + + expectedDAAScore := blockBlueScore3ExpectedDAAScore + i + 1 + if tipDAAScore != expectedDAAScore { + t.Fatalf("DAA score is expected to be %d but got %d", expectedDAAScore, tipDAAScore) + } + } + + split2Hash := tipHash + split2DAAScore := tipDAAScore + for i := uint64(0); i < uint64(consensusConfig.DifficultyAdjustmentWindowSize)-1; i++ { + tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{tipHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + tipDAAScore, err = tc.DAABlocksStore().DAAScore(tc.DatabaseContext(), stagingArea, tipHash) + if err != nil { + t.Fatalf("DAAScore: %+v", err) + } + + expectedDAAScore := split2DAAScore + i + 1 + if tipDAAScore != expectedDAAScore { + t.Fatalf("DAA score is expected to be %d but got %d", expectedDAAScore, split2DAAScore) + } + } + + // This block should have blue score of 2 so it shouldn't be added to the DAA window of a merging block + blockAboveSplit1, _, err := tc.AddBlock([]*externalapi.DomainHash{split1Hash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) 
+ } + + // This block is in the anticone of consensusConfig.DifficultyAdjustmentWindowSize-1 blocks, so it must be part + // of the DAA window of a merging block + blockAboveSplit2, _, err := tc.AddBlock([]*externalapi.DomainHash{split2Hash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + currentSelectedTipDAAScore := tipDAAScore + currentSelectedTip := tipHash + tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{blockAboveSplit1, blockAboveSplit2, tipHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + tipDAAScore, err = tc.DAABlocksStore().DAAScore(tc.DatabaseContext(), stagingArea, tipHash) + if err != nil { + t.Fatalf("DAAScore: %+v", err) + } + + // The DAA score should be increased only by 2, because 1 of the 3 merged blocks + // is not in the DAA window + expectedDAAScore := currentSelectedTipDAAScore + 2 + if tipDAAScore != expectedDAAScore { + t.Fatalf("DAA score is expected to be %d but got %d", expectedDAAScore, tipDAAScore) + } + + tipDAAAddedBlocks, err := tc.DAABlocksStore().DAAAddedBlocks(tc.DatabaseContext(), stagingArea, tipHash) + if err != nil { + t.Fatalf("DAAScore: %+v", err) + } + + // blockAboveSplit2 should be excluded from the DAA added blocks because it's not in the tip's + // DAA window. + expectedDAABlocks := []*externalapi.DomainHash{blockAboveSplit2, currentSelectedTip} + if !externalapi.HashesEqual(tipDAAAddedBlocks, expectedDAABlocks) { + t.Fatalf("DAA added blocks are expected to be %s but got %s", expectedDAABlocks, tipDAAAddedBlocks) + } + }) +} + +func compareBits(a uint32, b uint32) int { + aTarget := difficulty.CompactToBig(a) + bTarget := difficulty.CompactToBig(b) + return aTarget.Cmp(bTarget) +} diff --git a/consensus/difficultymanager/hashrate.go b/consensus/difficultymanager/hashrate.go new file mode 100644 index 00000000..658c7be1 --- /dev/null +++ b/consensus/difficultymanager/hashrate.go @@ -0,0 +1,74 @@ +package difficultymanager + +import ( + "math/big" + + "github.com/kaspanet/kaspad/domain/consensus/model" + "github.com/kaspanet/kaspad/domain/consensus/model/externalapi" + "github.com/kaspanet/kaspad/infrastructure/logger" + "github.com/pkg/errors" +) + +func (dm *difficultyManager) EstimateNetworkHashesPerSecond(startHash *externalapi.DomainHash, windowSize int) (uint64, error) { + onEnd := logger.LogAndMeasureExecutionTime(log, "EstimateNetworkHashesPerSecond") + defer onEnd() + + stagingArea := model.NewStagingArea() + return dm.estimateNetworkHashesPerSecond(stagingArea, startHash, windowSize) +} + +func (dm *difficultyManager) estimateNetworkHashesPerSecond(stagingArea *model.StagingArea, + startHash *externalapi.DomainHash, windowSize int) (uint64, error) { + + const minWindowSize = 1000 + if windowSize < minWindowSize { + return 0, errors.Errorf("windowSize must be equal to or greater than %d", minWindowSize) + } + + blockWindow, windowHashes, err := dm.blockWindow(stagingArea, startHash, windowSize) + if err != nil { + return 0, err + } + + // return 0 if no blocks had been mined yet + if len(windowHashes) == 0 { + return 0, nil + } + + minWindowTimestamp, maxWindowTimestamp, _ := blockWindow.minMaxTimestamps() + if minWindowTimestamp == maxWindowTimestamp { + return 0, errors.Errorf("min window timestamp is equal to the max window timestamp") + } + + firstHash := windowHashes[0] + firstBlockGHOSTDAGData, err := dm.ghostdagStore.Get(dm.databaseContext, stagingArea, firstHash, false) + if err != nil { + return 0, err + } + firstBlockBlueWork := firstBlockGHOSTDAGData.BlueWork() + 
minWindowBlueWork := firstBlockBlueWork + maxWindowBlueWork := firstBlockBlueWork + for _, hash := range windowHashes[1:] { + blockGHOSTDAGData, err := dm.ghostdagStore.Get(dm.databaseContext, stagingArea, hash, false) + if err != nil { + return 0, err + } + blockBlueWork := blockGHOSTDAGData.BlueWork() + if blockBlueWork.Cmp(minWindowBlueWork) < 0 { + minWindowBlueWork = blockBlueWork + } + if blockBlueWork.Cmp(maxWindowBlueWork) > 0 { + maxWindowBlueWork = blockBlueWork + } + } + + windowsDiff := (maxWindowTimestamp - minWindowTimestamp) / 1000 // Divided by 1000 to convert milliseconds to seconds + if windowsDiff == 0 { + return 0, nil + } + + nominator := new(big.Int).Sub(maxWindowBlueWork, minWindowBlueWork) + denominator := big.NewInt(windowsDiff) + networkHashesPerSecondBigInt := new(big.Int).Div(nominator, denominator) + return networkHashesPerSecondBigInt.Uint64(), nil +} From 27e88925c8aa0514f696386019d3a3e522789c03 Mon Sep 17 00:00:00 2001 From: james Date: Wed, 29 Nov 2023 17:15:08 +0800 Subject: [PATCH 02/15] add model --- common/util/math/min.go | 25 + common/util/math/min_test.go | 64 + consensus/dagtraversalmanager/anticone.go | 77 ++ consensus/dagtraversalmanager/block_heap.go | 204 +++ .../dagtraversalmanager.go | 129 ++ .../dagtraversalmanager_test.go | 117 ++ .../selected_child_iterator.go | 109 ++ consensus/dagtraversalmanager/window.go | 200 +++ consensus/dagtraversalmanager/window_test.go | 369 ++++++ consensus/difficultymanager/blockwindow.go | 9 +- .../difficultymanager/difficultymanager.go | 50 +- .../difficultymanager_test.go | 17 +- consensus/difficultymanager/hashrate.go | 4 +- consensus/model/block_heap.go | 12 + consensus/model/blockiterator.go | 11 + consensus/model/database.go | 57 + consensus/model/externalapi/acceptancedata.go | 145 +++ consensus/model/externalapi/block.go | 84 ++ .../externalapi/block_equal_clone_test.go | 499 ++++++++ .../externalapi/block_with_trusted_data.go | 23 + consensus/model/externalapi/blockinfo.go | 37 + .../model/externalapi/blockinfo_clone_test.go | 108 ++ .../model/externalapi/blocklevelparents.go | 63 + consensus/model/externalapi/blocklocator.go | 24 + .../externalapi/blocklocator_clone_test.go | 76 ++ consensus/model/externalapi/blockstatus.go | 49 + .../blockstatus_equal_clone_test.go | 87 ++ consensus/model/externalapi/blocktemplate.go | 19 + consensus/model/externalapi/coinbase.go | 38 + .../model/externalapi/coinbase_clone_test.go | 59 + consensus/model/externalapi/consensus.go | 59 + .../model/externalapi/consensus_events.go | 30 + consensus/model/externalapi/ghostdag.go | 67 + consensus/model/externalapi/hash.go | 123 ++ .../externalapi/hash_clone_equal_test.go | 79 ++ .../model/externalapi/pruning_point_proof.go | 6 + .../model/externalapi/readonlyutxoset.go | 10 + consensus/model/externalapi/subnetworkid.go | 33 + .../subnetworkid_clone_equal_test.go | 99 ++ consensus/model/externalapi/sync.go | 36 + .../externalapi/sync_equal_clone_test.go | 99 ++ consensus/model/externalapi/transaction.go | 363 ++++++ .../transaction_equal_clone_test.go | 1107 +++++++++++++++++ consensus/model/externalapi/utxodiff.go | 32 + consensus/model/externalapi/utxoentry.go | 20 + consensus/model/externalapi/virtual.go | 10 + ...terface_datastructures_blockheaderstore.go | 15 + ...interface_datastructures_daablocksstore.go | 14 + ...erface_datastructures_ghostdagdatastore.go | 12 + .../interface_processes_dagtopologymanager.go | 19 + ...interface_processes_dagtraversalmanager.go | 21 + .../interface_processes_difficultymanager.go | 13 + 
.../interface_processes_ghostdagmanager.go | 12 + core/blockchain/blockchain.go | 8 +- core/types/pow/diff.go | 57 +- 55 files changed, 5039 insertions(+), 70 deletions(-) create mode 100644 common/util/math/min.go create mode 100644 common/util/math/min_test.go create mode 100644 consensus/dagtraversalmanager/anticone.go create mode 100644 consensus/dagtraversalmanager/block_heap.go create mode 100644 consensus/dagtraversalmanager/dagtraversalmanager.go create mode 100644 consensus/dagtraversalmanager/dagtraversalmanager_test.go create mode 100644 consensus/dagtraversalmanager/selected_child_iterator.go create mode 100644 consensus/dagtraversalmanager/window.go create mode 100644 consensus/dagtraversalmanager/window_test.go create mode 100644 consensus/model/block_heap.go create mode 100644 consensus/model/blockiterator.go create mode 100644 consensus/model/externalapi/acceptancedata.go create mode 100644 consensus/model/externalapi/block.go create mode 100644 consensus/model/externalapi/block_equal_clone_test.go create mode 100644 consensus/model/externalapi/block_with_trusted_data.go create mode 100644 consensus/model/externalapi/blockinfo.go create mode 100644 consensus/model/externalapi/blockinfo_clone_test.go create mode 100644 consensus/model/externalapi/blocklevelparents.go create mode 100644 consensus/model/externalapi/blocklocator.go create mode 100644 consensus/model/externalapi/blocklocator_clone_test.go create mode 100644 consensus/model/externalapi/blockstatus.go create mode 100644 consensus/model/externalapi/blockstatus_equal_clone_test.go create mode 100644 consensus/model/externalapi/blocktemplate.go create mode 100644 consensus/model/externalapi/coinbase.go create mode 100644 consensus/model/externalapi/coinbase_clone_test.go create mode 100644 consensus/model/externalapi/consensus.go create mode 100644 consensus/model/externalapi/consensus_events.go create mode 100644 consensus/model/externalapi/ghostdag.go create mode 100644 consensus/model/externalapi/hash.go create mode 100644 consensus/model/externalapi/hash_clone_equal_test.go create mode 100644 consensus/model/externalapi/pruning_point_proof.go create mode 100644 consensus/model/externalapi/readonlyutxoset.go create mode 100644 consensus/model/externalapi/subnetworkid.go create mode 100644 consensus/model/externalapi/subnetworkid_clone_equal_test.go create mode 100644 consensus/model/externalapi/sync.go create mode 100644 consensus/model/externalapi/sync_equal_clone_test.go create mode 100644 consensus/model/externalapi/transaction.go create mode 100644 consensus/model/externalapi/transaction_equal_clone_test.go create mode 100644 consensus/model/externalapi/utxodiff.go create mode 100644 consensus/model/externalapi/utxoentry.go create mode 100644 consensus/model/externalapi/virtual.go create mode 100644 consensus/model/interface_datastructures_blockheaderstore.go create mode 100644 consensus/model/interface_datastructures_daablocksstore.go create mode 100644 consensus/model/interface_datastructures_ghostdagdatastore.go create mode 100644 consensus/model/interface_processes_dagtopologymanager.go create mode 100644 consensus/model/interface_processes_dagtraversalmanager.go create mode 100644 consensus/model/interface_processes_difficultymanager.go create mode 100644 consensus/model/interface_processes_ghostdagmanager.go diff --git a/common/util/math/min.go b/common/util/math/min.go new file mode 100644 index 00000000..edbeac11 --- /dev/null +++ b/common/util/math/min.go @@ -0,0 +1,25 @@ +package math + +// MinInt 
returns the smaller of x or y. +func MinInt(x, y int) int { + if x < y { + return x + } + return y +} + +// MaxInt64 returns the bigger of x or y. +func MaxInt64(x, y int64) int64 { + if x > y { + return x + } + return y +} + +// MinUint32 returns the smaller of x or y. +func MinUint32(x, y uint32) uint32 { + if x < y { + return x + } + return y +} diff --git a/common/util/math/min_test.go b/common/util/math/min_test.go new file mode 100644 index 00000000..62d46f4d --- /dev/null +++ b/common/util/math/min_test.go @@ -0,0 +1,64 @@ +package math_test + +import ( + utilMath "github.com/kaspanet/kaspad/util/math" + "math" + "testing" +) + +const ( + MaxInt = int(^uint(0) >> 1) + MinInt = -MaxInt - 1 +) + +func TestMinInt(t *testing.T) { + tests := []struct { + inputs [2]int + expected int + }{ + {[2]int{MaxInt, 0}, 0}, + {[2]int{1, 2}, 1}, + {[2]int{MaxInt, MaxInt}, MaxInt}, + {[2]int{MaxInt, MaxInt - 1}, MaxInt - 1}, + {[2]int{MaxInt, MinInt}, MinInt}, + {[2]int{MinInt, 0}, MinInt}, + {[2]int{MinInt, MinInt}, MinInt}, + {[2]int{0, MinInt + 1}, MinInt + 1}, + {[2]int{0, MinInt}, MinInt}, + } + + for i, test := range tests { + result := utilMath.MinInt(test.inputs[0], test.inputs[1]) + if result != test.expected { + t.Fatalf("%d: Expected %d, instead found: %d", i, test.expected, result) + } + reverseResult := utilMath.MinInt(test.inputs[1], test.inputs[0]) + if result != reverseResult { + t.Fatalf("%d: Expected result and reverseResult to be the same, instead: %d!=%d", i, result, reverseResult) + } + } +} + +func TestMinUint32(t *testing.T) { + tests := []struct { + inputs [2]uint32 + expected uint32 + }{ + {[2]uint32{math.MaxUint32, 0}, 0}, + {[2]uint32{1, 2}, 1}, + {[2]uint32{math.MaxUint32, math.MaxUint32}, math.MaxUint32}, + {[2]uint32{math.MaxUint32, math.MaxUint32 - 1}, math.MaxUint32 - 1}, + } + + for _, test := range tests { + result := utilMath.MinUint32(test.inputs[0], test.inputs[1]) + if result != test.expected { + t.Fatalf("Expected %d, instead found: %d", test.expected, result) + + } + reverseResult := utilMath.MinUint32(test.inputs[1], test.inputs[0]) + if result != reverseResult { + t.Fatalf("Expected result and reverseResult to be the same, instead: %d!=%d", result, reverseResult) + } + } +} diff --git a/consensus/dagtraversalmanager/anticone.go b/consensus/dagtraversalmanager/anticone.go new file mode 100644 index 00000000..21cfb3ce --- /dev/null +++ b/consensus/dagtraversalmanager/anticone.go @@ -0,0 +1,77 @@ +package dagtraversalmanager + +import ( + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/consensus/model/externalapi" + "github.com/Qitmeer/qng/consensus/utils/hashset" + "github.com/pkg/errors" +) + +func (dtm *dagTraversalManager) AnticoneFromVirtualPOV(stagingArea *model.StagingArea, blockHash *externalapi.DomainHash) ( + []*externalapi.DomainHash, error) { + + virtualParents, err := dtm.dagTopologyManager.Parents(stagingArea, model.VirtualBlockHash) + if err != nil { + return nil, err + } + + return dtm.AnticoneFromBlocks(stagingArea, virtualParents, blockHash, 0) +} + +func (dtm *dagTraversalManager) AnticoneFromBlocks(stagingArea *model.StagingArea, tips []*externalapi.DomainHash, + blockHash *externalapi.DomainHash, maxTraversalAllowed uint64) ( + []*externalapi.DomainHash, error) { + + anticone := []*externalapi.DomainHash{} + queue := tips + visited := hashset.New() + + traversalCounter := uint64(0) + for len(queue) > 0 { + var current *externalapi.DomainHash + current, queue = queue[0], queue[1:] + + if visited.Contains(current) { + 
continue + } + + visited.Add(current) + + currentIsAncestorOfBlock, err := dtm.dagTopologyManager.IsAncestorOf(stagingArea, current, blockHash) + if err != nil { + return nil, err + } + + if currentIsAncestorOfBlock { + continue + } + + blockIsAncestorOfCurrent, err := dtm.dagTopologyManager.IsAncestorOf(stagingArea, blockHash, current) + if err != nil { + return nil, err + } + + // We count the number of blocks in past(tips) \setminus past(blockHash). + // We don't use `len(visited)` since it includes some maximal blocks in past(blockHash) as well. + traversalCounter++ + if maxTraversalAllowed > 0 && traversalCounter > maxTraversalAllowed { + return nil, errors.Wrapf(model.ErrReachedMaxTraversalAllowed, + "Passed max allowed traversal (%d > %d)", traversalCounter, maxTraversalAllowed) + } + + if !blockIsAncestorOfCurrent { + anticone = append(anticone, current) + } + + currentParents, err := dtm.dagTopologyManager.Parents(stagingArea, current) + if err != nil { + return nil, err + } + + for _, parent := range currentParents { + queue = append(queue, parent) + } + } + + return anticone, nil +} diff --git a/consensus/dagtraversalmanager/block_heap.go b/consensus/dagtraversalmanager/block_heap.go new file mode 100644 index 00000000..639ff54f --- /dev/null +++ b/consensus/dagtraversalmanager/block_heap.go @@ -0,0 +1,204 @@ +package dagtraversalmanager + +import ( + "container/heap" + + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/consensus/model/externalapi" +) + +func blockGHOSTDAGDataHashPairLess(left, right *externalapi.BlockGHOSTDAGDataHashPair, gm model.GHOSTDAGManager) bool { + return gm.Less(left.Hash, left.GHOSTDAGData, right.Hash, right.GHOSTDAGData) +} + +// baseHeap is an implementation for heap.Interface that sorts blocks by their blueWork+hash +type baseHeap struct { + slice []*externalapi.BlockGHOSTDAGDataHashPair + ghostdagManager model.GHOSTDAGManager +} + +func (h *baseHeap) Len() int { return len(h.slice) } +func (h *baseHeap) Swap(i, j int) { h.slice[i], h.slice[j] = h.slice[j], h.slice[i] } + +func (h *baseHeap) Push(x interface{}) { + h.slice = append(h.slice, x.(*externalapi.BlockGHOSTDAGDataHashPair)) +} + +func (h *baseHeap) Pop() interface{} { + oldSlice := h.slice + oldLength := len(oldSlice) + popped := oldSlice[oldLength-1] + h.slice = oldSlice[0 : oldLength-1] + return popped +} + +// peek returns the block with lowest blueWork+hash from this heap without removing it +func (h *baseHeap) peek() *externalapi.BlockGHOSTDAGDataHashPair { + return h.slice[0] +} + +// upHeap extends baseHeap to include Less operation that traverses from bottom to top +type upHeap struct{ baseHeap } + +func (h *upHeap) Less(i, j int) bool { + heapNodeI := h.slice[i] + heapNodeJ := h.slice[j] + return blockGHOSTDAGDataHashPairLess(heapNodeI, heapNodeJ, h.ghostdagManager) +} + +// downHeap extends baseHeap to include Less operation that traverses from top to bottom +type downHeap struct{ baseHeap } + +func (h *downHeap) Less(i, j int) bool { + heapNodeI := h.slice[i] + heapNodeJ := h.slice[j] + return !blockGHOSTDAGDataHashPairLess(heapNodeI, heapNodeJ, h.ghostdagManager) +} + +// blockHeap represents a mutable heap of blocks, sorted by their blueWork+hash +type blockHeap struct { + impl heap.Interface + ghostdagStore model.GHOSTDAGDataStore + dbContext model.DBReader + stagingArea *model.StagingArea +} + +// NewDownHeap initializes and returns a new blockHeap +func (dtm *dagTraversalManager) NewDownHeap(stagingArea *model.StagingArea) model.BlockHeap { + h := 
blockHeap{ + impl: &downHeap{baseHeap{ghostdagManager: dtm.ghostdagManager}}, + ghostdagStore: dtm.ghostdagDataStore, + dbContext: dtm.databaseContext, + stagingArea: stagingArea, + } + heap.Init(h.impl) + return &h +} + +// NewUpHeap initializes and returns a new blockHeap +func (dtm *dagTraversalManager) NewUpHeap(stagingArea *model.StagingArea) model.BlockHeap { + h := blockHeap{ + impl: &upHeap{baseHeap{ghostdagManager: dtm.ghostdagManager}}, + ghostdagStore: dtm.ghostdagDataStore, + dbContext: dtm.databaseContext, + stagingArea: stagingArea, + } + heap.Init(h.impl) + return &h +} + +// Pop removes the block with lowest blueWork+hash from this heap and returns it +func (bh *blockHeap) Pop() *externalapi.DomainHash { + return heap.Pop(bh.impl).(*externalapi.BlockGHOSTDAGDataHashPair).Hash +} + +// Push pushes the block onto the heap +func (bh *blockHeap) Push(blockHash *externalapi.DomainHash) error { + ghostdagData, err := bh.ghostdagStore.Get(bh.dbContext, bh.stagingArea, blockHash, false) + if err != nil { + return err + } + + heap.Push(bh.impl, &externalapi.BlockGHOSTDAGDataHashPair{ + Hash: blockHash, + GHOSTDAGData: ghostdagData, + }) + + return nil +} + +func (bh *blockHeap) PushSlice(blockHashes []*externalapi.DomainHash) error { + for _, blockHash := range blockHashes { + err := bh.Push(blockHash) + if err != nil { + return err + } + } + return nil +} + +// Len returns the length of this heap +func (bh *blockHeap) Len() int { + return bh.impl.Len() +} + +// ToSlice copies this heap to a slice +func (bh *blockHeap) ToSlice() []*externalapi.DomainHash { + length := bh.Len() + hashes := make([]*externalapi.DomainHash, length) + for i := 0; i < length; i++ { + hashes[i] = bh.Pop() + } + return hashes +} + +// sizedUpBlockHeap represents a mutable heap of Blocks, sorted by their blueWork+hash, capped by a specific size. +type sizedUpBlockHeap struct { + impl upHeap + ghostdagStore model.GHOSTDAGDataStore + dbContext model.DBReader + stagingArea *model.StagingArea +} + +// newSizedUpHeap initializes and returns a new sizedUpBlockHeap +func (dtm *dagTraversalManager) newSizedUpHeap(stagingArea *model.StagingArea, cap int) *sizedUpBlockHeap { + h := sizedUpBlockHeap{ + impl: upHeap{baseHeap{slice: make([]*externalapi.BlockGHOSTDAGDataHashPair, 0, cap), ghostdagManager: dtm.ghostdagManager}}, + ghostdagStore: dtm.ghostdagDataStore, + dbContext: dtm.databaseContext, + stagingArea: stagingArea, + } + heap.Init(&h.impl) + return &h +} + +func (dtm *dagTraversalManager) newSizedUpHeapFromSlice(stagingArea *model.StagingArea, slice []*externalapi.BlockGHOSTDAGDataHashPair) *sizedUpBlockHeap { + sliceClone := make([]*externalapi.BlockGHOSTDAGDataHashPair, len(slice), cap(slice)) + copy(sliceClone, slice) + h := sizedUpBlockHeap{ + impl: upHeap{baseHeap{slice: sliceClone, ghostdagManager: dtm.ghostdagManager}}, + ghostdagStore: dtm.ghostdagDataStore, + dbContext: dtm.databaseContext, + stagingArea: stagingArea, + } + return &h +} + +// len returns the length of this heap +func (sbh *sizedUpBlockHeap) len() int { + return sbh.impl.Len() +} + +// pop removes the block with lowest blueWork+hash from this heap and returns it +func (sbh *sizedUpBlockHeap) pop() *externalapi.DomainHash { + return heap.Pop(&sbh.impl).(*externalapi.BlockGHOSTDAGDataHashPair).Hash +} + +// tryPushWithGHOSTDAGData is just like tryPush but the caller provides the ghostdagData of the block. 
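+// If the heap is already at capacity and the candidate's blueWork+hash is lower than the current
+// minimum, the candidate is rejected and false is returned; otherwise the minimum is evicted to make room.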
+func (sbh *sizedUpBlockHeap) tryPushWithGHOSTDAGData(blockHash *externalapi.DomainHash, + ghostdagData *externalapi.BlockGHOSTDAGData) (bool, error) { + + node := &externalapi.BlockGHOSTDAGDataHashPair{ + Hash: blockHash, + GHOSTDAGData: ghostdagData, + } + if len(sbh.impl.slice) == cap(sbh.impl.slice) { + min := sbh.impl.peek() + // if the heap is full, and the new block is less than the minimum, return false + if blockGHOSTDAGDataHashPairLess(node, min, sbh.impl.ghostdagManager) { + return false, nil + } + sbh.pop() + } + heap.Push(&sbh.impl, node) + return true, nil +} + +// tryPush tries to push the block onto the heap, if the heap is full and it's less than the minimum it rejects it +func (sbh *sizedUpBlockHeap) tryPush(blockHash *externalapi.DomainHash) (bool, error) { + ghostdagData, err := sbh.ghostdagStore.Get(sbh.dbContext, sbh.stagingArea, blockHash, false) + if err != nil { + return false, err + } + return sbh.tryPushWithGHOSTDAGData(blockHash, ghostdagData) +} diff --git a/consensus/dagtraversalmanager/dagtraversalmanager.go b/consensus/dagtraversalmanager/dagtraversalmanager.go new file mode 100644 index 00000000..d2f0f8b4 --- /dev/null +++ b/consensus/dagtraversalmanager/dagtraversalmanager.go @@ -0,0 +1,129 @@ +package dagtraversalmanager + +import ( + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/consensus/model/externalapi" + "github.com/pkg/errors" +) + +// dagTraversalManager exposes methods for traversing blocks +// in the DAG +type dagTraversalManager struct { + databaseContext model.DBReader + + dagTopologyManager model.DAGTopologyManager + ghostdagManager model.GHOSTDAGManager + ghostdagDataStore model.GHOSTDAGDataStore + reachabilityManager model.ReachabilityManager + daaWindowStore model.BlocksWithTrustedDataDAAWindowStore + genesisHash *externalapi.DomainHash + difficultyAdjustmentWindowSize int + windowHeapSliceStore model.WindowHeapSliceStore +} + +// New instantiates a new DAGTraversalManager +func New( + databaseContext model.DBReader, + dagTopologyManager model.DAGTopologyManager, + ghostdagDataStore model.GHOSTDAGDataStore, + reachabilityManager model.ReachabilityManager, + ghostdagManager model.GHOSTDAGManager, + daaWindowStore model.BlocksWithTrustedDataDAAWindowStore, + windowHeapSliceStore model.WindowHeapSliceStore, + genesisHash *externalapi.DomainHash, + difficultyAdjustmentWindowSize int) model.DAGTraversalManager { + return &dagTraversalManager{ + databaseContext: databaseContext, + dagTopologyManager: dagTopologyManager, + ghostdagDataStore: ghostdagDataStore, + reachabilityManager: reachabilityManager, + ghostdagManager: ghostdagManager, + daaWindowStore: daaWindowStore, + + genesisHash: genesisHash, + difficultyAdjustmentWindowSize: difficultyAdjustmentWindowSize, + windowHeapSliceStore: windowHeapSliceStore, + } +} + +func (dtm *dagTraversalManager) LowestChainBlockAboveOrEqualToBlueScore(stagingArea *model.StagingArea, highHash *externalapi.DomainHash, blueScore uint64) (*externalapi.DomainHash, error) { + highBlockGHOSTDAGData, err := dtm.ghostdagDataStore.Get(dtm.databaseContext, stagingArea, highHash, false) + if err != nil { + return nil, err + } + + if highBlockGHOSTDAGData.BlueScore() < blueScore { + return nil, errors.Errorf("the given blue score %d is higher than block %s blue score of %d", + blueScore, highHash, highBlockGHOSTDAGData.BlueScore()) + } + + currentHash := highHash + currentBlockGHOSTDAGData := highBlockGHOSTDAGData + + for !currentHash.Equal(dtm.genesisHash) { + selectedParentBlockGHOSTDAGData, err 
:= dtm.ghostdagDataStore.Get(dtm.databaseContext, stagingArea, + currentBlockGHOSTDAGData.SelectedParent(), false) + if err != nil { + return nil, err + } + + if selectedParentBlockGHOSTDAGData.BlueScore() < blueScore { + break + } + currentHash = currentBlockGHOSTDAGData.SelectedParent() + currentBlockGHOSTDAGData = selectedParentBlockGHOSTDAGData + } + + return currentHash, nil +} + +func (dtm *dagTraversalManager) CalculateChainPath(stagingArea *model.StagingArea, + fromBlockHash, toBlockHash *externalapi.DomainHash) (*externalapi.SelectedChainPath, error) { + + // Walk down from fromBlockHash until we reach the common selected + // parent chain ancestor of fromBlockHash and toBlockHash. Note + // that this slice will be empty if fromBlockHash is the selected + // parent of toBlockHash + var removed []*externalapi.DomainHash + current := fromBlockHash + for { + isCurrentInTheSelectedParentChainOfNewVirtualSelectedParent, err := + dtm.dagTopologyManager.IsInSelectedParentChainOf(stagingArea, current, toBlockHash) + if err != nil { + return nil, err + } + if isCurrentInTheSelectedParentChainOfNewVirtualSelectedParent { + break + } + removed = append(removed, current) + + currentGHOSTDAGData, err := dtm.ghostdagDataStore.Get(dtm.databaseContext, stagingArea, current, false) + if err != nil { + return nil, err + } + current = currentGHOSTDAGData.SelectedParent() + } + commonAncestor := current + + // Walk down from the toBlockHash to the common ancestor + var added []*externalapi.DomainHash + current = toBlockHash + for !current.Equal(commonAncestor) { + added = append(added, current) + currentGHOSTDAGData, err := dtm.ghostdagDataStore.Get(dtm.databaseContext, stagingArea, current, false) + if err != nil { + return nil, err + } + current = currentGHOSTDAGData.SelectedParent() + } + + // Reverse the order of `added` so that it's sorted from low hash to high hash + for i, j := 0, len(added)-1; i < j; i, j = i+1, j-1 { + added[i], added[j] = added[j], added[i] + } + + return &externalapi.SelectedChainPath{ + Added: added, + Removed: removed, + }, nil +} diff --git a/consensus/dagtraversalmanager/dagtraversalmanager_test.go b/consensus/dagtraversalmanager/dagtraversalmanager_test.go new file mode 100644 index 00000000..72cc82df --- /dev/null +++ b/consensus/dagtraversalmanager/dagtraversalmanager_test.go @@ -0,0 +1,117 @@ +package dagtraversalmanager_test + +import ( + "testing" + + "github.com/Qitmeer/qng/consensus/model" + + "github.com/Qitmeer/qng/consensus" + "github.com/Qitmeer/qng/consensus/model/externalapi" + "github.com/Qitmeer/qng/consensus/utils/testutils" +) + +func TestLowestChainBlockAboveOrEqualToBlueScore(t *testing.T) { + testutils.ForAllNets(t, true, func(t *testing.T, consensusConfig *consensus.Config) { + consensusConfig.FinalityDuration = 10 * consensusConfig.TargetTimePerBlock + factory := consensus.NewFactory() + tc, tearDown, err := factory.NewTestConsensus(consensusConfig, + "TestLowestChainBlockAboveOrEqualToBlueScore") + if err != nil { + t.Fatalf("NewTestConsensus: %s", err) + } + defer tearDown(false) + + stagingArea := model.NewStagingArea() + + checkExpectedBlock := func(highHash *externalapi.DomainHash, blueScore uint64, expected *externalapi.DomainHash) { + blockHash, err := tc.DAGTraversalManager().LowestChainBlockAboveOrEqualToBlueScore(stagingArea, highHash, blueScore) + if err != nil { + t.Fatalf("LowestChainBlockAboveOrEqualToBlueScore: %+v", err) + } + + if !blockHash.Equal(expected) { + t.Fatalf("Expected block %s but got %s", expected, blockHash) + } + } 
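+		// checkBlueScore verifies that the DAG built below actually produces the blue scores
+		// the expectations of checkExpectedBlock rely on.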
+ + checkBlueScore := func(blockHash *externalapi.DomainHash, expectedBlueScore uint64) { + ghostdagData, err := tc.GHOSTDAGDataStore().Get(tc.DatabaseContext(), stagingArea, blockHash, false) + if err != nil { + t.Fatalf("GHOSTDAGDataStore().Get: %+v", err) + } + + if ghostdagData.BlueScore() != expectedBlueScore { + t.Fatalf("Expected blue score %d but got %d", expectedBlueScore, ghostdagData.BlueScore()) + } + } + + chain := []*externalapi.DomainHash{consensusConfig.GenesisHash} + tipHash := consensusConfig.GenesisHash + for i := 0; i < 9; i++ { + var err error + tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{tipHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + chain = append(chain, tipHash) + } + + sideChain1TipHash, _, err := tc.AddBlock([]*externalapi.DomainHash{consensusConfig.GenesisHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{sideChain1TipHash, tipHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + chain = append(chain, tipHash) + blueScore11BlockHash := tipHash + checkBlueScore(blueScore11BlockHash, 11) + + for i := 0; i < 5; i++ { + var err error + tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{tipHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + chain = append(chain, tipHash) + } + + sideChain2TipHash, _, err := tc.AddBlock([]*externalapi.DomainHash{consensusConfig.GenesisHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{sideChain2TipHash, tipHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + chain = append(chain, tipHash) + + blueScore18BlockHash := tipHash + checkBlueScore(blueScore18BlockHash, 18) + + for i := 0; i < 3; i++ { + var err error + tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{tipHash}, nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + chain = append(chain, tipHash) + } + + // Check by exact blue score + checkExpectedBlock(tipHash, 0, consensusConfig.GenesisHash) + checkExpectedBlock(tipHash, 5, chain[5]) + checkExpectedBlock(tipHash, 19, chain[len(chain)-3]) + + // Check by non exact blue score + checkExpectedBlock(tipHash, 17, blueScore18BlockHash) + checkExpectedBlock(tipHash, 10, blueScore11BlockHash) + }) +} diff --git a/consensus/dagtraversalmanager/selected_child_iterator.go b/consensus/dagtraversalmanager/selected_child_iterator.go new file mode 100644 index 00000000..92e9cf57 --- /dev/null +++ b/consensus/dagtraversalmanager/selected_child_iterator.go @@ -0,0 +1,109 @@ +package dagtraversalmanager + +import ( + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/consensus/model/externalapi" + "github.com/pkg/errors" +) + +type selectedChildIterator struct { + dagTraversalManager model.DAGTraversalManager + + includeLowHash bool + highHash, lowHash *externalapi.DomainHash + current *externalapi.DomainHash + err error + isClosed bool + stagingArea *model.StagingArea +} + +func (s *selectedChildIterator) First() bool { + if s.isClosed { + panic("Tried using a closed SelectedChildIterator") + } + s.current = s.lowHash + if s.includeLowHash { + return true + } + + return s.Next() +} + +func (s *selectedChildIterator) Next() bool { + if s.isClosed { + panic("Tried using a closed SelectedChildIterator") + } + if s.err != nil { + return true + } + + selectedChild, err := s.dagTraversalManager.SelectedChild(s.stagingArea, s.highHash, 
s.current) + if errors.Is(err, errNoSelectedChild) { + return false + } + if err != nil { + s.current = nil + s.err = err + return true + } + + s.current = selectedChild + return true +} + +func (s *selectedChildIterator) Get() (*externalapi.DomainHash, error) { + if s.isClosed { + return nil, errors.New("Tried using a closed SelectedChildIterator") + } + return s.current, s.err +} + +func (s *selectedChildIterator) Close() error { + if s.isClosed { + return errors.New("Tried using a closed SelectedChildIterator") + } + s.isClosed = true + s.highHash = nil + s.lowHash = nil + s.current = nil + s.err = nil + return nil +} + +// SelectedChildIterator returns a BlockIterator that iterates from lowHash (exclusive) to highHash (inclusive) over +// highHash's selected parent chain +func (dtm *dagTraversalManager) SelectedChildIterator(stagingArea *model.StagingArea, + highHash, lowHash *externalapi.DomainHash, includeLowHash bool) (model.BlockIterator, error) { + + isLowHashInSelectedParentChainOfHighHash, err := dtm.dagTopologyManager.IsInSelectedParentChainOf( + stagingArea, lowHash, highHash) + if err != nil { + return nil, err + } + + if !isLowHashInSelectedParentChainOfHighHash { + return nil, errors.Errorf("%s is not in the selected parent chain of %s", highHash, lowHash) + } + return &selectedChildIterator{ + dagTraversalManager: dtm, + includeLowHash: includeLowHash, + highHash: highHash, + lowHash: lowHash, + current: lowHash, + stagingArea: stagingArea, + }, nil +} + +var errNoSelectedChild = errors.New("errNoSelectedChild") + +func (dtm *dagTraversalManager) SelectedChild(stagingArea *model.StagingArea, + highHash, lowHash *externalapi.DomainHash) (*externalapi.DomainHash, error) { + + // The selected child is in fact the next reachability tree nextAncestor + nextAncestor, err := dtm.reachabilityManager.FindNextAncestor(stagingArea, highHash, lowHash) + if err != nil { + return nil, errors.Wrapf(errNoSelectedChild, "no selected child for %s from the point of view of %s", + lowHash, highHash) + } + return nextAncestor, nil +} diff --git a/consensus/dagtraversalmanager/window.go b/consensus/dagtraversalmanager/window.go new file mode 100644 index 00000000..ffd2cde3 --- /dev/null +++ b/consensus/dagtraversalmanager/window.go @@ -0,0 +1,200 @@ +package dagtraversalmanager + +import ( + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/consensus/model/externalapi" + "github.com/kaspanet/kaspad/infrastructure/db/database" +) + +func (dtm *dagTraversalManager) DAABlockWindow(stagingArea *model.StagingArea, highHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) { + return dtm.BlockWindow(stagingArea, highHash, dtm.difficultyAdjustmentWindowSize) +} + +// BlockWindow returns a blockWindow of the given size that contains the +// blocks in the past of highHash, the sorting is unspecified. 
+// If the number of blocks in the past of startingNode is less then windowSize, +func (dtm *dagTraversalManager) BlockWindow(stagingArea *model.StagingArea, highHash *externalapi.DomainHash, + windowSize int) ([]*externalapi.DomainHash, error) { + + windowHeap, err := dtm.blockWindowHeap(stagingArea, highHash, windowSize) + if err != nil { + return nil, err + } + + window := make([]*externalapi.DomainHash, 0, len(windowHeap.impl.slice)) + for _, b := range windowHeap.impl.slice { + window = append(window, b.Hash) + } + return window, nil +} + +func (dtm *dagTraversalManager) blockWindowHeap(stagingArea *model.StagingArea, + highHash *externalapi.DomainHash, windowSize int) (*sizedUpBlockHeap, error) { + windowHeapSlice, err := dtm.windowHeapSliceStore.Get(stagingArea, highHash, windowSize) + sliceNotCached := database.IsNotFoundError(err) + if !sliceNotCached && err != nil { + return nil, err + } + if !sliceNotCached { + return dtm.newSizedUpHeapFromSlice(stagingArea, windowHeapSlice), nil + } + + heap, err := dtm.calculateBlockWindowHeap(stagingArea, highHash, windowSize) + if err != nil { + return nil, err + } + + if !highHash.Equal(model.VirtualBlockHash) { + dtm.windowHeapSliceStore.Stage(stagingArea, highHash, windowSize, heap.impl.slice) + } + return heap, nil +} + +func (dtm *dagTraversalManager) calculateBlockWindowHeap(stagingArea *model.StagingArea, + highHash *externalapi.DomainHash, windowSize int) (*sizedUpBlockHeap, error) { + + windowHeap := dtm.newSizedUpHeap(stagingArea, windowSize) + if highHash.Equal(dtm.genesisHash) { + return windowHeap, nil + } + if windowSize == 0 { + return windowHeap, nil + } + + current := highHash + currentGHOSTDAGData, err := dtm.ghostdagDataStore.Get(dtm.databaseContext, stagingArea, highHash, false) + if err != nil { + return nil, err + } + + // If the block has a trusted DAA window attached, we just take it as is and don't use cache of selected parent to + // build the window. This is because tryPushMergeSet might not be able to find all the GHOSTDAG data that is + // associated with the block merge set. + _, err = dtm.daaWindowStore.DAAWindowBlock(dtm.databaseContext, stagingArea, current, 0) + isNonTrustedBlock := database.IsNotFoundError(err) + if !isNonTrustedBlock && err != nil { + return nil, err + } + + if isNonTrustedBlock && currentGHOSTDAGData.SelectedParent() != nil { + windowHeapSlice, err := dtm.windowHeapSliceStore.Get(stagingArea, currentGHOSTDAGData.SelectedParent(), windowSize) + selectedParentNotCached := database.IsNotFoundError(err) + if !selectedParentNotCached && err != nil { + return nil, err + } + if !selectedParentNotCached { + windowHeap := dtm.newSizedUpHeapFromSlice(stagingArea, windowHeapSlice) + if !currentGHOSTDAGData.SelectedParent().Equal(dtm.genesisHash) { + selectedParentGHOSTDAGData, err := dtm.ghostdagDataStore.Get( + dtm.databaseContext, stagingArea, currentGHOSTDAGData.SelectedParent(), false) + if err != nil { + return nil, err + } + + _, err = dtm.tryPushMergeSet(windowHeap, currentGHOSTDAGData, selectedParentGHOSTDAGData) + if err != nil { + return nil, err + } + } + + return windowHeap, nil + } + } + + // Walk down the chain until you finish or find a trusted block and then take complete the rest + // of the window with the trusted window. 
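+	// Each iteration pushes the current block's selected parent and merge set into the heap
+	// (via tryPushMergeSet) and then steps down to the selected parent. The walk stops when the
+	// selected parent is genesis, when a trusted block's stored DAA window can fill the remainder,
+	// or once the selected parent can no longer enter the already-full window.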
+ for { + if currentGHOSTDAGData.SelectedParent().Equal(dtm.genesisHash) { + break + } + + _, err := dtm.daaWindowStore.DAAWindowBlock(dtm.databaseContext, stagingArea, current, 0) + currentIsNonTrustedBlock := database.IsNotFoundError(err) + if !currentIsNonTrustedBlock && err != nil { + return nil, err + } + + if !currentIsNonTrustedBlock { + for i := uint64(0); ; i++ { + daaBlock, err := dtm.daaWindowStore.DAAWindowBlock(dtm.databaseContext, stagingArea, current, i) + if database.IsNotFoundError(err) { + break + } + if err != nil { + return nil, err + } + + _, err = windowHeap.tryPushWithGHOSTDAGData(daaBlock.Hash, daaBlock.GHOSTDAGData) + if err != nil { + return nil, err + } + + // Right now we go over all of the window of `current` and filter blocks on the fly. + // We can optimize it if we make sure that daaWindowStore stores sorted windows, and + // then return from this function once one block was not added to the heap. + } + break + } + + selectedParentGHOSTDAGData, err := dtm.ghostdagDataStore.Get( + dtm.databaseContext, stagingArea, currentGHOSTDAGData.SelectedParent(), false) + if err != nil { + return nil, err + } + + done, err := dtm.tryPushMergeSet(windowHeap, currentGHOSTDAGData, selectedParentGHOSTDAGData) + if err != nil { + return nil, err + } + if done { + break + } + + current = currentGHOSTDAGData.SelectedParent() + currentGHOSTDAGData = selectedParentGHOSTDAGData + } + + return windowHeap, nil +} + +func (dtm *dagTraversalManager) tryPushMergeSet(windowHeap *sizedUpBlockHeap, currentGHOSTDAGData, selectedParentGHOSTDAGData *externalapi.BlockGHOSTDAGData) (bool, error) { + added, err := windowHeap.tryPushWithGHOSTDAGData(currentGHOSTDAGData.SelectedParent(), selectedParentGHOSTDAGData) + if err != nil { + return false, err + } + + // If the window is full and the selected parent is less than the minimum then we break + // because this means that there cannot be any more blocks in the past with higher blueWork + if !added { + return true, nil + } + + // Now we go over the merge set. + // Remove the SP from the blue merge set because we already added it. + mergeSetBlues := currentGHOSTDAGData.MergeSetBlues()[1:] + // Go over the merge set in reverse because it's ordered in reverse by blueWork. + for i := len(mergeSetBlues) - 1; i >= 0; i-- { + added, err := windowHeap.tryPush(mergeSetBlues[i]) + if err != nil { + return false, err + } + // If it's smaller than minimum then we won't be able to add the rest because they're even smaller. + if !added { + break + } + } + + mergeSetReds := currentGHOSTDAGData.MergeSetReds() + for i := len(mergeSetReds) - 1; i >= 0; i-- { + added, err := windowHeap.tryPush(mergeSetReds[i]) + if err != nil { + return false, err + } + // If it's smaller than minimum then we won't be able to add the rest because they're even smaller. 
+ if !added { + break + } + } + + return false, nil +} diff --git a/consensus/dagtraversalmanager/window_test.go b/consensus/dagtraversalmanager/window_test.go new file mode 100644 index 00000000..c10dac30 --- /dev/null +++ b/consensus/dagtraversalmanager/window_test.go @@ -0,0 +1,369 @@ +package dagtraversalmanager_test + +import ( + "reflect" + "sort" + "testing" + + "github.com/Qitmeer/qng/consensus/model" + + "github.com/Qitmeer/qng/consensus" + "github.com/Qitmeer/qng/consensus/model/externalapi" + "github.com/Qitmeer/qng/consensus/utils/hashset" + "github.com/Qitmeer/qng/consensus/utils/testutils" + "github.com/kaspanet/kaspad/domain/dagconfig" + "github.com/pkg/errors" +) + +func TestBlockWindow(t *testing.T) { + tests := map[string][]*struct { + parents []string + id string //id is a virtual entity that is used only for tests so we can define relations between blocks without knowing their hash + expectedWindow []string + }{ + dagconfig.MainnetParams.Name: { + { + parents: []string{"A"}, + id: "B", + expectedWindow: []string{}, + }, + { + parents: []string{"B"}, + id: "C", + expectedWindow: []string{"B"}, + }, + { + parents: []string{"B"}, + id: "D", + expectedWindow: []string{"B"}, + }, + { + parents: []string{"C", "D"}, + id: "E", + expectedWindow: []string{"C", "D", "B"}, + }, + { + parents: []string{"C", "D"}, + id: "F", + expectedWindow: []string{"C", "D", "B"}, + }, + { + parents: []string{"A"}, + id: "G", + expectedWindow: []string{}, + }, + { + parents: []string{"G"}, + id: "H", + expectedWindow: []string{"G"}, + }, + { + parents: []string{"H", "F"}, + id: "I", + expectedWindow: []string{"F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"I"}, + id: "J", + expectedWindow: []string{"I", "F", "C", "H", "D", "B", "G"}, + }, + // + { + parents: []string{"J"}, + id: "K", + expectedWindow: []string{"J", "I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"K"}, + id: "L", + expectedWindow: []string{"K", "J", "I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"L"}, + id: "M", + expectedWindow: []string{"L", "K", "J", "I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"M"}, + id: "N", + expectedWindow: []string{"M", "L", "K", "J", "I", "F", "C", "H", "D", "B"}, + }, + { + parents: []string{"N"}, + id: "O", + expectedWindow: []string{"N", "M", "L", "K", "J", "I", "F", "C", "H", "D"}, + }, + }, + dagconfig.TestnetParams.Name: { + { + parents: []string{"A"}, + id: "B", + expectedWindow: []string{}, + }, + { + parents: []string{"B"}, + id: "C", + expectedWindow: []string{"B"}, + }, + { + parents: []string{"B"}, + id: "D", + expectedWindow: []string{"B"}, + }, + { + parents: []string{"C", "D"}, + id: "E", + expectedWindow: []string{"C", "D", "B"}, + }, + { + parents: []string{"C", "D"}, + id: "F", + expectedWindow: []string{"C", "D", "B"}, + }, + { + parents: []string{"A"}, + id: "G", + expectedWindow: []string{}, + }, + { + parents: []string{"G"}, + id: "H", + expectedWindow: []string{"G"}, + }, + { + parents: []string{"H", "F"}, + id: "I", + expectedWindow: []string{"F", "C", "D", "H", "B", "G"}, + }, + { + parents: []string{"I"}, + id: "J", + expectedWindow: []string{"I", "F", "C", "D", "H", "B", "G"}, + }, + { + parents: []string{"J"}, + id: "K", + expectedWindow: []string{"J", "I", "F", "C", "D", "H", "B", "G"}, + }, + { + parents: []string{"K"}, + id: "L", + expectedWindow: []string{"K", "J", "I", "F", "C", "D", "H", "B", "G"}, + }, + { + parents: []string{"L"}, + id: "M", + expectedWindow: []string{"L", "K", "J", "I", "F", "C", "D", 
"H", "B", "G"}, + }, + { + parents: []string{"M"}, + id: "N", + expectedWindow: []string{"M", "L", "K", "J", "I", "F", "C", "D", "H", "B"}, + }, + { + parents: []string{"N"}, + id: "O", + expectedWindow: []string{"N", "M", "L", "K", "J", "I", "F", "C", "D", "H"}, + }, + }, + dagconfig.DevnetParams.Name: { + { + parents: []string{"A"}, + id: "B", + expectedWindow: []string{}, + }, + { + parents: []string{"B"}, + id: "C", + expectedWindow: []string{"B"}, + }, + { + parents: []string{"B"}, + id: "D", + expectedWindow: []string{"B"}, + }, + { + parents: []string{"C", "D"}, + id: "E", + expectedWindow: []string{"C", "D", "B"}, + }, + { + parents: []string{"C", "D"}, + id: "F", + expectedWindow: []string{"C", "D", "B"}, + }, + { + parents: []string{"A"}, + id: "G", + expectedWindow: []string{}, + }, + { + parents: []string{"G"}, + id: "H", + expectedWindow: []string{"G"}, + }, + { + parents: []string{"H", "F"}, + id: "I", + expectedWindow: []string{"F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"I"}, + id: "J", + expectedWindow: []string{"I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"J"}, + id: "K", + expectedWindow: []string{"J", "I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"K"}, + id: "L", + expectedWindow: []string{"K", "J", "I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"L"}, + id: "M", + expectedWindow: []string{"L", "K", "J", "I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"M"}, + id: "N", + expectedWindow: []string{"M", "L", "K", "J", "I", "F", "C", "H", "D", "B"}, + }, + { + parents: []string{"N"}, + id: "O", + expectedWindow: []string{"N", "M", "L", "K", "J", "I", "F", "C", "H", "D"}, + }, + }, + dagconfig.SimnetParams.Name: { + { + parents: []string{"A"}, + id: "B", + expectedWindow: []string{}, + }, + { + parents: []string{"B"}, + id: "C", + expectedWindow: []string{"B"}, + }, + { + parents: []string{"B"}, + id: "D", + expectedWindow: []string{"B"}, + }, + { + parents: []string{"C", "D"}, + id: "E", + expectedWindow: []string{"C", "D", "B"}, + }, + { + parents: []string{"C", "D"}, + id: "F", + expectedWindow: []string{"C", "D", "B"}, + }, + { + parents: []string{"A"}, + id: "G", + expectedWindow: []string{}, + }, + { + parents: []string{"G"}, + id: "H", + expectedWindow: []string{"G"}, + }, + { + parents: []string{"H", "F"}, + id: "I", + expectedWindow: []string{"F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"I"}, + id: "J", + expectedWindow: []string{"I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"J"}, + id: "K", + expectedWindow: []string{"J", "I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"K"}, + id: "L", + expectedWindow: []string{"K", "J", "I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"L"}, + id: "M", + expectedWindow: []string{"L", "K", "J", "I", "F", "C", "H", "D", "B", "G"}, + }, + { + parents: []string{"M"}, + id: "N", + expectedWindow: []string{"M", "L", "K", "J", "I", "F", "C", "H", "D", "B"}, + }, + { + parents: []string{"N"}, + id: "O", + expectedWindow: []string{"N", "M", "L", "K", "J", "I", "F", "C", "H", "D"}, + }, + }, + } + testutils.ForAllNets(t, true, func(t *testing.T, consensusConfig *consensus.Config) { + consensusConfig.K = 1 + factory := consensus.NewFactory() + tc, tearDown, err := factory.NewTestConsensus(consensusConfig, "TestBlockWindow") + if err != nil { + t.Fatalf("NewTestConsensus: %s", err) + } + defer tearDown(false) + + windowSize := 10 + blockByIDMap := make(map[string]*externalapi.DomainHash) + 
idByBlockMap := make(map[externalapi.DomainHash]string) + blockByIDMap["A"] = consensusConfig.GenesisHash + idByBlockMap[*consensusConfig.GenesisHash] = "A" + + blocksData := tests[consensusConfig.Name] + + for _, blockData := range blocksData { + parents := hashset.New() + for _, parentID := range blockData.parents { + parent := blockByIDMap[parentID] + parents.Add(parent) + } + + block, _, err := tc.AddBlock(parents.ToSlice(), nil, nil) + if err != nil { + t.Fatalf("AddBlock: %+v", err) + } + + blockByIDMap[blockData.id] = block + idByBlockMap[*block] = blockData.id + + stagingArea := model.NewStagingArea() + + window, err := tc.DAGTraversalManager().BlockWindow(stagingArea, block, windowSize) + if err != nil { + t.Fatalf("BlockWindow: %s", err) + } + sort.Sort(testutils.NewTestGhostDAGSorter(stagingArea, window, tc, t)) + if err := checkWindowIDs(window, blockData.expectedWindow, idByBlockMap); err != nil { + t.Errorf("Unexpected values for window for block %s: %s", blockData.id, err) + } + } + }) +} + +func checkWindowIDs(window []*externalapi.DomainHash, expectedIDs []string, idByBlockMap map[externalapi.DomainHash]string) error { + ids := make([]string, len(window)) + for i, node := range window { + ids[i] = idByBlockMap[*node] + } + if !reflect.DeepEqual(ids, expectedIDs) { + return errors.Errorf("window expected to have blocks %s but got %s", expectedIDs, ids) + } + return nil +} diff --git a/consensus/difficultymanager/blockwindow.go b/consensus/difficultymanager/blockwindow.go index 8c66eb7a..aa3f3f13 100644 --- a/consensus/difficultymanager/blockwindow.go +++ b/consensus/difficultymanager/blockwindow.go @@ -1,11 +1,12 @@ package difficultymanager import ( - "github.com/kaspanet/kaspad/domain/consensus/model" - "github.com/kaspanet/kaspad/domain/consensus/model/externalapi" - "github.com/kaspanet/kaspad/util/difficulty" "math" "math/big" + + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/consensus/model/externalapi" + "github.com/Qitmeer/qng/core/types/pow" ) type difficultyBlock struct { @@ -95,7 +96,7 @@ func (window blockWindow) averageTarget() *big.Int { averageTarget := new(big.Int) targetTmp := new(big.Int) for _, block := range window { - difficulty.CompactToBigWithDestination(block.Bits, targetTmp) + pow.CompactToBigWithDestination(block.Bits, targetTmp) averageTarget.Add(averageTarget, targetTmp) } return averageTarget.Div(averageTarget, big.NewInt(int64(len(window)))) diff --git a/consensus/difficultymanager/difficultymanager.go b/consensus/difficultymanager/difficultymanager.go index 1838729f..e5c737bc 100644 --- a/consensus/difficultymanager/difficultymanager.go +++ b/consensus/difficultymanager/difficultymanager.go @@ -4,13 +4,12 @@ import ( "math/big" "time" - "github.com/kaspanet/kaspad/infrastructure/logger" - "github.com/kaspanet/kaspad/util/math" + "github.com/Qitmeer/qng/common/util/math" + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/core/types/pow" + "github.com/Qitmeer/qng/params" - "github.com/kaspanet/kaspad/util/difficulty" - - "github.com/kaspanet/kaspad/domain/consensus/model" - "github.com/kaspanet/kaspad/domain/consensus/model/externalapi" + "github.com/Qitmeer/qng/consensus/model/externalapi" ) // DifficultyManager provides a method to resolve the @@ -29,36 +28,13 @@ type difficultyManager struct { disableDifficultyAdjustment bool targetTimePerBlock time.Duration genesisBits uint32 + cfg *params.Params } // New instantiates a new DifficultyManager -func New(databaseContext model.DBReader, - ghostdagManager 
model.GHOSTDAGManager, - ghostdagStore model.GHOSTDAGDataStore, - headerStore model.BlockHeaderStore, - daaBlocksStore model.DAABlocksStore, - dagTopologyManager model.DAGTopologyManager, - dagTraversalManager model.DAGTraversalManager, - powMax *big.Int, - difficultyAdjustmentWindowSize int, - disableDifficultyAdjustment bool, - targetTimePerBlock time.Duration, - genesisHash *externalapi.DomainHash, - genesisBits uint32) model.DifficultyManager { +func New(cfg *params.Params) model.DifficultyManager { return &difficultyManager{ - databaseContext: databaseContext, - ghostdagManager: ghostdagManager, - ghostdagStore: ghostdagStore, - headerStore: headerStore, - daaBlocksStore: daaBlocksStore, - dagTopologyManager: dagTopologyManager, - dagTraversalManager: dagTraversalManager, - powMax: powMax, - difficultyAdjustmentWindowSize: difficultyAdjustmentWindowSize, - disableDifficultyAdjustment: disableDifficultyAdjustment, - targetTimePerBlock: targetTimePerBlock, - genesisHash: genesisHash, - genesisBits: genesisBits, + cfg: cfg, } } @@ -74,9 +50,6 @@ func (dm *difficultyManager) StageDAADataAndReturnRequiredDifficulty( blockHash *externalapi.DomainHash, isBlockWithTrustedData bool) (uint32, error) { - onEnd := logger.LogAndMeasureExecutionTime(log, "StageDAADataAndReturnRequiredDifficulty") - defer onEnd() - targetsWindow, windowHashes, err := dm.blockWindow(stagingArea, blockHash, dm.difficultyAdjustmentWindowSize) if err != nil { return 0, err @@ -132,9 +105,9 @@ func (dm *difficultyManager) requiredDifficultyFromTargetsWindow(targetsWindow b Div(newTarget, div.SetInt64(dm.targetTimePerBlock.Milliseconds())). Div(newTarget, div.SetUint64(uint64(len(targetsWindow)))) if newTarget.Cmp(dm.powMax) > 0 { - return difficulty.BigToCompact(dm.powMax), nil + return pow.BigToCompact(dm.powMax), nil } - newTargetBits := difficulty.BigToCompact(newTarget) + newTargetBits := pow.BigToCompact(newTarget) return newTargetBits, nil } @@ -143,9 +116,6 @@ func (dm *difficultyManager) stageDAAScoreAndAddedBlocks(stagingArea *model.Stag windowHashes []*externalapi.DomainHash, isBlockWithTrustedData bool) error { - onEnd := logger.LogAndMeasureExecutionTime(log, "stageDAAScoreAndAddedBlocks") - defer onEnd() - daaScore, addedBlocks, err := dm.calculateDaaScoreAndAddedBlocks(stagingArea, blockHash, windowHashes, isBlockWithTrustedData) if err != nil { return err diff --git a/consensus/difficultymanager/difficultymanager_test.go b/consensus/difficultymanager/difficultymanager_test.go index fe6cb588..bd9134e9 100644 --- a/consensus/difficultymanager/difficultymanager_test.go +++ b/consensus/difficultymanager/difficultymanager_test.go @@ -4,16 +4,15 @@ import ( "testing" "time" - "github.com/kaspanet/kaspad/util/difficulty" - + "github.com/Qitmeer/qng/core/types/pow" "github.com/kaspanet/kaspad/util/mstime" - "github.com/kaspanet/kaspad/domain/consensus/utils/consensushashing" + "github.com/Qitmeer/qng/consensus/utils/consensushashing" - "github.com/kaspanet/kaspad/domain/consensus" - "github.com/kaspanet/kaspad/domain/consensus/model" - "github.com/kaspanet/kaspad/domain/consensus/model/externalapi" - "github.com/kaspanet/kaspad/domain/consensus/utils/testutils" + "github.com/Qitmeer/qng/consensus" + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/consensus/model/externalapi" + "github.com/Qitmeer/qng/consensus/utils/testutils" "github.com/kaspanet/kaspad/domain/dagconfig" ) @@ -352,7 +351,7 @@ func TestDAAScore(t *testing.T) { } func compareBits(a uint32, b uint32) int { - aTarget := 
difficulty.CompactToBig(a) - bTarget := difficulty.CompactToBig(b) + aTarget := pow.CompactToBig(a) + bTarget := pow.CompactToBig(b) return aTarget.Cmp(bTarget) } diff --git a/consensus/difficultymanager/hashrate.go b/consensus/difficultymanager/hashrate.go index 658c7be1..29555655 100644 --- a/consensus/difficultymanager/hashrate.go +++ b/consensus/difficultymanager/hashrate.go @@ -3,8 +3,8 @@ package difficultymanager import ( "math/big" - "github.com/kaspanet/kaspad/domain/consensus/model" - "github.com/kaspanet/kaspad/domain/consensus/model/externalapi" + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/consensus/model/externalapi" "github.com/kaspanet/kaspad/infrastructure/logger" "github.com/pkg/errors" ) diff --git a/consensus/model/block_heap.go b/consensus/model/block_heap.go new file mode 100644 index 00000000..c8cba85c --- /dev/null +++ b/consensus/model/block_heap.go @@ -0,0 +1,12 @@ +package model + +import "github.com/Qitmeer/qng/consensus/model/externalapi" + +// BlockHeap represents a heap of block hashes, providing a priority-queue functionality +type BlockHeap interface { + Push(blockHash *externalapi.DomainHash) error + PushSlice(blockHash []*externalapi.DomainHash) error + Pop() *externalapi.DomainHash + Len() int + ToSlice() []*externalapi.DomainHash +} diff --git a/consensus/model/blockiterator.go b/consensus/model/blockiterator.go new file mode 100644 index 00000000..bf35519c --- /dev/null +++ b/consensus/model/blockiterator.go @@ -0,0 +1,11 @@ +package model + +import "github.com/Qitmeer/qng/consensus/model/externalapi" + +// BlockIterator is an iterator over blocks according to some order. +type BlockIterator interface { + First() bool + Next() bool + Get() (*externalapi.DomainHash, error) + Close() error +} diff --git a/consensus/model/database.go b/consensus/model/database.go index 989e52e4..24466f28 100644 --- a/consensus/model/database.go +++ b/consensus/model/database.go @@ -83,3 +83,60 @@ type DataBase interface { StartTrack(info string) error StopTrack() error } + +// DBCursor iterates over database entries given some bucket. +type DBCursor interface { + // Next moves the iterator to the next key/value pair. It returns whether the + // iterator is exhausted. Panics if the cursor is closed. + Next() bool + + // First moves the iterator to the first key/value pair. It returns false if + // such a pair does not exist. Panics if the cursor is closed. + First() bool + + // Seek moves the iterator to the first key/value pair whose key is greater + // than or equal to the given key. It returns ErrNotFound if such pair does not + // exist. + Seek(key DBKey) error + + // Key returns the key of the current key/value pair, or ErrNotFound if done. + // The caller should not modify the contents of the returned key, and + // its contents may change on the next call to Next. + Key() (DBKey, error) + + // Value returns the value of the current key/value pair, or ErrNotFound if done. + // The caller should not modify the contents of the returned slice, and its + // contents may change on the next call to Next. + Value() ([]byte, error) + + // Close releases associated resources. + Close() error +} + +// DBReader defines a proxy over domain data access +type DBReader interface { + // Get gets the value for the given key. It returns + // ErrNotFound if the given key does not exist. + Get(key DBKey) ([]byte, error) + + // Has returns true if the database does contains the + // given key. 
+ Has(key DBKey) (bool, error) + + // Cursor begins a new cursor over the given bucket. + Cursor(bucket DBBucket) (DBCursor, error) +} + +// DBKey is an interface for a database key +type DBKey interface { + Bytes() []byte + Bucket() DBBucket + Suffix() []byte +} + +// DBBucket is an interface for a database bucket +type DBBucket interface { + Bucket(bucketBytes []byte) DBBucket + Key(suffix []byte) DBKey + Path() []byte +} diff --git a/consensus/model/externalapi/acceptancedata.go b/consensus/model/externalapi/acceptancedata.go new file mode 100644 index 00000000..4b85741f --- /dev/null +++ b/consensus/model/externalapi/acceptancedata.go @@ -0,0 +1,145 @@ +package externalapi + +// AcceptanceData stores data about which transactions were accepted by a block. +// It's ordered in the same way as the block merge set blues. +type AcceptanceData []*BlockAcceptanceData + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. +var _ AcceptanceData = []*BlockAcceptanceData{} + +// Equal returns whether ad equals to other +func (ad AcceptanceData) Equal(other AcceptanceData) bool { + if len(ad) != len(other) { + return false + } + + for i, blockAcceptanceData := range ad { + if !blockAcceptanceData.Equal(other[i]) { + return false + } + } + + return true +} + +// Clone clones the AcceptanceData +func (ad AcceptanceData) Clone() AcceptanceData { + clone := make(AcceptanceData, len(ad)) + for i, blockAcceptanceData := range ad { + clone[i] = blockAcceptanceData.Clone() + } + + return clone +} + +// BlockAcceptanceData stores all transactions in a block with an indication +// if they were accepted or not by some other block +type BlockAcceptanceData struct { + BlockHash *DomainHash + TransactionAcceptanceData []*TransactionAcceptanceData +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. +var _ = &BlockAcceptanceData{&DomainHash{}, []*TransactionAcceptanceData{}} + +// Equal returns whether bad equals to other +func (bad *BlockAcceptanceData) Equal(other *BlockAcceptanceData) bool { + if bad == nil || other == nil { + return bad == other + } + + if !bad.BlockHash.Equal(other.BlockHash) { + return false + } + + if len(bad.TransactionAcceptanceData) != len(other.TransactionAcceptanceData) { + return false + } + + for i, acceptanceData := range bad.TransactionAcceptanceData { + if !acceptanceData.Equal(other.TransactionAcceptanceData[i]) { + return false + } + } + + return true +} + +// Clone returns a clone of BlockAcceptanceData +func (bad *BlockAcceptanceData) Clone() *BlockAcceptanceData { + if bad == nil { + return nil + } + + clone := &BlockAcceptanceData{ + BlockHash: bad.BlockHash, + TransactionAcceptanceData: make([]*TransactionAcceptanceData, len(bad.TransactionAcceptanceData)), + } + for i, acceptanceData := range bad.TransactionAcceptanceData { + clone.TransactionAcceptanceData[i] = acceptanceData.Clone() + } + + return clone +} + +// TransactionAcceptanceData stores a transaction together with an indication +// if it was accepted or not by some block +type TransactionAcceptanceData struct { + Transaction *DomainTransaction + Fee uint64 + IsAccepted bool + TransactionInputUTXOEntries []UTXOEntry +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. 
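+// The literal below is positional, so it must provide a value for every field of
+// TransactionAcceptanceData; any change to the field list therefore breaks compilation here.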
+var _ = &TransactionAcceptanceData{&DomainTransaction{}, 0, false, []UTXOEntry{}} + +// Equal returns whether tad equals to other +func (tad *TransactionAcceptanceData) Equal(other *TransactionAcceptanceData) bool { + if tad == nil || other == nil { + return tad == other + } + + if !tad.Transaction.Equal(other.Transaction) { + return false + } + + if tad.Fee != other.Fee { + return false + } + + if tad.IsAccepted != other.IsAccepted { + return false + } + + if len(tad.TransactionInputUTXOEntries) != len(other.TransactionInputUTXOEntries) { + return false + } + + for i, thisUTXOEntry := range tad.TransactionInputUTXOEntries { + otherUTXOEntry := other.TransactionInputUTXOEntries[i] + if !thisUTXOEntry.Equal(otherUTXOEntry) { + return false + } + } + + return true +} + +// Clone returns a clone of TransactionAcceptanceData +func (tad *TransactionAcceptanceData) Clone() *TransactionAcceptanceData { + cloneTransactionInputUTXOEntries := make([]UTXOEntry, len(tad.TransactionInputUTXOEntries)) + for i, utxoEntry := range tad.TransactionInputUTXOEntries { + cloneTransactionInputUTXOEntries[i] = utxoEntry + } + + return &TransactionAcceptanceData{ + Transaction: tad.Transaction.Clone(), + Fee: tad.Fee, + IsAccepted: tad.IsAccepted, + TransactionInputUTXOEntries: cloneTransactionInputUTXOEntries, + } +} diff --git a/consensus/model/externalapi/block.go b/consensus/model/externalapi/block.go new file mode 100644 index 00000000..a65d452e --- /dev/null +++ b/consensus/model/externalapi/block.go @@ -0,0 +1,84 @@ +package externalapi + +import "math/big" + +// DomainBlock represents a Kaspa block +type DomainBlock struct { + Header BlockHeader + Transactions []*DomainTransaction +} + +// Clone returns a clone of DomainBlock +func (block *DomainBlock) Clone() *DomainBlock { + transactionClone := make([]*DomainTransaction, len(block.Transactions)) + for i, tx := range block.Transactions { + transactionClone[i] = tx.Clone() + } + + return &DomainBlock{ + Header: block.Header, + Transactions: transactionClone, + } +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. +var _ = DomainBlock{nil, []*DomainTransaction{}} + +// Equal returns whether block equals to other +func (block *DomainBlock) Equal(other *DomainBlock) bool { + if block == nil || other == nil { + return block == other + } + + if len(block.Transactions) != len(other.Transactions) { + return false + } + + if !block.Header.Equal(other.Header) { + return false + } + + for i, tx := range block.Transactions { + if !tx.Equal(other.Transactions[i]) { + return false + } + } + + return true +} + +// BlockHeader represents an immutable block header. +type BlockHeader interface { + BaseBlockHeader + ToMutable() MutableBlockHeader +} + +// BaseBlockHeader represents the header part of a Kaspa block +type BaseBlockHeader interface { + Version() uint16 + Parents() []BlockLevelParents + DirectParents() BlockLevelParents + HashMerkleRoot() *DomainHash + AcceptedIDMerkleRoot() *DomainHash + UTXOCommitment() *DomainHash + TimeInMilliseconds() int64 + Bits() uint32 + Nonce() uint64 + DAAScore() uint64 + BlueScore() uint64 + BlueWork() *big.Int + PruningPoint() *DomainHash + BlockLevel(maxBlockLevel int) int + Equal(other BaseBlockHeader) bool +} + +// MutableBlockHeader represents a block header that can be mutated, but only +// the fields that are relevant to mining (Nonce and TimeInMilliseconds). 
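+// SetHashMerkleRoot is exposed as well, since changing the block's transaction set
+// requires recomputing the header's merkle root.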
+type MutableBlockHeader interface { + BaseBlockHeader + ToImmutable() BlockHeader + SetNonce(nonce uint64) + SetTimeInMilliseconds(timeInMilliseconds int64) + SetHashMerkleRoot(hashMerkleRoot *DomainHash) +} diff --git a/consensus/model/externalapi/block_equal_clone_test.go b/consensus/model/externalapi/block_equal_clone_test.go new file mode 100644 index 00000000..8138cee9 --- /dev/null +++ b/consensus/model/externalapi/block_equal_clone_test.go @@ -0,0 +1,499 @@ +package externalapi_test + +import ( + "math/big" + "reflect" + "testing" + + "github.com/Qitmeer/qng/consensus/model/externalapi" + "github.com/Qitmeer/qng/consensus/utils/blockheader" +) + +type blockToCompare struct { + block *externalapi.DomainBlock + expectedResult bool +} + +type TestBlockStruct struct { + baseBlock *externalapi.DomainBlock + blocksToCompareTo []blockToCompare +} + +func initTestBaseTransactions() []*externalapi.DomainTransaction { + + testTx := []*externalapi.DomainTransaction{{ + Version: 1, + Inputs: []*externalapi.DomainTransactionInput{}, + Outputs: []*externalapi.DomainTransactionOutput{}, + LockTime: 1, + SubnetworkID: externalapi.DomainSubnetworkID{0x01}, + Gas: 1, + Payload: []byte{0x01}, + Fee: 0, + Mass: 1, + ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }} + return testTx +} + +func initTestAnotherTransactions() []*externalapi.DomainTransaction { + + testTx := []*externalapi.DomainTransaction{{ + Version: 1, + Inputs: []*externalapi.DomainTransactionInput{}, + Outputs: []*externalapi.DomainTransactionOutput{}, + LockTime: 1, + SubnetworkID: externalapi.DomainSubnetworkID{0x01}, + Gas: 1, + Payload: []byte{0x02}, + Fee: 0, + Mass: 1, + ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), + }} + return testTx +} + +func initTestTwoTransactions() []*externalapi.DomainTransaction { + + testTx := []*externalapi.DomainTransaction{{ + Version: 1, + Inputs: []*externalapi.DomainTransactionInput{}, + Outputs: []*externalapi.DomainTransactionOutput{}, + LockTime: 1, + SubnetworkID: externalapi.DomainSubnetworkID{0x01}, + Gas: 1, + Payload: []byte{0x01}, + Fee: 0, + Mass: 1, + ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), + }, { + Version: 1, + Inputs: []*externalapi.DomainTransactionInput{}, + Outputs: []*externalapi.DomainTransactionOutput{}, + LockTime: 1, + SubnetworkID: externalapi.DomainSubnetworkID{0x01}, + Gas: 1, + Payload: []byte{0x01}, + Fee: 0, + Mass: 1, + ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), + }} + return testTx +} + +func initTestBlockStructsForClone() []*externalapi.DomainBlock { + tests := []*externalapi.DomainBlock{ + { + 
blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{0})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + 4, + 5, + 6, + 7, + 8, + big.NewInt(9), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{10}), + ), + initTestBaseTransactions(), + }, { + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + 4, + 5, + 6, + 7, + 8, + big.NewInt(9), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{10}), + ), + initTestBaseTransactions(), + }, + } + + return tests +} + +func initTestBlockStructsForEqual() *[]TestBlockStruct { + tests := []TestBlockStruct{ + { + baseBlock: nil, + blocksToCompareTo: []blockToCompare{ + { + block: nil, + expectedResult: true, + }, + { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{0})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + 4, + 5, + 6, + 7, + 8, + big.NewInt(9), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{10}), + ), + initTestBaseTransactions()}, + expectedResult: false, + }, + }, + }, { + baseBlock: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + blocksToCompareTo: []blockToCompare{ + { + block: nil, + expectedResult: false, + }, + { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestAnotherTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + 
[]externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: true, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{ + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + }}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{100})}}, // Changed + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestTwoTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{100}), // Changed + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{100}), // Changed + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + 
externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{100}), // Changed + 5, + 6, + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 100, // Changed + 6, + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 100, // Changed + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 100, // Changed + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 100, // Changed + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + 
externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 8, + 100, // Changed + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 8, + 9, + big.NewInt(100), // Changed + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, { + block: &externalapi.DomainBlock{ + blockheader.NewImmutableBlockHeader( + 0, + []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), + 5, + 6, + 7, + 8, + 9, + big.NewInt(10), + externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{100}), // Changed + ), + initTestBaseTransactions(), + }, + expectedResult: false, + }, + }, + }, + } + + return &tests +} + +func TestDomainBlock_Equal(t *testing.T) { + + blockTests := initTestBlockStructsForEqual() + for i, test := range *blockTests { + for j, subTest := range test.blocksToCompareTo { + result1 := test.baseBlock.Equal(subTest.block) + if result1 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) + } + result2 := subTest.block.Equal(test.baseBlock) + if result2 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) + } + } + } + +} + +func TestDomainBlock_Clone(t *testing.T) { + + blocks := initTestBlockStructsForClone() + for i, block := range blocks { + blockClone := block.Clone() + if !blockClone.Equal(block) { + t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) + } + if !reflect.DeepEqual(block, blockClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} diff --git a/consensus/model/externalapi/block_with_trusted_data.go b/consensus/model/externalapi/block_with_trusted_data.go new file mode 100644 index 00000000..48ab0fb9 --- /dev/null +++ b/consensus/model/externalapi/block_with_trusted_data.go @@ -0,0 +1,23 @@ +package externalapi + +// BlockWithTrustedData is a block with pre-filled data +// that is not validated by the consensus. +// This is used when bring the pruning point and its +// anticone on a pruned-headers node. 
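+// DAAWindow and GHOSTDAGData carry the pre-computed data for blocks whose past
+// is not available on the pruned node.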
+type BlockWithTrustedData struct { + Block *DomainBlock + DAAWindow []*TrustedDataDataDAAHeader + GHOSTDAGData []*BlockGHOSTDAGDataHashPair +} + +// TrustedDataDataDAAHeader is a block that belongs to BlockWithTrustedData.DAAWindow +type TrustedDataDataDAAHeader struct { + Header BlockHeader + GHOSTDAGData *BlockGHOSTDAGData +} + +// BlockGHOSTDAGDataHashPair is a pair of a block hash and its ghostdag data +type BlockGHOSTDAGDataHashPair struct { + Hash *DomainHash + GHOSTDAGData *BlockGHOSTDAGData +} diff --git a/consensus/model/externalapi/blockinfo.go b/consensus/model/externalapi/blockinfo.go new file mode 100644 index 00000000..43c914f9 --- /dev/null +++ b/consensus/model/externalapi/blockinfo.go @@ -0,0 +1,37 @@ +package externalapi + +import "math/big" + +// BlockInfo contains various information about a specific block +type BlockInfo struct { + Exists bool + BlockStatus BlockStatus + BlueScore uint64 + BlueWork *big.Int + SelectedParent *DomainHash + MergeSetBlues []*DomainHash + MergeSetReds []*DomainHash +} + +// HasHeader returns whether the block exists and has a valid header +func (bi *BlockInfo) HasHeader() bool { + return bi.Exists && bi.BlockStatus != StatusInvalid +} + +// HasBody returns whether the block exists and has a valid body +func (bi *BlockInfo) HasBody() bool { + return bi.Exists && bi.BlockStatus != StatusInvalid && bi.BlockStatus != StatusHeaderOnly +} + +// Clone returns a clone of BlockInfo +func (bi *BlockInfo) Clone() *BlockInfo { + return &BlockInfo{ + Exists: bi.Exists, + BlockStatus: bi.BlockStatus.Clone(), + BlueScore: bi.BlueScore, + BlueWork: new(big.Int).Set(bi.BlueWork), + SelectedParent: bi.SelectedParent, + MergeSetBlues: CloneHashes(bi.MergeSetBlues), + MergeSetReds: CloneHashes(bi.MergeSetReds), + } +} diff --git a/consensus/model/externalapi/blockinfo_clone_test.go b/consensus/model/externalapi/blockinfo_clone_test.go new file mode 100644 index 00000000..003d25ee --- /dev/null +++ b/consensus/model/externalapi/blockinfo_clone_test.go @@ -0,0 +1,108 @@ +package externalapi + +import ( + "math/big" + "reflect" + "testing" +) + +func initTestBlockInfoStructsForClone() []*BlockInfo { + + tests := []*BlockInfo{ + { + true, + BlockStatus(0x01), + 0, + big.NewInt(0), + nil, + []*DomainHash{}, + []*DomainHash{}, + }, { + true, + BlockStatus(0x02), + 0, + big.NewInt(0), + nil, + []*DomainHash{}, + []*DomainHash{}, + }, { + true, + 1, + 1, + big.NewInt(0), + nil, + []*DomainHash{}, + []*DomainHash{}, + }, { + true, + 255, + 2, + big.NewInt(0), + nil, + []*DomainHash{}, + []*DomainHash{}, + }, { + true, + 0, + 3, + big.NewInt(0), + nil, + []*DomainHash{}, + []*DomainHash{}, + }, { + true, + BlockStatus(0x01), + 0, + big.NewInt(1), + nil, + []*DomainHash{}, + []*DomainHash{}, + }, { + false, + BlockStatus(0x01), + 0, + big.NewInt(1), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), + []*DomainHash{ + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), + }, + []*DomainHash{ + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04}), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05}), + }, + }, + } + return tests +} + +func TestBlockInfo_Clone(t *testing.T) { + + blockInfos := initTestBlockInfoStructsForClone() + for i, blockInfo := range blockInfos { + blockInfoClone := blockInfo.Clone() + if !reflect.DeepEqual(blockInfo, blockInfoClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} diff --git a/consensus/model/externalapi/blocklevelparents.go b/consensus/model/externalapi/blocklevelparents.go new file mode 100644 index 00000000..a4768e49 --- /dev/null +++ b/consensus/model/externalapi/blocklevelparents.go @@ -0,0 +1,63 @@ +package externalapi + +// BlockLevelParents represent the parents within a single super-block level +// See https://github.com/kaspanet/research/issues/3 for further details +type BlockLevelParents []*DomainHash + +// Equal returns true if this BlockLevelParents is equal to `other` +func (sl BlockLevelParents) Equal(other BlockLevelParents) bool { + if len(sl) != len(other) { + return false + } + for _, thisHash := range sl { + found := false + for _, otherHash := range other { + if thisHash.Equal(otherHash) { + found = true + break + } + } + if !found { + return false + } + } + return true +} + +// Clone creates a clone of this BlockLevelParents +func (sl BlockLevelParents) Clone() BlockLevelParents { + return CloneHashes(sl) +} + +// Contains returns true if this BlockLevelParents contains the given blockHash +func (sl BlockLevelParents) Contains(blockHash *DomainHash) bool { + for _, blockLevelParent := range sl { + if blockLevelParent.Equal(blockHash) { + return true + } + } + return false +} + +// ParentsEqual returns true if all the BlockLevelParents in `a` and `b` are +// equal pairwise +func ParentsEqual(a, b []BlockLevelParents) bool { + if len(a) != len(b) { + return false + } + for i, blockLevelParents := range a { + if !blockLevelParents.Equal(b[i]) { + return false + } + } + return true +} + +// CloneParents creates a clone of the given BlockLevelParents slice +func CloneParents(parents []BlockLevelParents) []BlockLevelParents { + clone := make([]BlockLevelParents, len(parents)) + for i, blockLevelParents := range parents { + clone[i] = blockLevelParents.Clone() + } + return clone +} diff --git a/consensus/model/externalapi/blocklocator.go b/consensus/model/externalapi/blocklocator.go new file mode 100644 index 00000000..b6dd1ca2 --- /dev/null +++ b/consensus/model/externalapi/blocklocator.go @@ -0,0 +1,24 @@ +package externalapi + +// BlockLocator is used to help locate a specific block. The algorithm for +// building the block locator is to add block hashes in reverse order on the +// block's selected parent chain until the desired stop block is reached. +// In order to keep the list of locator hashes to a reasonable number of entries, +// the step between each entry is doubled each loop iteration to exponentially +// decrease the number of hashes as a function of the distance from the block +// being located. 
+// +// For example, assume a selected parent chain with IDs as depicted below, and the +// stop block is genesis: +// +// genesis -> 1 -> 2 -> ... -> 15 -> 16 -> 17 -> 18 +// +// The block locator for block 17 would be the hashes of blocks: +// +// [17 16 14 11 7 2 genesis] +type BlockLocator []*DomainHash + +// Clone returns a clone of BlockLocator +func (locator BlockLocator) Clone() BlockLocator { + return CloneHashes(locator) +} diff --git a/consensus/model/externalapi/blocklocator_clone_test.go b/consensus/model/externalapi/blocklocator_clone_test.go new file mode 100644 index 00000000..28e90768 --- /dev/null +++ b/consensus/model/externalapi/blocklocator_clone_test.go @@ -0,0 +1,76 @@ +package externalapi + +import ( + "reflect" + "testing" +) + +func initTestBlockLocatorForClone() []*BlockLocator { + + tests := []*BlockLocator{{ + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04}), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF}), + }, { + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF}), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 2}), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 1}), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 1, 1}), + NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 2, 1}), + }, + } + return tests +} + +func TestBlockLocator_Clone(t *testing.T) { + + testBlockLocator := initTestBlockLocatorForClone() + for i, blockLocator := range testBlockLocator 
{ + blockLocatorClone := blockLocator.Clone() + if !reflect.DeepEqual(blockLocator, &blockLocatorClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} diff --git a/consensus/model/externalapi/blockstatus.go b/consensus/model/externalapi/blockstatus.go new file mode 100644 index 00000000..7358bafd --- /dev/null +++ b/consensus/model/externalapi/blockstatus.go @@ -0,0 +1,49 @@ +package externalapi + +// BlockStatus represents the validation state of the block. +type BlockStatus byte + +// Clone returns a clone of BlockStatus +func (bs BlockStatus) Clone() BlockStatus { + return bs +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. +var _ BlockStatus = 0 + +// Equal returns whether bs equals to other +func (bs BlockStatus) Equal(other BlockStatus) bool { + return bs == other +} + +const ( + // StatusInvalid indicates that the block is invalid. + StatusInvalid BlockStatus = iota + + // StatusUTXOValid indicates the block is valid from any UTXO related aspects and has passed all the other validations as well. + StatusUTXOValid + + // StatusUTXOPendingVerification indicates that the block is pending verification against its past UTXO-Set, either + // because it was not yet verified since the block was never in the selected parent chain, or if the + // block violates finality. + StatusUTXOPendingVerification + + // StatusDisqualifiedFromChain indicates that the block is not eligible to be a selected parent. + StatusDisqualifiedFromChain + + // StatusHeaderOnly indicates that the block transactions are not held (pruned or wasn't added yet) + StatusHeaderOnly +) + +var blockStatusStrings = map[BlockStatus]string{ + StatusInvalid: "Invalid", + StatusUTXOValid: "Valid", + StatusUTXOPendingVerification: "UTXOPendingVerification", + StatusDisqualifiedFromChain: "DisqualifiedFromChain", + StatusHeaderOnly: "HeaderOnly", +} + +func (bs BlockStatus) String() string { + return blockStatusStrings[bs] +} diff --git a/consensus/model/externalapi/blockstatus_equal_clone_test.go b/consensus/model/externalapi/blockstatus_equal_clone_test.go new file mode 100644 index 00000000..7737296c --- /dev/null +++ b/consensus/model/externalapi/blockstatus_equal_clone_test.go @@ -0,0 +1,87 @@ +package externalapi + +import ( + "reflect" + "testing" +) + +func initTestBlockStatusForClone() []BlockStatus { + + tests := []BlockStatus{1, 2, 0xFF, 0} + + return tests +} + +type TestBlockStatusToCompare struct { + blockStatus BlockStatus + expectedResult bool +} + +type TestBlockStatusStruct struct { + baseBlockStatus BlockStatus + blockStatusesToCompareTo []TestBlockStatusToCompare +} + +func initTestBlockStatusForEqual() []TestBlockStatusStruct { + tests := []TestBlockStatusStruct{ + { + baseBlockStatus: 0, + blockStatusesToCompareTo: []TestBlockStatusToCompare{ + { + blockStatus: 1, + expectedResult: false, + }, + { + blockStatus: 0, + expectedResult: true, + }, + }, + }, { + baseBlockStatus: 255, + blockStatusesToCompareTo: []TestBlockStatusToCompare{ + { + blockStatus: 1, + expectedResult: false, + }, + { + blockStatus: 255, + expectedResult: true, + }, + }, + }, + } + return tests +} + +func TestBlockStatus_Equal(t *testing.T) { + + testBlockStatus := initTestBlockStatusForEqual() + + for i, test := range testBlockStatus { + for j, subTest := range test.blockStatusesToCompareTo { + result1 := test.baseBlockStatus.Equal(subTest.blockStatus) + if result1 != subTest.expectedResult { + 
t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) + } + + result2 := subTest.blockStatus.Equal(test.baseBlockStatus) + if result2 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) + } + } + } +} + +func TestBlockStatus_Clone(t *testing.T) { + + testBlockStatus := initTestBlockStatusForClone() + for i, blockStatus := range testBlockStatus { + blockStatusClone := blockStatus.Clone() + if !blockStatusClone.Equal(blockStatus) { + t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) + } + if !reflect.DeepEqual(blockStatus, blockStatusClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} diff --git a/consensus/model/externalapi/blocktemplate.go b/consensus/model/externalapi/blocktemplate.go new file mode 100644 index 00000000..5456ef2d --- /dev/null +++ b/consensus/model/externalapi/blocktemplate.go @@ -0,0 +1,19 @@ +package externalapi + +// DomainBlockTemplate contains a Block plus metadata related to its generation +type DomainBlockTemplate struct { + Block *DomainBlock + CoinbaseData *DomainCoinbaseData + CoinbaseHasRedReward bool + IsNearlySynced bool +} + +// Clone returns a clone of DomainBlockTemplate +func (bt *DomainBlockTemplate) Clone() *DomainBlockTemplate { + return &DomainBlockTemplate{ + Block: bt.Block.Clone(), + CoinbaseData: bt.CoinbaseData.Clone(), + CoinbaseHasRedReward: bt.CoinbaseHasRedReward, + IsNearlySynced: bt.IsNearlySynced, + } +} diff --git a/consensus/model/externalapi/coinbase.go b/consensus/model/externalapi/coinbase.go new file mode 100644 index 00000000..39dbfb98 --- /dev/null +++ b/consensus/model/externalapi/coinbase.go @@ -0,0 +1,38 @@ +package externalapi + +import "bytes" + +// DomainCoinbaseData contains data by which a coinbase transaction +// is built +type DomainCoinbaseData struct { + ScriptPublicKey *ScriptPublicKey + ExtraData []byte +} + +// Clone returns a clone of DomainCoinbaseData +func (dcd *DomainCoinbaseData) Clone() *DomainCoinbaseData { + + scriptPubKeyClone := make([]byte, len(dcd.ScriptPublicKey.Script)) + copy(scriptPubKeyClone, dcd.ScriptPublicKey.Script) + + extraDataClone := make([]byte, len(dcd.ExtraData)) + copy(extraDataClone, dcd.ExtraData) + + return &DomainCoinbaseData{ + ScriptPublicKey: &ScriptPublicKey{Script: scriptPubKeyClone, Version: dcd.ScriptPublicKey.Version}, + ExtraData: extraDataClone, + } +} + +// Equal returns whether dcd equals to other +func (dcd *DomainCoinbaseData) Equal(other *DomainCoinbaseData) bool { + if dcd == nil || other == nil { + return dcd == other + } + + if !bytes.Equal(dcd.ExtraData, other.ExtraData) { + return false + } + + return dcd.ScriptPublicKey.Equal(other.ScriptPublicKey) +} diff --git a/consensus/model/externalapi/coinbase_clone_test.go b/consensus/model/externalapi/coinbase_clone_test.go new file mode 100644 index 00000000..df465d2d --- /dev/null +++ b/consensus/model/externalapi/coinbase_clone_test.go @@ -0,0 +1,59 @@ +package externalapi + +import ( + "reflect" + "testing" +) + +func initTestCoinbaseDataStructsForClone() []*DomainCoinbaseData { + + tests := []*DomainCoinbaseData{ + { + &ScriptPublicKey{Script: []byte{1, 2, 3, 4, 5, 6}, Version: 0}, + []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF}, + }, { + &ScriptPublicKey{Script: []byte{0, 0, 0, 0, 55}, Version: 0}, + []byte{0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 
0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF}, + }, + } + return tests +} + +func TestDomainCoinbaseData_Clone(t *testing.T) { + + coinbaseData := initTestCoinbaseDataStructsForClone() + for i, coinbase := range coinbaseData { + coinbaseClone := coinbase.Clone() + if !reflect.DeepEqual(coinbase, coinbaseClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} diff --git a/consensus/model/externalapi/consensus.go b/consensus/model/externalapi/consensus.go new file mode 100644 index 00000000..da065adb --- /dev/null +++ b/consensus/model/externalapi/consensus.go @@ -0,0 +1,59 @@ +package externalapi + +// Consensus maintains the current core state of the node +type Consensus interface { + Init(skipAddingGenesis bool) error + BuildBlock(coinbaseData *DomainCoinbaseData, transactions []*DomainTransaction) (*DomainBlock, error) + BuildBlockTemplate(coinbaseData *DomainCoinbaseData, transactions []*DomainTransaction) (*DomainBlockTemplate, error) + ValidateAndInsertBlock(block *DomainBlock, updateVirtual bool) error + ValidateAndInsertBlockWithTrustedData(block *BlockWithTrustedData, validateUTXO bool) error + ValidateTransactionAndPopulateWithConsensusData(transaction *DomainTransaction) error + ImportPruningPoints(pruningPoints []BlockHeader) error + BuildPruningPointProof() (*PruningPointProof, error) + ValidatePruningPointProof(pruningPointProof *PruningPointProof) error + ApplyPruningPointProof(pruningPointProof *PruningPointProof) error + + GetBlock(blockHash *DomainHash) (*DomainBlock, bool, error) + GetBlockEvenIfHeaderOnly(blockHash *DomainHash) (*DomainBlock, error) + GetBlockHeader(blockHash *DomainHash) (BlockHeader, error) + GetBlockInfo(blockHash *DomainHash) (*BlockInfo, error) + GetBlockRelations(blockHash *DomainHash) (parents []*DomainHash, children []*DomainHash, err error) + GetBlockAcceptanceData(blockHash *DomainHash) (AcceptanceData, error) + GetBlocksAcceptanceData(blockHashes []*DomainHash) ([]AcceptanceData, error) + + GetHashesBetween(lowHash, highHash *DomainHash, maxBlocks uint64) (hashes []*DomainHash, actualHighHash *DomainHash, err error) + GetAnticone(blockHash, contextHash *DomainHash, maxBlocks uint64) (hashes []*DomainHash, err error) + GetMissingBlockBodyHashes(highHash *DomainHash) ([]*DomainHash, error) + GetPruningPointUTXOs(expectedPruningPointHash *DomainHash, fromOutpoint *DomainOutpoint, limit int) ([]*OutpointAndUTXOEntryPair, error) + GetVirtualUTXOs(expectedVirtualParents []*DomainHash, fromOutpoint *DomainOutpoint, limit int) ([]*OutpointAndUTXOEntryPair, error) + PruningPoint() (*DomainHash, error) + PruningPointHeaders() ([]BlockHeader, error) + 
PruningPointAndItsAnticone() ([]*DomainHash, error) + ClearImportedPruningPointData() error + AppendImportedPruningPointUTXOs(outpointAndUTXOEntryPairs []*OutpointAndUTXOEntryPair) error + ValidateAndInsertImportedPruningPoint(newPruningPoint *DomainHash) error + GetVirtualSelectedParent() (*DomainHash, error) + CreateBlockLocatorFromPruningPoint(highHash *DomainHash, limit uint32) (BlockLocator, error) + CreateHeadersSelectedChainBlockLocator(lowHash, highHash *DomainHash) (BlockLocator, error) + CreateFullHeadersSelectedChainBlockLocator() (BlockLocator, error) + GetSyncInfo() (*SyncInfo, error) + Tips() ([]*DomainHash, error) + GetVirtualInfo() (*VirtualInfo, error) + GetVirtualDAAScore() (uint64, error) + IsValidPruningPoint(blockHash *DomainHash) (bool, error) + ArePruningPointsViolatingFinality(pruningPoints []BlockHeader) (bool, error) + GetVirtualSelectedParentChainFromBlock(blockHash *DomainHash) (*SelectedChainPath, error) + IsInSelectedParentChainOf(blockHashA *DomainHash, blockHashB *DomainHash) (bool, error) + GetHeadersSelectedTip() (*DomainHash, error) + Anticone(blockHash *DomainHash) ([]*DomainHash, error) + EstimateNetworkHashesPerSecond(startHash *DomainHash, windowSize int) (uint64, error) + PopulateMass(transaction *DomainTransaction) + ResolveVirtual(progressReportCallback func(uint64, uint64)) error + BlockDAAWindowHashes(blockHash *DomainHash) ([]*DomainHash, error) + TrustedDataDataDAAHeader(trustedBlockHash, daaBlockHash *DomainHash, daaBlockWindowIndex uint64) (*TrustedDataDataDAAHeader, error) + TrustedBlockAssociatedGHOSTDAGDataBlockHashes(blockHash *DomainHash) ([]*DomainHash, error) + TrustedGHOSTDAGData(blockHash *DomainHash) (*BlockGHOSTDAGData, error) + IsChainBlock(blockHash *DomainHash) (bool, error) + VirtualMergeDepthRoot() (*DomainHash, error) + IsNearlySynced() (bool, error) +} diff --git a/consensus/model/externalapi/consensus_events.go b/consensus/model/externalapi/consensus_events.go new file mode 100644 index 00000000..54c01584 --- /dev/null +++ b/consensus/model/externalapi/consensus_events.go @@ -0,0 +1,30 @@ +package externalapi + +// ConsensusEvent is an interface type that is implemented by all events raised by consensus +type ConsensusEvent interface { + isConsensusEvent() +} + +// BlockAdded is an event raised by consensus when a block was added to the dag +type BlockAdded struct { + Block *DomainBlock +} + +func (*BlockAdded) isConsensusEvent() {} + +// VirtualChangeSet is an event raised by consensus when virtual changes +type VirtualChangeSet struct { + VirtualSelectedParentChainChanges *SelectedChainPath + VirtualUTXODiff UTXODiff + VirtualParents []*DomainHash + VirtualSelectedParentBlueScore uint64 + VirtualDAAScore uint64 +} + +func (*VirtualChangeSet) isConsensusEvent() {} + +// SelectedChainPath is a path the of the selected chains between two blocks. +type SelectedChainPath struct { + Added []*DomainHash + Removed []*DomainHash +} diff --git a/consensus/model/externalapi/ghostdag.go b/consensus/model/externalapi/ghostdag.go new file mode 100644 index 00000000..d92918db --- /dev/null +++ b/consensus/model/externalapi/ghostdag.go @@ -0,0 +1,67 @@ +package externalapi + +import ( + "math/big" +) + +// KType defines the size of GHOSTDAG consensus algorithm K parameter. 
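The consensus event types defined in consensus_events.go above (BlockAdded, VirtualChangeSet) are meant to be handled by type-switching on the ConsensusEvent interface. Below is a minimal consumer sketch, assuming events are delivered over a channel; the function and parameter names are illustrative and not part of this patch:

func handleConsensusEvents(events <-chan ConsensusEvent) {
    for event := range events {
        switch e := event.(type) {
        case *BlockAdded:
            // A new block entered the DAG; e.Block carries the full block.
            _ = e.Block
        case *VirtualChangeSet:
            // The virtual changed; inspect the chain changes, new parents, DAA score, etc.
            _ = e.VirtualSelectedParentChainChanges
        }
    }
}

Because isConsensusEvent is unexported, only types declared in this package can satisfy the interface, which keeps the set of possible events closed to this package.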
+type KType byte + +// BlockGHOSTDAGData represents GHOSTDAG data for some block +type BlockGHOSTDAGData struct { + blueScore uint64 + blueWork *big.Int + selectedParent *DomainHash + mergeSetBlues []*DomainHash + mergeSetReds []*DomainHash + bluesAnticoneSizes map[DomainHash]KType +} + +// NewBlockGHOSTDAGData creates a new instance of BlockGHOSTDAGData +func NewBlockGHOSTDAGData( + blueScore uint64, + blueWork *big.Int, + selectedParent *DomainHash, + mergeSetBlues []*DomainHash, + mergeSetReds []*DomainHash, + bluesAnticoneSizes map[DomainHash]KType) *BlockGHOSTDAGData { + + return &BlockGHOSTDAGData{ + blueScore: blueScore, + blueWork: blueWork, + selectedParent: selectedParent, + mergeSetBlues: mergeSetBlues, + mergeSetReds: mergeSetReds, + bluesAnticoneSizes: bluesAnticoneSizes, + } +} + +// BlueScore returns the BlueScore of the block +func (bgd *BlockGHOSTDAGData) BlueScore() uint64 { + return bgd.blueScore +} + +// BlueWork returns the BlueWork of the block +func (bgd *BlockGHOSTDAGData) BlueWork() *big.Int { + return bgd.blueWork +} + +// SelectedParent returns the SelectedParent of the block +func (bgd *BlockGHOSTDAGData) SelectedParent() *DomainHash { + return bgd.selectedParent +} + +// MergeSetBlues returns the MergeSetBlues of the block (not a copy) +func (bgd *BlockGHOSTDAGData) MergeSetBlues() []*DomainHash { + return bgd.mergeSetBlues +} + +// MergeSetReds returns the MergeSetReds of the block (not a copy) +func (bgd *BlockGHOSTDAGData) MergeSetReds() []*DomainHash { + return bgd.mergeSetReds +} + +// BluesAnticoneSizes returns a map between the blocks in its MergeSetBlues and the size of their anticone +func (bgd *BlockGHOSTDAGData) BluesAnticoneSizes() map[DomainHash]KType { + return bgd.bluesAnticoneSizes +} diff --git a/consensus/model/externalapi/hash.go b/consensus/model/externalapi/hash.go new file mode 100644 index 00000000..1ea8a900 --- /dev/null +++ b/consensus/model/externalapi/hash.go @@ -0,0 +1,123 @@ +package externalapi + +import ( + "bytes" + "encoding/hex" + + "github.com/pkg/errors" +) + +// DomainHashSize of array used to store hashes. +const DomainHashSize = 32 + +// DomainHash is the domain representation of a Hash +type DomainHash struct { + hashArray [DomainHashSize]byte +} + +// NewZeroHash returns a DomainHash that represents the zero value (0x000000...000) +func NewZeroHash() *DomainHash { + return &DomainHash{hashArray: [32]byte{}} +} + +// NewDomainHashFromByteArray constructs a new DomainHash out of a byte array +func NewDomainHashFromByteArray(hashBytes *[DomainHashSize]byte) *DomainHash { + return &DomainHash{ + hashArray: *hashBytes, + } +} + +// NewDomainHashFromByteSlice constructs a new DomainHash out of a byte slice. +// Returns an error if the length of the byte slice is not exactly `DomainHashSize` +func NewDomainHashFromByteSlice(hashBytes []byte) (*DomainHash, error) { + if len(hashBytes) != DomainHashSize { + return nil, errors.Errorf("invalid hash size. Want: %d, got: %d", + DomainHashSize, len(hashBytes)) + } + domainHash := DomainHash{ + hashArray: [DomainHashSize]byte{}, + } + copy(domainHash.hashArray[:], hashBytes) + return &domainHash, nil +} + +// NewDomainHashFromString constructs a new DomainHash out of a hex-encoded string. +// Returns an error if the length of the string is not exactly `DomainHashSize * 2` +func NewDomainHashFromString(hashString string) (*DomainHash, error) { + expectedLength := DomainHashSize * 2 + // Return error if hash string is too long. 
+ if len(hashString) != expectedLength { + return nil, errors.Errorf("hash string length is %d, while it should be be %d", + len(hashString), expectedLength) + } + + hashBytes, err := hex.DecodeString(hashString) + if err != nil { + return nil, errors.WithStack(err) + } + + return NewDomainHashFromByteSlice(hashBytes) +} + +// String returns the Hash as the hexadecimal string of the hash. +func (hash DomainHash) String() string { + return hex.EncodeToString(hash.hashArray[:]) +} + +// ByteArray returns the bytes in this hash represented as a byte array. +// The hash bytes are cloned, therefore it is safe to modify the resulting array. +func (hash *DomainHash) ByteArray() *[DomainHashSize]byte { + arrayClone := hash.hashArray + return &arrayClone +} + +// ByteSlice returns the bytes in this hash represented as a byte slice. +// The hash bytes are cloned, therefore it is safe to modify the resulting slice. +func (hash *DomainHash) ByteSlice() []byte { + return hash.ByteArray()[:] +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. +var _ DomainHash = DomainHash{hashArray: [DomainHashSize]byte{}} + +// Equal returns whether hash equals to other +func (hash *DomainHash) Equal(other *DomainHash) bool { + if hash == nil || other == nil { + return hash == other + } + + return hash.hashArray == other.hashArray +} + +// Less returns true if hash is less than other +func (hash *DomainHash) Less(other *DomainHash) bool { + return bytes.Compare(hash.hashArray[:], other.hashArray[:]) < 0 +} + +// LessOrEqual returns true if hash is smaller or equal to other +func (hash *DomainHash) LessOrEqual(other *DomainHash) bool { + return bytes.Compare(hash.hashArray[:], other.hashArray[:]) <= 0 +} + +// CloneHashes returns a clone of the given hashes slice. +// Note: since DomainHash is a read-only type, the clone is shallow +func CloneHashes(hashes []*DomainHash) []*DomainHash { + clone := make([]*DomainHash, len(hashes)) + copy(clone, hashes) + return clone +} + +// HashesEqual returns whether the given hash slices are equal. 
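The DomainHash API above is small but used throughout the external API, so a quick usage sketch may help; the function name is illustrative and not part of this patch:

func exampleDomainHashUsage() {
    zero := NewZeroHash()
    one := NewDomainHashFromByteArray(&[DomainHashSize]byte{31: 0x01}) // 0x00...01
    _ = zero.Equal(one) // false: the underlying 32-byte arrays differ
    _ = zero.Less(one)  // true: Less is a byte-wise lexicographic comparison
}

Note that ByteArray and ByteSlice return copies, so callers may mutate the result freely, while CloneHashes is intentionally shallow because DomainHash itself is read-only.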
+func HashesEqual(a, b []*DomainHash) bool { + if len(a) != len(b) { + return false + } + + for i, hash := range a { + if !hash.Equal(b[i]) { + return false + } + } + return true +} diff --git a/consensus/model/externalapi/hash_clone_equal_test.go b/consensus/model/externalapi/hash_clone_equal_test.go new file mode 100644 index 00000000..7e47b035 --- /dev/null +++ b/consensus/model/externalapi/hash_clone_equal_test.go @@ -0,0 +1,79 @@ +package externalapi + +import ( + "testing" +) + +type testHashToCompare struct { + hash *DomainHash + expectedResult bool +} + +type testHashStruct struct { + baseHash *DomainHash + hashesToCompareTo []testHashToCompare +} + +func initTestDomainHashForEqual() []*testHashStruct { + tests := []*testHashStruct{ + { + baseHash: nil, + hashesToCompareTo: []testHashToCompare{ + { + hash: nil, + expectedResult: true, + }, { + hash: NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}), + expectedResult: false, + }, + }, + }, { + baseHash: NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF}), + hashesToCompareTo: []testHashToCompare{ + { + hash: nil, + expectedResult: false, + }, { + hash: NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}), + expectedResult: false, + }, { + hash: NewDomainHashFromByteArray(&[DomainHashSize]byte{ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF}), + expectedResult: true, + }, + }, + }, + } + return tests +} + +func TestDomainHash_Equal(t *testing.T) { + hashTests := initTestDomainHashForEqual() + for i, test := range hashTests { + for j, subTest := range test.hashesToCompareTo { + result1 := test.baseHash.Equal(subTest.hash) + if result1 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) + } + result2 := subTest.hash.Equal(test.baseHash) + if result2 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) + } + } + } +} diff --git a/consensus/model/externalapi/pruning_point_proof.go b/consensus/model/externalapi/pruning_point_proof.go new file mode 100644 index 00000000..bee119c4 --- /dev/null +++ b/consensus/model/externalapi/pruning_point_proof.go @@ -0,0 +1,6 @@ +package externalapi + +// PruningPointProof is the data structure holding the pruning point proof +type PruningPointProof struct { + Headers [][]BlockHeader +} diff --git a/consensus/model/externalapi/readonlyutxoset.go b/consensus/model/externalapi/readonlyutxoset.go new file mode 100644 index 00000000..61b1827e --- /dev/null +++ b/consensus/model/externalapi/readonlyutxoset.go @@ -0,0 +1,10 @@ +package externalapi + +// ReadOnlyUTXOSetIterator is an iterator over all entries in a +// ReadOnlyUTXOSet +type ReadOnlyUTXOSetIterator interface { + First() bool + Next() bool + Get() (outpoint *DomainOutpoint, utxoEntry 
UTXOEntry, err error) + Close() error +} diff --git a/consensus/model/externalapi/subnetworkid.go b/consensus/model/externalapi/subnetworkid.go new file mode 100644 index 00000000..06069c63 --- /dev/null +++ b/consensus/model/externalapi/subnetworkid.go @@ -0,0 +1,33 @@ +package externalapi + +import "encoding/hex" + +// DomainSubnetworkIDSize is the size of the array used to store subnetwork IDs. +const DomainSubnetworkIDSize = 20 + +// DomainSubnetworkID is the domain representation of a Subnetwork ID +type DomainSubnetworkID [DomainSubnetworkIDSize]byte + +// String stringifies a subnetwork ID. +func (id DomainSubnetworkID) String() string { + return hex.EncodeToString(id[:]) +} + +// Clone returns a clone of DomainSubnetworkID +func (id *DomainSubnetworkID) Clone() *DomainSubnetworkID { + idClone := *id + return &idClone +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. +var _ DomainSubnetworkID = [DomainSubnetworkIDSize]byte{} + +// Equal returns whether id equals to other +func (id *DomainSubnetworkID) Equal(other *DomainSubnetworkID) bool { + if id == nil || other == nil { + return id == other + } + + return *id == *other +} diff --git a/consensus/model/externalapi/subnetworkid_clone_equal_test.go b/consensus/model/externalapi/subnetworkid_clone_equal_test.go new file mode 100644 index 00000000..dc6d7c5f --- /dev/null +++ b/consensus/model/externalapi/subnetworkid_clone_equal_test.go @@ -0,0 +1,99 @@ +package externalapi + +import ( + "reflect" + "testing" +) + +func initTestDomainSubnetworkIDForClone() []*DomainSubnetworkID { + + tests := []*DomainSubnetworkID{{1, 0, 0xFF, 0}, {0, 1, 0xFF, 1}, + {0, 1, 0xFF, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}} + return tests +} + +type testDomainSubnetworkIDToCompare struct { + domainSubnetworkID *DomainSubnetworkID + expectedResult bool +} + +type testDomainSubnetworkIDStruct struct { + baseDomainSubnetworkID *DomainSubnetworkID + domainSubnetworkIDToCompareTo []testDomainSubnetworkIDToCompare +} + +func initTestDomainSubnetworkIDForEqual() []testDomainSubnetworkIDStruct { + tests := []testDomainSubnetworkIDStruct{ + { + baseDomainSubnetworkID: nil, + domainSubnetworkIDToCompareTo: []testDomainSubnetworkIDToCompare{ + { + domainSubnetworkID: &DomainSubnetworkID{255, 255, 0xFF, 0}, + expectedResult: false, + }, + { + domainSubnetworkID: nil, + expectedResult: true, + }, + }, + }, { + baseDomainSubnetworkID: &DomainSubnetworkID{0}, + domainSubnetworkIDToCompareTo: []testDomainSubnetworkIDToCompare{ + { + domainSubnetworkID: &DomainSubnetworkID{255, 254, 0xFF, 0}, + expectedResult: false, + }, + { + domainSubnetworkID: &DomainSubnetworkID{0}, + expectedResult: true, + }, + }, + }, { + baseDomainSubnetworkID: &DomainSubnetworkID{0, 1, 0xFF, 1, 1, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}, + domainSubnetworkIDToCompareTo: []testDomainSubnetworkIDToCompare{ + { + domainSubnetworkID: &DomainSubnetworkID{0, 1, 0xFF, 1, 1, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}, + expectedResult: true, + }, + { + domainSubnetworkID: &DomainSubnetworkID{0, 10, 0xFF, 0}, + expectedResult: false, + }, + }, + }, + } + return tests +} + +func TestDomainSubnetworkID_Equal(t *testing.T) { + + domainSubnetworkIDs := initTestDomainSubnetworkIDForEqual() + for i, test := range domainSubnetworkIDs { + for j, subTest := range test.domainSubnetworkIDToCompareTo { + result1 
:= test.baseDomainSubnetworkID.Equal(subTest.domainSubnetworkID) + if result1 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) + } + result2 := subTest.domainSubnetworkID.Equal(test.baseDomainSubnetworkID) + if result2 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) + } + } + } +} + +func TestDomainSubnetworkID_Clone(t *testing.T) { + + domainSubnetworkIDs := initTestDomainSubnetworkIDForClone() + for i, domainSubnetworkID := range domainSubnetworkIDs { + domainSubnetworkIDClone := domainSubnetworkID.Clone() + if !domainSubnetworkIDClone.Equal(domainSubnetworkID) { + t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) + } + if !reflect.DeepEqual(domainSubnetworkID, domainSubnetworkIDClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} diff --git a/consensus/model/externalapi/sync.go b/consensus/model/externalapi/sync.go new file mode 100644 index 00000000..797401e9 --- /dev/null +++ b/consensus/model/externalapi/sync.go @@ -0,0 +1,36 @@ +package externalapi + +// SyncInfo holds info about the current sync state of the consensus +type SyncInfo struct { + HeaderCount uint64 + BlockCount uint64 +} + +// Clone returns a clone of SyncInfo +func (si *SyncInfo) Clone() *SyncInfo { + return &SyncInfo{ + HeaderCount: si.HeaderCount, + BlockCount: si.BlockCount, + } +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. +var _ = SyncInfo{0, 0} + +// Equal returns whether si equals to other +func (si *SyncInfo) Equal(other *SyncInfo) bool { + if si == nil || other == nil { + return si == other + } + + if si.HeaderCount != other.HeaderCount { + return false + } + + if si.BlockCount != other.BlockCount { + return false + } + + return true +} diff --git a/consensus/model/externalapi/sync_equal_clone_test.go b/consensus/model/externalapi/sync_equal_clone_test.go new file mode 100644 index 00000000..d4959aca --- /dev/null +++ b/consensus/model/externalapi/sync_equal_clone_test.go @@ -0,0 +1,99 @@ +package externalapi + +import ( + "reflect" + "testing" +) + +func initTestSyncInfoForClone() []*SyncInfo { + + tests := []*SyncInfo{{ + 0xF, + 0xF}} + return tests +} + +type testSyncInfoToCompare struct { + syncInfo *SyncInfo + expectedResult bool +} + +type testSyncInfoStruct struct { + baseSyncInfo *SyncInfo + syncInfoToCompareTo []testSyncInfoToCompare +} + +func initTestSyncInfoForEqual() []*testSyncInfoStruct { + tests := []*testSyncInfoStruct{ + { + baseSyncInfo: nil, + syncInfoToCompareTo: []testSyncInfoToCompare{ + { + syncInfo: &SyncInfo{ + 0xF, + 0xF}, + expectedResult: false, + }, { + syncInfo: nil, + expectedResult: true, + }, + }}, { + baseSyncInfo: &SyncInfo{ + 0xF, + 0xF}, + syncInfoToCompareTo: []testSyncInfoToCompare{ + { + syncInfo: &SyncInfo{ + 0xF, + 0xF}, + expectedResult: true, + }, + { + syncInfo: &SyncInfo{ + 0xF1, + 0xF}, + expectedResult: false, + }, { + syncInfo: nil, + expectedResult: false, + }, { + syncInfo: &SyncInfo{ + 0xF, + 0xF1}, + expectedResult: false}, + }, + }, + } + return tests +} + +func TestSyncInfo_Equal(t *testing.T) { + + testSyncState := initTestSyncInfoForEqual() + for i, test := range testSyncState { + for j, subTest := range test.syncInfoToCompareTo { + result1 := test.baseSyncInfo.Equal(subTest.syncInfo) + if result1 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: 
Expected %t but got %t", i, j, subTest.expectedResult, result1) + } + result2 := subTest.syncInfo.Equal(test.baseSyncInfo) + if result2 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) + } + } + } +} + +func TestSyncInfo_Clone(t *testing.T) { + + testSyncInfo := initTestSyncInfoForClone() + for i, syncInfo := range testSyncInfo { + syncStateClone := syncInfo.Clone() + if !syncStateClone.Equal(syncInfo) { + t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) + } + if !reflect.DeepEqual(syncInfo, syncStateClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} diff --git a/consensus/model/externalapi/transaction.go b/consensus/model/externalapi/transaction.go new file mode 100644 index 00000000..b12baec4 --- /dev/null +++ b/consensus/model/externalapi/transaction.go @@ -0,0 +1,363 @@ +package externalapi + +import ( + "bytes" + "encoding/binary" + "fmt" + + "github.com/pkg/errors" +) + +// DomainTransaction represents a Kaspa transaction +type DomainTransaction struct { + Version uint16 + Inputs []*DomainTransactionInput + Outputs []*DomainTransactionOutput + LockTime uint64 + SubnetworkID DomainSubnetworkID + Gas uint64 + Payload []byte + + Fee uint64 + Mass uint64 + + // ID is a field that is used to cache the transaction ID. + // Always use consensushashing.TransactionID instead of accessing this field directly + ID *DomainTransactionID +} + +// Clone returns a clone of DomainTransaction +func (tx *DomainTransaction) Clone() *DomainTransaction { + payloadClone := make([]byte, len(tx.Payload)) + copy(payloadClone, tx.Payload) + + inputsClone := make([]*DomainTransactionInput, len(tx.Inputs)) + for i, input := range tx.Inputs { + inputsClone[i] = input.Clone() + } + + outputsClone := make([]*DomainTransactionOutput, len(tx.Outputs)) + for i, output := range tx.Outputs { + outputsClone[i] = output.Clone() + } + + var idClone *DomainTransactionID + if tx.ID != nil { + idClone = tx.ID.Clone() + } + + return &DomainTransaction{ + Version: tx.Version, + Inputs: inputsClone, + Outputs: outputsClone, + LockTime: tx.LockTime, + SubnetworkID: *tx.SubnetworkID.Clone(), + Gas: tx.Gas, + Payload: payloadClone, + Fee: tx.Fee, + Mass: tx.Mass, + ID: idClone, + } +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. 
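The comment above refers to a guard idiom used throughout this package: a positional composite literal that enumerates every field, so that adding or removing a field breaks compilation and reminds the author to update Equal and Clone. A stripped-down sketch of the same idiom on a hypothetical type (exampleGuarded is illustrative, not part of this patch):

// Hypothetical two-field type, used only to illustrate the guard idiom.
type exampleGuarded struct {
    a uint64
    b []byte
}

// Positional literal: adding a field to exampleGuarded makes this fail to compile.
var _ = exampleGuarded{0, []byte{}}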
+var _ = DomainTransaction{0, []*DomainTransactionInput{}, []*DomainTransactionOutput{}, 0, + DomainSubnetworkID{}, 0, []byte{}, 0, 0, + &DomainTransactionID{}} + +// Equal returns whether tx equals to other +func (tx *DomainTransaction) Equal(other *DomainTransaction) bool { + if tx == nil || other == nil { + return tx == other + } + + if tx.Version != other.Version { + return false + } + + if len(tx.Inputs) != len(other.Inputs) { + return false + } + + for i, input := range tx.Inputs { + if !input.Equal(other.Inputs[i]) { + return false + } + } + + if len(tx.Outputs) != len(other.Outputs) { + return false + } + + for i, output := range tx.Outputs { + if !output.Equal(other.Outputs[i]) { + return false + } + } + + if tx.LockTime != other.LockTime { + return false + } + + if !tx.SubnetworkID.Equal(&other.SubnetworkID) { + return false + } + + if tx.Gas != other.Gas { + return false + } + + if !bytes.Equal(tx.Payload, other.Payload) { + return false + } + + if tx.Fee != 0 && other.Fee != 0 && tx.Fee != other.Fee { + panic(errors.New("identical transactions should always have the same fee")) + } + + if tx.Mass != 0 && other.Mass != 0 && tx.Mass != other.Mass { + panic(errors.New("identical transactions should always have the same mass")) + } + + if tx.ID != nil && other.ID != nil && !tx.ID.Equal(other.ID) { + panic(errors.New("identical transactions should always have the same ID")) + } + + return true +} + +// DomainTransactionInput represents a Kaspa transaction input +type DomainTransactionInput struct { + PreviousOutpoint DomainOutpoint + SignatureScript []byte + Sequence uint64 + SigOpCount byte + + UTXOEntry UTXOEntry +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. +var _ = &DomainTransactionInput{DomainOutpoint{}, []byte{}, 0, 0, nil} + +// Equal returns whether input equals to other +func (input *DomainTransactionInput) Equal(other *DomainTransactionInput) bool { + if input == nil || other == nil { + return input == other + } + + if !input.PreviousOutpoint.Equal(&other.PreviousOutpoint) { + return false + } + + if !bytes.Equal(input.SignatureScript, other.SignatureScript) { + return false + } + + if input.Sequence != other.Sequence { + return false + } + + if input.SigOpCount != other.SigOpCount { + return false + } + + if input.UTXOEntry != nil && other.UTXOEntry != nil && !input.UTXOEntry.Equal(other.UTXOEntry) { + panic(errors.New("identical inputs should always have the same UTXO entry")) + } + + return true +} + +// Clone returns a clone of DomainTransactionInput +func (input *DomainTransactionInput) Clone() *DomainTransactionInput { + signatureScriptClone := make([]byte, len(input.SignatureScript)) + copy(signatureScriptClone, input.SignatureScript) + + return &DomainTransactionInput{ + PreviousOutpoint: *input.PreviousOutpoint.Clone(), + SignatureScript: signatureScriptClone, + Sequence: input.Sequence, + SigOpCount: input.SigOpCount, + UTXOEntry: input.UTXOEntry, + } +} + +// DomainOutpoint represents a Kaspa transaction outpoint +type DomainOutpoint struct { + TransactionID DomainTransactionID + Index uint32 +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. 
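Note the treatment of cached fields in DomainTransaction.Equal above: Fee, Mass and ID are skipped whenever either side leaves them unset, but a mismatch between two populated values panics, since identical transactions should never disagree on cached data. A small sketch of that behaviour (the function name is illustrative, not part of this patch):

func exampleCachedFieldEquality() bool {
    txA := &DomainTransaction{Version: 1, SubnetworkID: DomainSubnetworkID{0x01}}
    txB := txA.Clone()
    txB.Fee = 1234 // Fee is a cache; it is populated on one side only, so Equal ignores it
    return txA.Equal(txB) // true
}

The same pattern appears in DomainTransactionInput.Equal, where a disagreement between two non-nil UTXOEntry values also panics.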
+var _ = DomainOutpoint{DomainTransactionID{}, 0} + +// Equal returns whether op equals to other +func (op *DomainOutpoint) Equal(other *DomainOutpoint) bool { + if op == nil || other == nil { + return op == other + } + + return *op == *other +} + +// Clone returns a clone of DomainOutpoint +func (op *DomainOutpoint) Clone() *DomainOutpoint { + return &DomainOutpoint{ + TransactionID: *op.TransactionID.Clone(), + Index: op.Index, + } +} + +// String stringifies an outpoint. +func (op DomainOutpoint) String() string { + return fmt.Sprintf("(%s: %d)", op.TransactionID, op.Index) +} + +// NewDomainOutpoint instantiates a new DomainOutpoint with the given id and index +func NewDomainOutpoint(id *DomainTransactionID, index uint32) *DomainOutpoint { + return &DomainOutpoint{ + TransactionID: *id, + Index: index, + } +} + +// ScriptPublicKey represents a Kaspad ScriptPublicKey +type ScriptPublicKey struct { + Script []byte + Version uint16 +} + +// Equal returns whether spk equals to other +func (spk *ScriptPublicKey) Equal(other *ScriptPublicKey) bool { + if spk == nil || other == nil { + return spk == other + } + + if spk.Version != other.Version { + return false + } + + return bytes.Equal(spk.Script, other.Script) +} + +// String stringifies a ScriptPublicKey. +func (spk *ScriptPublicKey) String() string { + var versionBytes = make([]byte, 2) // uint16 + binary.LittleEndian.PutUint16(versionBytes, spk.Version) + versionString := string(versionBytes) + scriptString := string(spk.Script) + return versionString + scriptString +} + +// NewScriptPublicKeyFromString converts the given string to a scriptPublicKey +func NewScriptPublicKeyFromString(ScriptPublicKeyString string) *ScriptPublicKey { + bytes := []byte(ScriptPublicKeyString) + version := binary.LittleEndian.Uint16(bytes[:2]) + script := bytes[2:] + return &ScriptPublicKey{Script: script, Version: version} +} + +// DomainTransactionOutput represents a Kaspad transaction output +type DomainTransactionOutput struct { + Value uint64 + ScriptPublicKey *ScriptPublicKey +} + +// If this doesn't compile, it means the type definition has been changed, so it's +// an indication to update Equal and Clone accordingly. 
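ScriptPublicKey.String and NewScriptPublicKeyFromString above implement a raw binary round-trip: two little-endian version bytes followed by the script bytes, not a hex or human-readable encoding. A minimal round-trip sketch (the function name is illustrative, not part of this patch):

func exampleScriptPublicKeyRoundTrip() bool {
    original := &ScriptPublicKey{Script: []byte{0xAA, 0xBB, 0xCC}, Version: 7}
    encoded := original.String()                     // 2 version bytes + raw script bytes
    decoded := NewScriptPublicKeyFromString(encoded) // parses the same layout back
    return original.Equal(decoded)                   // true
}

Since the resulting string can contain arbitrary bytes, it is suited to internal uses such as map keys rather than display.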
+var _ = DomainTransactionOutput{0, &ScriptPublicKey{Script: []byte{}, Version: 0}} + +// Equal returns whether output equals to other +func (output *DomainTransactionOutput) Equal(other *DomainTransactionOutput) bool { + if output == nil || other == nil { + return output == other + } + + if output.Value != other.Value { + return false + } + + return output.ScriptPublicKey.Equal(other.ScriptPublicKey) +} + +// Clone returns a clone of DomainTransactionOutput +func (output *DomainTransactionOutput) Clone() *DomainTransactionOutput { + scriptPublicKeyClone := &ScriptPublicKey{ + Script: make([]byte, len(output.ScriptPublicKey.Script)), + Version: output.ScriptPublicKey.Version} + copy(scriptPublicKeyClone.Script, output.ScriptPublicKey.Script) + + return &DomainTransactionOutput{ + Value: output.Value, + ScriptPublicKey: scriptPublicKeyClone, + } +} + +// DomainTransactionID represents the ID of a Kaspa transaction +type DomainTransactionID DomainHash + +// NewDomainTransactionIDFromByteArray constructs a new TransactionID out of a byte array +func NewDomainTransactionIDFromByteArray(transactionIDBytes *[DomainHashSize]byte) *DomainTransactionID { + return (*DomainTransactionID)(NewDomainHashFromByteArray(transactionIDBytes)) +} + +// NewDomainTransactionIDFromByteSlice constructs a new TransactionID out of a byte slice +// Returns an error if the length of the byte slice is not exactly `DomainHashSize` +func NewDomainTransactionIDFromByteSlice(transactionIDBytes []byte) (*DomainTransactionID, error) { + hash, err := NewDomainHashFromByteSlice(transactionIDBytes) + if err != nil { + return nil, err + } + return (*DomainTransactionID)(hash), nil +} + +// NewDomainTransactionIDFromString constructs a new TransactionID out of a string +// Returns an error if the length of the string is not exactly `DomainHashSize * 2` +func NewDomainTransactionIDFromString(transactionIDString string) (*DomainTransactionID, error) { + hash, err := NewDomainHashFromString(transactionIDString) + if err != nil { + return nil, err + } + return (*DomainTransactionID)(hash), nil +} + +// String stringifies a transaction ID. +func (id DomainTransactionID) String() string { + return DomainHash(id).String() +} + +// Clone returns a clone of DomainTransactionID +func (id *DomainTransactionID) Clone() *DomainTransactionID { + idClone := *id + return &idClone +} + +// Equal returns whether id equals to other +func (id *DomainTransactionID) Equal(other *DomainTransactionID) bool { + return (*DomainHash)(id).Equal((*DomainHash)(other)) +} + +// Less returns true if id is less than other +func (id *DomainTransactionID) Less(other *DomainTransactionID) bool { + return (*DomainHash)(id).Less((*DomainHash)(other)) +} + +// LessOrEqual returns true if id is smaller or equal to other +func (id *DomainTransactionID) LessOrEqual(other *DomainTransactionID) bool { + return (*DomainHash)(id).LessOrEqual((*DomainHash)(other)) +} + +// ByteArray returns the bytes in this transactionID represented as a byte array. +// The transactionID bytes are cloned, therefore it is safe to modify the resulting array. +func (id *DomainTransactionID) ByteArray() *[DomainHashSize]byte { + return (*DomainHash)(id).ByteArray() +} + +// ByteSlice returns the bytes in this transactionID represented as a byte slice. +// The transactionID bytes are cloned, therefore it is safe to modify the resulting slice. 
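DomainTransactionID is a DomainHash under a distinct name, so it shares the 64-hex-character string form and the byte-wise ordering helpers. A short sketch (the function name is illustrative, not part of this patch):

func exampleTransactionIDOrdering() bool {
    idA := NewDomainTransactionIDFromByteArray(&[DomainHashSize]byte{31: 0x01})
    idB := NewDomainTransactionIDFromByteArray(&[DomainHashSize]byte{31: 0x02})
    _ = idA.String()     // 64 hexadecimal characters, same format as DomainHash
    return idA.Less(idB) // true: Less delegates to the underlying DomainHash comparison
}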
+func (id *DomainTransactionID) ByteSlice() []byte { + return (*DomainHash)(id).ByteSlice() +} diff --git a/consensus/model/externalapi/transaction_equal_clone_test.go b/consensus/model/externalapi/transaction_equal_clone_test.go new file mode 100644 index 00000000..3534ae07 --- /dev/null +++ b/consensus/model/externalapi/transaction_equal_clone_test.go @@ -0,0 +1,1107 @@ +package externalapi_test + +import ( + "reflect" + "testing" + + "github.com/Qitmeer/qng/consensus/model/externalapi" + "github.com/Qitmeer/qng/consensus/utils/utxo" +) + +// Changed fields of a test struct compared to a base test struct marked as "changed" and +// pointing in some cases name changed struct field + +type transactionToCompare struct { + tx *externalapi.DomainTransaction + expectedResult bool + expectsPanic bool +} + +type testDomainTransactionStruct struct { + baseTx *externalapi.DomainTransaction + transactionToCompareTo []*transactionToCompare +} + +type transactionInputToCompare struct { + tx *externalapi.DomainTransactionInput + expectedResult bool + expectsPanic bool +} + +type testDomainTransactionInputStruct struct { + baseTx *externalapi.DomainTransactionInput + transactionInputToCompareTo []*transactionInputToCompare +} + +type transactionOutputToCompare struct { + tx *externalapi.DomainTransactionOutput + expectedResult bool +} + +type testDomainTransactionOutputStruct struct { + baseTx *externalapi.DomainTransactionOutput + transactionOutputToCompareTo []*transactionOutputToCompare +} + +type domainOutpointToCompare struct { + domainOutpoint *externalapi.DomainOutpoint + expectedResult bool +} + +type testDomainOutpointStruct struct { + baseDomainOutpoint *externalapi.DomainOutpoint + domainOutpointToCompareTo []*domainOutpointToCompare +} + +type domainTransactionIDToCompare struct { + domainTransactionID *externalapi.DomainTransactionID + expectedResult bool +} + +type testDomainTransactionIDStruct struct { + baseDomainTransactionID *externalapi.DomainTransactionID + domainTransactionIDToCompareTo []*domainTransactionIDToCompare +} + +func initTestBaseTransaction() *externalapi.DomainTransaction { + + testTx := &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, + {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + } + return testTx +} + +func initTestTransactionToCompare() []*transactionToCompare { + + testTx := []*transactionToCompare{{ + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 
0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}, //Changed + {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, + {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01, 0x02}, //Changed + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, + {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01, 0x02}, //Changed + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, 
+ expectedResult: true, + }, + { + // ID changed + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), + }, + expectsPanic: true, + }, + { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, + {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 1000000000, //Changed + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: true, + }, { + tx: &externalapi.DomainTransaction{ + 2, //Changed + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 
3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 2, //Changed + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), + }, + expectsPanic: true, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 2, //Changed + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}, + {externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}, {uint64(0xFFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2, 3}, Version: 0}}}, //changed Outputs + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + nil, //changed + }, + expectedResult: true, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFF0), // Changed sequence + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 3, // Changed SigOpCount + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 0, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: false, + }, + { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, + []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, + {uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 2, // Changed + []byte{0x01}, + 0, + 1, + 
externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + expectedResult: false, + }, + } + return testTx +} + +func initTestDomainTransactionForClone() []*externalapi.DomainTransaction { + + tests := []*externalapi.DomainTransaction{ + { + Version: 1, + Inputs: []*externalapi.DomainTransactionInput{ + {externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}, + }, + Outputs: []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), + &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}}, + LockTime: 1, + SubnetworkID: externalapi.DomainSubnetworkID{0x01}, + Gas: 1, + Payload: []byte{0x01}, + Fee: 5555555555, + Mass: 1, + ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, { + Version: 1, + Inputs: []*externalapi.DomainTransactionInput{}, + Outputs: []*externalapi.DomainTransactionOutput{}, + LockTime: 1, + SubnetworkID: externalapi.DomainSubnetworkID{0x01}, + Gas: 1, + Payload: []byte{0x01}, + Fee: 0, + Mass: 1, + ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{}), + }, + } + return tests +} + +func initTestDomainTransactionForEqual() []testDomainTransactionStruct { + + tests := []testDomainTransactionStruct{ + { + baseTx: initTestBaseTransaction(), + transactionToCompareTo: initTestTransactionToCompare(), + }, + { + baseTx: nil, + transactionToCompareTo: []*transactionToCompare{{ + tx: nil, + expectedResult: true}}, + }, { + baseTx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{}, + []*externalapi.DomainTransactionOutput{}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 1, + 1, + externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + }, + transactionToCompareTo: []*transactionToCompare{{ + tx: nil, + expectedResult: false, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{}, + []*externalapi.DomainTransactionOutput{}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 0, + []byte{0x01}, + 1, + 1, + nil, + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{}, + []*externalapi.DomainTransactionOutput{}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 1, + 1, + nil, + }, + expectedResult: true, + }, { + tx: &externalapi.DomainTransaction{ + 1, + []*externalapi.DomainTransactionInput{}, + []*externalapi.DomainTransactionOutput{}, + 1, + externalapi.DomainSubnetworkID{0x01}, + 1, + []byte{0x01}, + 2, // Changed fee + 1, + nil, + }, + expectsPanic: true, + }}, + }, + } + return tests +} + +func initTestBaseDomainTransactionInput() *externalapi.DomainTransactionInput { + 
basetxInput := &externalapi.DomainTransactionInput{ + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), + } + return basetxInput +} + +func initTestDomainTxInputToCompare() []*transactionInputToCompare { + txInput := []*transactionInputToCompare{{ + tx: &externalapi.DomainTransactionInput{ + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), + }, + expectedResult: true, + }, { + tx: &externalapi.DomainTransactionInput{ + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, false, 2), // Changed + }, + expectsPanic: true, + }, { + tx: &externalapi.DomainTransactionInput{ + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + nil, // Changed + }, + expectedResult: true, + }, { + tx: &externalapi.DomainTransactionInput{ + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFF0), // Changed + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionInput{ + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFF0), + 5, // Changed + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionInput{ + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3, 4}, // Changed + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionInput{ + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01, 0x02}), 0xFFFF}, // Changed + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionInput{ + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01, 0x02}), 0xFFFF}, // Changed + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(2 /* Changed */, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), // Changed + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionInput{ + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01, 0x02}), 0xFFFF}, // Changed + 
[]byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(3 /* Changed */, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 3), // Changed + }, + expectedResult: false, + }, { + tx: nil, + expectedResult: false, + }} + return txInput + +} + +func initTestDomainTransactionInputForClone() []*externalapi.DomainTransactionInput { + txInput := []*externalapi.DomainTransactionInput{ + { + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), + }, { + + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFFF), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), + }, { + + externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, + []byte{1, 2, 3}, + uint64(0xFFFFFFF0), + 1, + utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), + }} + return txInput +} + +func initTestBaseDomainTransactionOutput() *externalapi.DomainTransactionOutput { + basetxOutput := &externalapi.DomainTransactionOutput{ + 0xFFFFFFFF, + &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}, + } + return basetxOutput +} + +func initTestDomainTransactionOutputForClone() []*externalapi.DomainTransactionOutput { + txInput := []*externalapi.DomainTransactionOutput{ + { + 0xFFFFFFFF, + &externalapi.ScriptPublicKey{Script: []byte{0xF0, 0xFF}, Version: 0}, + }, { + 0xFFFFFFF1, + &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}, + }} + return txInput +} + +func initTestDomainTransactionOutputForEqual() []testDomainTransactionOutputStruct { + tests := []testDomainTransactionOutputStruct{ + { + baseTx: initTestBaseDomainTransactionOutput(), + transactionOutputToCompareTo: []*transactionOutputToCompare{{ + tx: &externalapi.DomainTransactionOutput{ + 0xFFFFFFFF, + &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}}, + expectedResult: true, + }, { + tx: &externalapi.DomainTransactionOutput{ + 0xFFFFFFFF, + &externalapi.ScriptPublicKey{Script: []byte{0xF0, 0xFF}, Version: 0}, // Changed + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionOutput{ + 0xFFFFFFF0, // Changed + &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}, + }, + expectedResult: false, + }, { + tx: nil, + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionOutput{ + 0xFFFFFFF0, // Changed + &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF, 0x01}, Version: 0}}, // Changed + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionOutput{ + 0xFFFFFFF0, // Changed + &externalapi.ScriptPublicKey{Script: []byte{}, Version: 0}, // Changed + }, + expectedResult: false, + }}, + }, + { + baseTx: nil, + transactionOutputToCompareTo: []*transactionOutputToCompare{{ + tx: nil, + expectedResult: true, + }, { + tx: &externalapi.DomainTransactionOutput{ + 0xFFFFFFFF, + &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}}, + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionOutput{ + 0xFFFFFFFF, + &externalapi.ScriptPublicKey{Script: []byte{0xF0, 0xFF}, Version: 0}, // Changed + }, + expectedResult: false, + }, 
{ + tx: &externalapi.DomainTransactionOutput{ + 0xFFFFFFF0, // Changed + &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}, + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionOutput{ + 0xFFFFFFF0, + &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF, 0x01}, Version: 0}, // Changed + }, + expectedResult: false, + }, { + tx: &externalapi.DomainTransactionOutput{ + 0xFFFFFFF0, + &externalapi.ScriptPublicKey{Script: []byte{}, Version: 0}, // Changed + }, + expectedResult: false, + }}, + }, + } + return tests +} + +func initTestDomainTransactionInputForEqual() []testDomainTransactionInputStruct { + + tests := []testDomainTransactionInputStruct{ + { + baseTx: initTestBaseDomainTransactionInput(), + transactionInputToCompareTo: initTestDomainTxInputToCompare(), + }, + } + return tests +} + +func TestDomainTransaction_Equal(t *testing.T) { + + txTests := initTestDomainTransactionForEqual() + for i, test := range txTests { + for j, subTest := range test.transactionToCompareTo { + func() { + defer func() { + r := recover() + panicked := r != nil + if panicked != subTest.expectsPanic { + t.Fatalf("Test #%d:%d: panicked expected to be %t but got %t: %s", i, j, subTest.expectsPanic, panicked, r) + } + }() + result1 := test.baseTx.Equal(subTest.tx) + if result1 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) + } + }() + func() { + defer func() { + r := recover() + panicked := r != nil + if panicked != subTest.expectsPanic { + t.Fatalf("Test #%d:%d: panicked expected to be %t but got %t: %s", i, j, subTest.expectsPanic, panicked, r) + } + }() + result2 := subTest.tx.Equal(test.baseTx) + if result2 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) + } + }() + } + } +} + +func TestDomainTransaction_Clone(t *testing.T) { + + txs := initTestDomainTransactionForClone() + for i, tx := range txs { + txClone := tx.Clone() + if !txClone.Equal(tx) { + t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) + } + if !reflect.DeepEqual(tx, txClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} + +func TestDomainTransactionInput_Equal(t *testing.T) { + + txTests := initTestDomainTransactionInputForEqual() + for i, test := range txTests { + for j, subTest := range test.transactionInputToCompareTo { + func() { + defer func() { + r := recover() + panicked := r != nil + if panicked != subTest.expectsPanic { + t.Fatalf("Test #%d:%d: panicked expected to be %t but got %t: %s", i, j, subTest.expectsPanic, panicked, r) + } + }() + result1 := test.baseTx.Equal(subTest.tx) + if result1 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) + } + }() + func() { + defer func() { + r := recover() + panicked := r != nil + if panicked != subTest.expectsPanic { + t.Fatalf("Test #%d:%d: panicked expected to be %t but got %t: %s", i, j, subTest.expectsPanic, panicked, r) + } + }() + result2 := subTest.tx.Equal(test.baseTx) + if result2 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) + } + }() + } + } +} + +func TestDomainTransactionInput_Clone(t *testing.T) { + + txInputs := initTestDomainTransactionInputForClone() + for i, txInput := range txInputs { + txInputClone := txInput.Clone() + if !txInputClone.Equal(txInput) { + t.Fatalf("Test #%d:[Equal] clone should be equal to 
the original", i) + } + if !reflect.DeepEqual(txInput, txInputClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} + +func TestDomainTransactionOutput_Equal(t *testing.T) { + + txTests := initTestDomainTransactionOutputForEqual() + for i, test := range txTests { + for j, subTest := range test.transactionOutputToCompareTo { + result1 := test.baseTx.Equal(subTest.tx) + if result1 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) + } + result2 := subTest.tx.Equal(test.baseTx) + if result2 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) + } + } + } +} + +func TestDomainTransactionOutput_Clone(t *testing.T) { + + txInputs := initTestDomainTransactionOutputForClone() + for i, txOutput := range txInputs { + txOutputClone := txOutput.Clone() + if !txOutputClone.Equal(txOutput) { + t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) + } + if !reflect.DeepEqual(txOutput, txOutputClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} + +func initTestDomainOutpointForClone() []*externalapi.DomainOutpoint { + outpoint := []*externalapi.DomainOutpoint{{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), + 1}, + } + return outpoint +} + +func initTestDomainOutpointForEqual() []testDomainOutpointStruct { + + var outpoint = []*domainOutpointToCompare{{ + domainOutpoint: &externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + 1}, + expectedResult: true, + }, { + domainOutpoint: &externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), + 1}, + expectedResult: false, + }, { + domainOutpoint: &externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0}), + 2}, + expectedResult: false, + }} + tests := []testDomainOutpointStruct{ + { + baseDomainOutpoint: &externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + 1}, + domainOutpointToCompareTo: outpoint, + }, {baseDomainOutpoint: &externalapi.DomainOutpoint{ + *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + 1}, + domainOutpointToCompareTo: []*domainOutpointToCompare{{domainOutpoint: nil, expectedResult: false}}, + }, {baseDomainOutpoint: nil, + domainOutpointToCompareTo: []*domainOutpointToCompare{{domainOutpoint: nil, expectedResult: true}}, + }, + } + return tests +} + +func TestDomainOutpoint_Equal(t *testing.T) { + + domainOutpoints := initTestDomainOutpointForEqual() + for i, test := range domainOutpoints { + for j, subTest := range test.domainOutpointToCompareTo { + result1 := test.baseDomainOutpoint.Equal(subTest.domainOutpoint) + if result1 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) + } + result2 := subTest.domainOutpoint.Equal(test.baseDomainOutpoint) + if result2 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) + } + } + } +} + +func TestDomainOutpoint_Clone(t *testing.T) { + + domainOutpoints := initTestDomainOutpointForClone() + for i, outpoint := range domainOutpoints { + outpointClone := outpoint.Clone() + if !outpointClone.Equal(outpoint) { + t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) + } + if !reflect.DeepEqual(outpoint, outpointClone) { + t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) + } + } +} + +func initTestDomainTransactionIDForEqual() []testDomainTransactionIDStruct { + + var outpoint = []*domainTransactionIDToCompare{{ + domainTransactionID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + expectedResult: true, + }, { + domainTransactionID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), + expectedResult: false, + }, { + domainTransactionID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0}), + expectedResult: false, + }} + tests := []testDomainTransactionIDStruct{ + { + baseDomainTransactionID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), + domainTransactionIDToCompareTo: outpoint, + }, { + baseDomainTransactionID: nil, + domainTransactionIDToCompareTo: []*domainTransactionIDToCompare{{ + domainTransactionID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), + expectedResult: false, + }}, + }, + } + return tests +} + +func TestDomainTransactionID_Equal(t *testing.T) { + domainDomainTransactionIDs := initTestDomainTransactionIDForEqual() + for i, test := range domainDomainTransactionIDs 
{ + for j, subTest := range test.domainTransactionIDToCompareTo { + result1 := test.baseDomainTransactionID.Equal(subTest.domainTransactionID) + if result1 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) + } + result2 := subTest.domainTransactionID.Equal(test.baseDomainTransactionID) + if result2 != subTest.expectedResult { + t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) + } + } + } +} diff --git a/consensus/model/externalapi/utxodiff.go b/consensus/model/externalapi/utxodiff.go new file mode 100644 index 00000000..30d7e1f3 --- /dev/null +++ b/consensus/model/externalapi/utxodiff.go @@ -0,0 +1,32 @@ +package externalapi + +// UTXOCollection represents a collection of UTXO entries, indexed by their outpoint +type UTXOCollection interface { + Iterator() ReadOnlyUTXOSetIterator + Get(outpoint *DomainOutpoint) (UTXOEntry, bool) + Contains(outpoint *DomainOutpoint) bool + Len() int +} + +// UTXODiff represents the diff between two UTXO sets +type UTXODiff interface { + ToAdd() UTXOCollection + ToRemove() UTXOCollection + WithDiff(other UTXODiff) (UTXODiff, error) + DiffFrom(other UTXODiff) (UTXODiff, error) + Reversed() UTXODiff + CloneMutable() MutableUTXODiff +} + +// MutableUTXODiff represents a UTXO-Diff that can be mutated +type MutableUTXODiff interface { + ToImmutable() UTXODiff + + WithDiff(other UTXODiff) (UTXODiff, error) + DiffFrom(other UTXODiff) (UTXODiff, error) + ToAdd() UTXOCollection + ToRemove() UTXOCollection + + WithDiffInPlace(other UTXODiff) error + AddTransaction(transaction *DomainTransaction, blockDAAScore uint64) error +} diff --git a/consensus/model/externalapi/utxoentry.go b/consensus/model/externalapi/utxoentry.go new file mode 100644 index 00000000..fb628b60 --- /dev/null +++ b/consensus/model/externalapi/utxoentry.go @@ -0,0 +1,20 @@ +package externalapi + +// UTXOEntry houses details about an individual transaction output in a utxo +// set such as whether or not it was contained in a coinbase tx, the daa +// score of the block that accepts the tx, its public key script, and how +// much it pays. +type UTXOEntry interface { + Amount() uint64 // Utxo amount in Sompis + ScriptPublicKey() *ScriptPublicKey // The public key script for the output. + BlockDAAScore() uint64 // Daa score of the block accepting the tx. 
+ IsCoinbase() bool + Equal(other UTXOEntry) bool +} + +// OutpointAndUTXOEntryPair is an outpoint along with its +// respective UTXO entry +type OutpointAndUTXOEntryPair struct { + Outpoint *DomainOutpoint + UTXOEntry UTXOEntry +} diff --git a/consensus/model/externalapi/virtual.go b/consensus/model/externalapi/virtual.go new file mode 100644 index 00000000..42eb975b --- /dev/null +++ b/consensus/model/externalapi/virtual.go @@ -0,0 +1,10 @@ +package externalapi + +// VirtualInfo represents information about the virtual block needed by external components +type VirtualInfo struct { + ParentHashes []*DomainHash + Bits uint32 + PastMedianTime int64 + BlueScore uint64 + DAAScore uint64 +} diff --git a/consensus/model/interface_datastructures_blockheaderstore.go b/consensus/model/interface_datastructures_blockheaderstore.go new file mode 100644 index 00000000..6c683084 --- /dev/null +++ b/consensus/model/interface_datastructures_blockheaderstore.go @@ -0,0 +1,15 @@ +package model + +import "github.com/Qitmeer/qng/consensus/model/externalapi" + +// BlockHeaderStore represents a store of block headers +type BlockHeaderStore interface { + Store + Stage(stagingArea *StagingArea, blockHash *externalapi.DomainHash, blockHeader externalapi.BlockHeader) + IsStaged(stagingArea *StagingArea) bool + BlockHeader(dbContext DBReader, stagingArea *StagingArea, blockHash *externalapi.DomainHash) (externalapi.BlockHeader, error) + HasBlockHeader(dbContext DBReader, stagingArea *StagingArea, blockHash *externalapi.DomainHash) (bool, error) + BlockHeaders(dbContext DBReader, stagingArea *StagingArea, blockHashes []*externalapi.DomainHash) ([]externalapi.BlockHeader, error) + Delete(stagingArea *StagingArea, blockHash *externalapi.DomainHash) + Count(stagingArea *StagingArea) uint64 +} diff --git a/consensus/model/interface_datastructures_daablocksstore.go b/consensus/model/interface_datastructures_daablocksstore.go new file mode 100644 index 00000000..d750c995 --- /dev/null +++ b/consensus/model/interface_datastructures_daablocksstore.go @@ -0,0 +1,14 @@ +package model + +import "github.com/Qitmeer/qng/consensus/model/externalapi" + +// DAABlocksStore represents a store of each block's DAA score and DAA-added blocks
+type DAABlocksStore interface { + Store + StageDAAScore(stagingArea *StagingArea, blockHash *externalapi.DomainHash, daaScore uint64) + StageBlockDAAAddedBlocks(stagingArea *StagingArea, blockHash *externalapi.DomainHash, addedBlocks []*externalapi.DomainHash) + IsStaged(stagingArea *StagingArea) bool + DAAAddedBlocks(dbContext DBReader, stagingArea *StagingArea, blockHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) + DAAScore(dbContext DBReader, stagingArea *StagingArea, blockHash *externalapi.DomainHash) (uint64, error) + Delete(stagingArea *StagingArea, blockHash *externalapi.DomainHash) +} diff --git a/consensus/model/interface_datastructures_ghostdagdatastore.go b/consensus/model/interface_datastructures_ghostdagdatastore.go new file mode 100644 index 00000000..80a4e1fc --- /dev/null +++ b/consensus/model/interface_datastructures_ghostdagdatastore.go @@ -0,0 +1,12 @@ +package model + +import "github.com/Qitmeer/qng/domain/consensus/model/externalapi" + +// GHOSTDAGDataStore represents a store of BlockGHOSTDAGData +type GHOSTDAGDataStore interface { + Store + Stage(stagingArea *StagingArea, blockHash *externalapi.DomainHash, blockGHOSTDAGData *externalapi.BlockGHOSTDAGData, isTrustedData bool) + IsStaged(stagingArea *StagingArea) bool + Get(dbContext DBReader, stagingArea *StagingArea, blockHash *externalapi.DomainHash, isTrustedData bool) (*externalapi.BlockGHOSTDAGData, error) + UnstageAll(stagingArea *StagingArea) +} diff --git a/consensus/model/interface_processes_dagtopologymanager.go b/consensus/model/interface_processes_dagtopologymanager.go new file mode 100644 index 00000000..81b496d6 --- /dev/null +++ b/consensus/model/interface_processes_dagtopologymanager.go @@ -0,0 +1,19 @@ +package model + +import "github.com/Qitmeer/qng/consensus/model/externalapi" + +// DAGTopologyManager exposes methods for querying relationships +// between blocks in the DAG +type DAGTopologyManager interface { + Parents(stagingArea *StagingArea, blockHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) + Children(stagingArea *StagingArea, blockHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) + IsParentOf(stagingArea *StagingArea, blockHashA *externalapi.DomainHash, blockHashB *externalapi.DomainHash) (bool, error) + IsChildOf(stagingArea *StagingArea, blockHashA *externalapi.DomainHash, blockHashB *externalapi.DomainHash) (bool, error) + IsAncestorOf(stagingArea *StagingArea, blockHashA *externalapi.DomainHash, blockHashB *externalapi.DomainHash) (bool, error) + IsAncestorOfAny(stagingArea *StagingArea, blockHash *externalapi.DomainHash, potentialDescendants []*externalapi.DomainHash) (bool, error) + IsAnyAncestorOf(stagingArea *StagingArea, potentialAncestors []*externalapi.DomainHash, blockHash *externalapi.DomainHash) (bool, error) + IsInSelectedParentChainOf(stagingArea *StagingArea, blockHashA *externalapi.DomainHash, blockHashB *externalapi.DomainHash) (bool, error) + ChildInSelectedParentChainOf(stagingArea *StagingArea, lowHash, highHash *externalapi.DomainHash) (*externalapi.DomainHash, error) + + SetParents(stagingArea *StagingArea, blockHash *externalapi.DomainHash, parentHashes []*externalapi.DomainHash) error +} diff --git a/consensus/model/interface_processes_dagtraversalmanager.go b/consensus/model/interface_processes_dagtraversalmanager.go new file mode 100644 index 00000000..c1597832 --- /dev/null +++ b/consensus/model/interface_processes_dagtraversalmanager.go @@ -0,0 +1,21 @@ +package model + +import 
"github.com/Qitmeer/qng/consensus/model/externalapi" + +// DAGTraversalManager exposes methods for traversing blocks +// in the DAG +type DAGTraversalManager interface { + LowestChainBlockAboveOrEqualToBlueScore(stagingArea *StagingArea, highHash *externalapi.DomainHash, blueScore uint64) (*externalapi.DomainHash, error) + // SelectedChildIterator should return a BlockIterator that iterates + // from lowHash (exclusive) to highHash (inclusive) over highHash's selected parent chain + SelectedChildIterator(stagingArea *StagingArea, highHash, lowHash *externalapi.DomainHash, includeLowHash bool) (BlockIterator, error) + SelectedChild(stagingArea *StagingArea, highHash, lowHash *externalapi.DomainHash) (*externalapi.DomainHash, error) + AnticoneFromBlocks(stagingArea *StagingArea, tips []*externalapi.DomainHash, blockHash *externalapi.DomainHash, maxTraversalAllowed uint64) ([]*externalapi.DomainHash, error) + AnticoneFromVirtualPOV(stagingArea *StagingArea, blockHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) + BlockWindow(stagingArea *StagingArea, highHash *externalapi.DomainHash, windowSize int) ([]*externalapi.DomainHash, error) + DAABlockWindow(stagingArea *StagingArea, highHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) + NewDownHeap(stagingArea *StagingArea) BlockHeap + NewUpHeap(stagingArea *StagingArea) BlockHeap + CalculateChainPath(stagingArea *StagingArea, fromBlockHash, toBlockHash *externalapi.DomainHash) ( + *externalapi.SelectedChainPath, error) +} diff --git a/consensus/model/interface_processes_difficultymanager.go b/consensus/model/interface_processes_difficultymanager.go new file mode 100644 index 00000000..9deafac0 --- /dev/null +++ b/consensus/model/interface_processes_difficultymanager.go @@ -0,0 +1,13 @@ +package model + +import ( + "github.com/Qitmeer/qng/consensus/model/externalapi" +) + +// DifficultyManager provides a method to resolve the +// difficulty value of a block +type DifficultyManager interface { + StageDAADataAndReturnRequiredDifficulty(stagingArea *StagingArea, blockHash *externalapi.DomainHash, isBlockWithTrustedData bool) (uint32, error) + RequiredDifficulty(stagingArea *StagingArea, blockHash *externalapi.DomainHash) (uint32, error) + EstimateNetworkHashesPerSecond(startHash *externalapi.DomainHash, windowSize int) (uint64, error) +} diff --git a/consensus/model/interface_processes_ghostdagmanager.go b/consensus/model/interface_processes_ghostdagmanager.go new file mode 100644 index 00000000..5330a989 --- /dev/null +++ b/consensus/model/interface_processes_ghostdagmanager.go @@ -0,0 +1,12 @@ +package model + +import "github.com/Qitmeer/qng/consensus/model/externalapi" + +// GHOSTDAGManager resolves and manages GHOSTDAG block data +type GHOSTDAGManager interface { + GHOSTDAG(stagingArea *StagingArea, blockHash *externalapi.DomainHash) error + ChooseSelectedParent(stagingArea *StagingArea, blockHashes ...*externalapi.DomainHash) (*externalapi.DomainHash, error) + Less(blockHashA *externalapi.DomainHash, ghostdagDataA *externalapi.BlockGHOSTDAGData, + blockHashB *externalapi.DomainHash, ghostdagDataB *externalapi.BlockGHOSTDAGData) bool + GetSortedMergeSet(stagingArea *StagingArea, current *externalapi.DomainHash) ([]*externalapi.DomainHash, error) +} diff --git a/core/blockchain/blockchain.go b/core/blockchain/blockchain.go index 5e626114..d7205dd6 100644 --- a/core/blockchain/blockchain.go +++ b/core/blockchain/blockchain.go @@ -5,6 +5,10 @@ package blockchain import ( "container/list" "fmt" + "sort" + "sync" + "time" 
+ "github.com/Qitmeer/qng/common/hash" "github.com/Qitmeer/qng/common/roughtime" "github.com/Qitmeer/qng/common/system" @@ -28,9 +32,6 @@ import ( "github.com/Qitmeer/qng/params" "github.com/Qitmeer/qng/services/progresslog" "github.com/schollz/progressbar/v3" - "sort" - "sync" - "time" ) const ( @@ -136,6 +137,7 @@ type BlockChain struct { quit chan struct{} meerChain *meer.MeerChain + dm *model.DifficultyManager } func (b *BlockChain) Init() error { diff --git a/core/types/pow/diff.go b/core/types/pow/diff.go index d05ff176..456a3239 100644 --- a/core/types/pow/diff.go +++ b/core/types/pow/diff.go @@ -5,8 +5,9 @@ package pow import ( "fmt" - "github.com/Qitmeer/qng/common/hash" "math/big" + + "github.com/Qitmeer/qng/common/hash" ) var ( @@ -40,18 +41,21 @@ func HashToBig(hash *hash.Hash) *big.Int { // Like IEEE754 floating point, there are three basic components: the sign, // the exponent, and the mantissa. They are broken out as follows: // -// * the most significant 8 bits represent the unsigned base 256 exponent -// * bit 23 (the 24th bit) represents the sign bit -// * the least significant 23 bits represent the mantissa +// - the most significant 8 bits represent the unsigned base 256 exponent // -// ------------------------------------------------- -// | Exponent | Sign | Mantissa | -// ------------------------------------------------- -// | 8 bits [31-24] | 1 bit [23] | 23 bits [22-00] | -// ------------------------------------------------- +// - bit 23 (the 24th bit) represents the sign bit +// +// - the least significant 23 bits represent the mantissa +// +// ------------------------------------------------- +// | Exponent | Sign | Mantissa | +// ------------------------------------------------- +// | 8 bits [31-24] | 1 bit [23] | 23 bits [22-00] | +// ------------------------------------------------- // // The formula to calculate N is: -// N = (-1^sign) * mantissa * 256^(exponent-3) +// +// N = (-1^sign) * mantissa * 256^(exponent-3) // // This compact form is only used to encode unsigned 256-bit numbers which // represent difficulty targets, thus there really is not a need for a sign @@ -190,7 +194,7 @@ func mergeDifficulty(oldDiff int64, newDiff1 int64, newDiff2 int64) int64 { return summedChange.Int64() } -//calc cuckoo diff +// calc cuckoo diff func CalcCuckooDiff(scale uint64, blockHash hash.Hash) *big.Int { c := HashToBig(&blockHash) max := big.NewInt(1).Lsh(bigOne, 256) @@ -207,7 +211,7 @@ func CalcCuckooDiff(scale uint64, blockHash hash.Hash) *big.Int { return e } -//calc cuckoo diff convert to target hash like 7fff000000000000000000000000000000000000000000000000000000000000 +// calc cuckoo diff convert to target hash like 7fff000000000000000000000000000000000000000000000000000000000000 func CuckooDiffToTarget(scale uint64, diff *big.Int) string { a := &big.Int{} a.SetUint64(scale) @@ -220,3 +224,32 @@ func CuckooDiffToTarget(scale uint64, diff *big.Int) string { b := a.Bytes() return fmt.Sprintf("%064x", b) } + +// CompactToBigWithDestination is a version of CompactToBig that +// takes a destination parameter. This is useful for saving memory, +// as then the destination big.Int can be reused. +// See CompactToBig for further details. +func CompactToBigWithDestination(compact uint32, destination *big.Int) { + // Extract the mantissa, sign bit, and exponent. 
+ mantissa := compact & 0x007fffff + isNegative := compact&0x00800000 != 0 + exponent := uint(compact >> 24) + + // Since the base for the exponent is 256, the exponent can be treated + // as the number of bytes to represent the full 256-bit number. So, + // treat the exponent as the number of bytes and shift the mantissa + // right or left accordingly. This is equivalent to: + // N = mantissa * 256^(exponent-3) + if exponent <= 3 { + mantissa >>= 8 * (3 - exponent) + destination.SetInt64(int64(mantissa)) + } else { + destination.SetInt64(int64(mantissa)) + destination.Lsh(destination, 8*(exponent-3)) + } + + // Make it negative if the sign bit is set. + if isNegative { + destination.Neg(destination) + } +} From e39e14ba0c3f8c75fbaef1b825f721bf044ecd88 Mon Sep 17 00:00:00 2001 From: frankcrypto Date: Thu, 30 Nov 2023 00:26:18 +0800 Subject: [PATCH 03/15] Modify difficulty adjustment --- consensus/dagtraversalmanager/anticone.go | 77 -- consensus/dagtraversalmanager/block_heap.go | 204 --- .../dagtraversalmanager.go | 129 -- .../dagtraversalmanager_test.go | 117 -- .../selected_child_iterator.go | 109 -- consensus/dagtraversalmanager/window.go | 200 --- consensus/dagtraversalmanager/window_test.go | 369 ------ consensus/difficultymanager/blockwindow.go | 103 -- .../difficultymanager/difficultymanager.go | 129 +- .../difficultymanager_test.go | 357 ------ consensus/difficultymanager/hashrate.go | 74 -- consensus/model/block_heap.go | 12 - consensus/model/blockiterator.go | 11 - consensus/model/externalapi/acceptancedata.go | 145 --- consensus/model/externalapi/block.go | 84 -- .../externalapi/block_equal_clone_test.go | 499 -------- .../externalapi/block_with_trusted_data.go | 23 - consensus/model/externalapi/blockinfo.go | 37 - .../model/externalapi/blockinfo_clone_test.go | 108 -- .../model/externalapi/blocklevelparents.go | 63 - consensus/model/externalapi/blocklocator.go | 24 - .../externalapi/blocklocator_clone_test.go | 76 -- consensus/model/externalapi/blockstatus.go | 49 - .../blockstatus_equal_clone_test.go | 87 -- consensus/model/externalapi/blocktemplate.go | 19 - consensus/model/externalapi/coinbase.go | 38 - .../model/externalapi/coinbase_clone_test.go | 59 - consensus/model/externalapi/consensus.go | 59 - .../model/externalapi/consensus_events.go | 30 - consensus/model/externalapi/ghostdag.go | 67 - consensus/model/externalapi/hash.go | 123 -- .../externalapi/hash_clone_equal_test.go | 79 -- .../model/externalapi/pruning_point_proof.go | 6 - .../model/externalapi/readonlyutxoset.go | 10 - consensus/model/externalapi/subnetworkid.go | 33 - .../subnetworkid_clone_equal_test.go | 99 -- consensus/model/externalapi/sync.go | 36 - .../externalapi/sync_equal_clone_test.go | 99 -- consensus/model/externalapi/transaction.go | 363 ------ .../transaction_equal_clone_test.go | 1107 ----------------- consensus/model/externalapi/utxodiff.go | 32 - consensus/model/externalapi/utxoentry.go | 20 - consensus/model/externalapi/virtual.go | 10 - ...terface_datastructures_blockheaderstore.go | 15 - ...interface_datastructures_daablocksstore.go | 14 - ...erface_datastructures_ghostdagdatastore.go | 12 - .../interface_processes_dagtopologymanager.go | 19 - ...interface_processes_dagtraversalmanager.go | 21 - .../interface_processes_difficultymanager.go | 55 +- .../interface_processes_ghostdagmanager.go | 12 - core/blockchain/blockchain.go | 4 +- core/blockchain/difficulty.go | 53 +- params/params_mixnet.go | 9 +- 53 files changed, 122 insertions(+), 5467 deletions(-) delete mode 100644 
consensus/dagtraversalmanager/anticone.go delete mode 100644 consensus/dagtraversalmanager/block_heap.go delete mode 100644 consensus/dagtraversalmanager/dagtraversalmanager.go delete mode 100644 consensus/dagtraversalmanager/dagtraversalmanager_test.go delete mode 100644 consensus/dagtraversalmanager/selected_child_iterator.go delete mode 100644 consensus/dagtraversalmanager/window.go delete mode 100644 consensus/dagtraversalmanager/window_test.go delete mode 100644 consensus/difficultymanager/blockwindow.go delete mode 100644 consensus/difficultymanager/difficultymanager_test.go delete mode 100644 consensus/difficultymanager/hashrate.go delete mode 100644 consensus/model/block_heap.go delete mode 100644 consensus/model/blockiterator.go delete mode 100644 consensus/model/externalapi/acceptancedata.go delete mode 100644 consensus/model/externalapi/block.go delete mode 100644 consensus/model/externalapi/block_equal_clone_test.go delete mode 100644 consensus/model/externalapi/block_with_trusted_data.go delete mode 100644 consensus/model/externalapi/blockinfo.go delete mode 100644 consensus/model/externalapi/blockinfo_clone_test.go delete mode 100644 consensus/model/externalapi/blocklevelparents.go delete mode 100644 consensus/model/externalapi/blocklocator.go delete mode 100644 consensus/model/externalapi/blocklocator_clone_test.go delete mode 100644 consensus/model/externalapi/blockstatus.go delete mode 100644 consensus/model/externalapi/blockstatus_equal_clone_test.go delete mode 100644 consensus/model/externalapi/blocktemplate.go delete mode 100644 consensus/model/externalapi/coinbase.go delete mode 100644 consensus/model/externalapi/coinbase_clone_test.go delete mode 100644 consensus/model/externalapi/consensus.go delete mode 100644 consensus/model/externalapi/consensus_events.go delete mode 100644 consensus/model/externalapi/ghostdag.go delete mode 100644 consensus/model/externalapi/hash.go delete mode 100644 consensus/model/externalapi/hash_clone_equal_test.go delete mode 100644 consensus/model/externalapi/pruning_point_proof.go delete mode 100644 consensus/model/externalapi/readonlyutxoset.go delete mode 100644 consensus/model/externalapi/subnetworkid.go delete mode 100644 consensus/model/externalapi/subnetworkid_clone_equal_test.go delete mode 100644 consensus/model/externalapi/sync.go delete mode 100644 consensus/model/externalapi/sync_equal_clone_test.go delete mode 100644 consensus/model/externalapi/transaction.go delete mode 100644 consensus/model/externalapi/transaction_equal_clone_test.go delete mode 100644 consensus/model/externalapi/utxodiff.go delete mode 100644 consensus/model/externalapi/utxoentry.go delete mode 100644 consensus/model/externalapi/virtual.go delete mode 100644 consensus/model/interface_datastructures_blockheaderstore.go delete mode 100644 consensus/model/interface_datastructures_daablocksstore.go delete mode 100644 consensus/model/interface_datastructures_ghostdagdatastore.go delete mode 100644 consensus/model/interface_processes_dagtopologymanager.go delete mode 100644 consensus/model/interface_processes_dagtraversalmanager.go delete mode 100644 consensus/model/interface_processes_ghostdagmanager.go diff --git a/consensus/dagtraversalmanager/anticone.go b/consensus/dagtraversalmanager/anticone.go deleted file mode 100644 index 21cfb3ce..00000000 --- a/consensus/dagtraversalmanager/anticone.go +++ /dev/null @@ -1,77 +0,0 @@ -package dagtraversalmanager - -import ( - "github.com/Qitmeer/qng/consensus/model" - 
"github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/Qitmeer/qng/consensus/utils/hashset" - "github.com/pkg/errors" -) - -func (dtm *dagTraversalManager) AnticoneFromVirtualPOV(stagingArea *model.StagingArea, blockHash *externalapi.DomainHash) ( - []*externalapi.DomainHash, error) { - - virtualParents, err := dtm.dagTopologyManager.Parents(stagingArea, model.VirtualBlockHash) - if err != nil { - return nil, err - } - - return dtm.AnticoneFromBlocks(stagingArea, virtualParents, blockHash, 0) -} - -func (dtm *dagTraversalManager) AnticoneFromBlocks(stagingArea *model.StagingArea, tips []*externalapi.DomainHash, - blockHash *externalapi.DomainHash, maxTraversalAllowed uint64) ( - []*externalapi.DomainHash, error) { - - anticone := []*externalapi.DomainHash{} - queue := tips - visited := hashset.New() - - traversalCounter := uint64(0) - for len(queue) > 0 { - var current *externalapi.DomainHash - current, queue = queue[0], queue[1:] - - if visited.Contains(current) { - continue - } - - visited.Add(current) - - currentIsAncestorOfBlock, err := dtm.dagTopologyManager.IsAncestorOf(stagingArea, current, blockHash) - if err != nil { - return nil, err - } - - if currentIsAncestorOfBlock { - continue - } - - blockIsAncestorOfCurrent, err := dtm.dagTopologyManager.IsAncestorOf(stagingArea, blockHash, current) - if err != nil { - return nil, err - } - - // We count the number of blocks in past(tips) \setminus past(blockHash). - // We don't use `len(visited)` since it includes some maximal blocks in past(blockHash) as well. - traversalCounter++ - if maxTraversalAllowed > 0 && traversalCounter > maxTraversalAllowed { - return nil, errors.Wrapf(model.ErrReachedMaxTraversalAllowed, - "Passed max allowed traversal (%d > %d)", traversalCounter, maxTraversalAllowed) - } - - if !blockIsAncestorOfCurrent { - anticone = append(anticone, current) - } - - currentParents, err := dtm.dagTopologyManager.Parents(stagingArea, current) - if err != nil { - return nil, err - } - - for _, parent := range currentParents { - queue = append(queue, parent) - } - } - - return anticone, nil -} diff --git a/consensus/dagtraversalmanager/block_heap.go b/consensus/dagtraversalmanager/block_heap.go deleted file mode 100644 index 639ff54f..00000000 --- a/consensus/dagtraversalmanager/block_heap.go +++ /dev/null @@ -1,204 +0,0 @@ -package dagtraversalmanager - -import ( - "container/heap" - - "github.com/Qitmeer/qng/consensus/model" - "github.com/Qitmeer/qng/consensus/model/externalapi" -) - -func blockGHOSTDAGDataHashPairLess(left, right *externalapi.BlockGHOSTDAGDataHashPair, gm model.GHOSTDAGManager) bool { - return gm.Less(left.Hash, left.GHOSTDAGData, right.Hash, right.GHOSTDAGData) -} - -// baseHeap is an implementation for heap.Interface that sorts blocks by their blueWork+hash -type baseHeap struct { - slice []*externalapi.BlockGHOSTDAGDataHashPair - ghostdagManager model.GHOSTDAGManager -} - -func (h *baseHeap) Len() int { return len(h.slice) } -func (h *baseHeap) Swap(i, j int) { h.slice[i], h.slice[j] = h.slice[j], h.slice[i] } - -func (h *baseHeap) Push(x interface{}) { - h.slice = append(h.slice, x.(*externalapi.BlockGHOSTDAGDataHashPair)) -} - -func (h *baseHeap) Pop() interface{} { - oldSlice := h.slice - oldLength := len(oldSlice) - popped := oldSlice[oldLength-1] - h.slice = oldSlice[0 : oldLength-1] - return popped -} - -// peek returns the block with lowest blueWork+hash from this heap without removing it -func (h *baseHeap) peek() *externalapi.BlockGHOSTDAGDataHashPair { - return h.slice[0] -} - -// 
upHeap extends baseHeap to include Less operation that traverses from bottom to top -type upHeap struct{ baseHeap } - -func (h *upHeap) Less(i, j int) bool { - heapNodeI := h.slice[i] - heapNodeJ := h.slice[j] - return blockGHOSTDAGDataHashPairLess(heapNodeI, heapNodeJ, h.ghostdagManager) -} - -// downHeap extends baseHeap to include Less operation that traverses from top to bottom -type downHeap struct{ baseHeap } - -func (h *downHeap) Less(i, j int) bool { - heapNodeI := h.slice[i] - heapNodeJ := h.slice[j] - return !blockGHOSTDAGDataHashPairLess(heapNodeI, heapNodeJ, h.ghostdagManager) -} - -// blockHeap represents a mutable heap of blocks, sorted by their blueWork+hash -type blockHeap struct { - impl heap.Interface - ghostdagStore model.GHOSTDAGDataStore - dbContext model.DBReader - stagingArea *model.StagingArea -} - -// NewDownHeap initializes and returns a new blockHeap -func (dtm *dagTraversalManager) NewDownHeap(stagingArea *model.StagingArea) model.BlockHeap { - h := blockHeap{ - impl: &downHeap{baseHeap{ghostdagManager: dtm.ghostdagManager}}, - ghostdagStore: dtm.ghostdagDataStore, - dbContext: dtm.databaseContext, - stagingArea: stagingArea, - } - heap.Init(h.impl) - return &h -} - -// NewUpHeap initializes and returns a new blockHeap -func (dtm *dagTraversalManager) NewUpHeap(stagingArea *model.StagingArea) model.BlockHeap { - h := blockHeap{ - impl: &upHeap{baseHeap{ghostdagManager: dtm.ghostdagManager}}, - ghostdagStore: dtm.ghostdagDataStore, - dbContext: dtm.databaseContext, - stagingArea: stagingArea, - } - heap.Init(h.impl) - return &h -} - -// Pop removes the block with lowest blueWork+hash from this heap and returns it -func (bh *blockHeap) Pop() *externalapi.DomainHash { - return heap.Pop(bh.impl).(*externalapi.BlockGHOSTDAGDataHashPair).Hash -} - -// Push pushes the block onto the heap -func (bh *blockHeap) Push(blockHash *externalapi.DomainHash) error { - ghostdagData, err := bh.ghostdagStore.Get(bh.dbContext, bh.stagingArea, blockHash, false) - if err != nil { - return err - } - - heap.Push(bh.impl, &externalapi.BlockGHOSTDAGDataHashPair{ - Hash: blockHash, - GHOSTDAGData: ghostdagData, - }) - - return nil -} - -func (bh *blockHeap) PushSlice(blockHashes []*externalapi.DomainHash) error { - for _, blockHash := range blockHashes { - err := bh.Push(blockHash) - if err != nil { - return err - } - } - return nil -} - -// Len returns the length of this heap -func (bh *blockHeap) Len() int { - return bh.impl.Len() -} - -// ToSlice copies this heap to a slice -func (bh *blockHeap) ToSlice() []*externalapi.DomainHash { - length := bh.Len() - hashes := make([]*externalapi.DomainHash, length) - for i := 0; i < length; i++ { - hashes[i] = bh.Pop() - } - return hashes -} - -// sizedUpBlockHeap represents a mutable heap of Blocks, sorted by their blueWork+hash, capped by a specific size. 
-type sizedUpBlockHeap struct { - impl upHeap - ghostdagStore model.GHOSTDAGDataStore - dbContext model.DBReader - stagingArea *model.StagingArea -} - -// newSizedUpHeap initializes and returns a new sizedUpBlockHeap -func (dtm *dagTraversalManager) newSizedUpHeap(stagingArea *model.StagingArea, cap int) *sizedUpBlockHeap { - h := sizedUpBlockHeap{ - impl: upHeap{baseHeap{slice: make([]*externalapi.BlockGHOSTDAGDataHashPair, 0, cap), ghostdagManager: dtm.ghostdagManager}}, - ghostdagStore: dtm.ghostdagDataStore, - dbContext: dtm.databaseContext, - stagingArea: stagingArea, - } - heap.Init(&h.impl) - return &h -} - -func (dtm *dagTraversalManager) newSizedUpHeapFromSlice(stagingArea *model.StagingArea, slice []*externalapi.BlockGHOSTDAGDataHashPair) *sizedUpBlockHeap { - sliceClone := make([]*externalapi.BlockGHOSTDAGDataHashPair, len(slice), cap(slice)) - copy(sliceClone, slice) - h := sizedUpBlockHeap{ - impl: upHeap{baseHeap{slice: sliceClone, ghostdagManager: dtm.ghostdagManager}}, - ghostdagStore: dtm.ghostdagDataStore, - dbContext: dtm.databaseContext, - stagingArea: stagingArea, - } - return &h -} - -// len returns the length of this heap -func (sbh *sizedUpBlockHeap) len() int { - return sbh.impl.Len() -} - -// pop removes the block with lowest blueWork+hash from this heap and returns it -func (sbh *sizedUpBlockHeap) pop() *externalapi.DomainHash { - return heap.Pop(&sbh.impl).(*externalapi.BlockGHOSTDAGDataHashPair).Hash -} - -// tryPushWithGHOSTDAGData is just like tryPush but the caller provides the ghostdagData of the block. -func (sbh *sizedUpBlockHeap) tryPushWithGHOSTDAGData(blockHash *externalapi.DomainHash, - ghostdagData *externalapi.BlockGHOSTDAGData) (bool, error) { - - node := &externalapi.BlockGHOSTDAGDataHashPair{ - Hash: blockHash, - GHOSTDAGData: ghostdagData, - } - if len(sbh.impl.slice) == cap(sbh.impl.slice) { - min := sbh.impl.peek() - // if the heap is full, and the new block is less than the minimum, return false - if blockGHOSTDAGDataHashPairLess(node, min, sbh.impl.ghostdagManager) { - return false, nil - } - sbh.pop() - } - heap.Push(&sbh.impl, node) - return true, nil -} - -// tryPush tries to push the block onto the heap, if the heap is full and it's less than the minimum it rejects it -func (sbh *sizedUpBlockHeap) tryPush(blockHash *externalapi.DomainHash) (bool, error) { - ghostdagData, err := sbh.ghostdagStore.Get(sbh.dbContext, sbh.stagingArea, blockHash, false) - if err != nil { - return false, err - } - return sbh.tryPushWithGHOSTDAGData(blockHash, ghostdagData) -} diff --git a/consensus/dagtraversalmanager/dagtraversalmanager.go b/consensus/dagtraversalmanager/dagtraversalmanager.go deleted file mode 100644 index d2f0f8b4..00000000 --- a/consensus/dagtraversalmanager/dagtraversalmanager.go +++ /dev/null @@ -1,129 +0,0 @@ -package dagtraversalmanager - -import ( - "github.com/Qitmeer/qng/consensus/model" - "github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/pkg/errors" -) - -// dagTraversalManager exposes methods for traversing blocks -// in the DAG -type dagTraversalManager struct { - databaseContext model.DBReader - - dagTopologyManager model.DAGTopologyManager - ghostdagManager model.GHOSTDAGManager - ghostdagDataStore model.GHOSTDAGDataStore - reachabilityManager model.ReachabilityManager - daaWindowStore model.BlocksWithTrustedDataDAAWindowStore - genesisHash *externalapi.DomainHash - difficultyAdjustmentWindowSize int - windowHeapSliceStore model.WindowHeapSliceStore -} - -// New instantiates a new DAGTraversalManager -func New( 
- databaseContext model.DBReader, - dagTopologyManager model.DAGTopologyManager, - ghostdagDataStore model.GHOSTDAGDataStore, - reachabilityManager model.ReachabilityManager, - ghostdagManager model.GHOSTDAGManager, - daaWindowStore model.BlocksWithTrustedDataDAAWindowStore, - windowHeapSliceStore model.WindowHeapSliceStore, - genesisHash *externalapi.DomainHash, - difficultyAdjustmentWindowSize int) model.DAGTraversalManager { - return &dagTraversalManager{ - databaseContext: databaseContext, - dagTopologyManager: dagTopologyManager, - ghostdagDataStore: ghostdagDataStore, - reachabilityManager: reachabilityManager, - ghostdagManager: ghostdagManager, - daaWindowStore: daaWindowStore, - - genesisHash: genesisHash, - difficultyAdjustmentWindowSize: difficultyAdjustmentWindowSize, - windowHeapSliceStore: windowHeapSliceStore, - } -} - -func (dtm *dagTraversalManager) LowestChainBlockAboveOrEqualToBlueScore(stagingArea *model.StagingArea, highHash *externalapi.DomainHash, blueScore uint64) (*externalapi.DomainHash, error) { - highBlockGHOSTDAGData, err := dtm.ghostdagDataStore.Get(dtm.databaseContext, stagingArea, highHash, false) - if err != nil { - return nil, err - } - - if highBlockGHOSTDAGData.BlueScore() < blueScore { - return nil, errors.Errorf("the given blue score %d is higher than block %s blue score of %d", - blueScore, highHash, highBlockGHOSTDAGData.BlueScore()) - } - - currentHash := highHash - currentBlockGHOSTDAGData := highBlockGHOSTDAGData - - for !currentHash.Equal(dtm.genesisHash) { - selectedParentBlockGHOSTDAGData, err := dtm.ghostdagDataStore.Get(dtm.databaseContext, stagingArea, - currentBlockGHOSTDAGData.SelectedParent(), false) - if err != nil { - return nil, err - } - - if selectedParentBlockGHOSTDAGData.BlueScore() < blueScore { - break - } - currentHash = currentBlockGHOSTDAGData.SelectedParent() - currentBlockGHOSTDAGData = selectedParentBlockGHOSTDAGData - } - - return currentHash, nil -} - -func (dtm *dagTraversalManager) CalculateChainPath(stagingArea *model.StagingArea, - fromBlockHash, toBlockHash *externalapi.DomainHash) (*externalapi.SelectedChainPath, error) { - - // Walk down from fromBlockHash until we reach the common selected - // parent chain ancestor of fromBlockHash and toBlockHash. 
Note - // that this slice will be empty if fromBlockHash is the selected - // parent of toBlockHash - var removed []*externalapi.DomainHash - current := fromBlockHash - for { - isCurrentInTheSelectedParentChainOfNewVirtualSelectedParent, err := - dtm.dagTopologyManager.IsInSelectedParentChainOf(stagingArea, current, toBlockHash) - if err != nil { - return nil, err - } - if isCurrentInTheSelectedParentChainOfNewVirtualSelectedParent { - break - } - removed = append(removed, current) - - currentGHOSTDAGData, err := dtm.ghostdagDataStore.Get(dtm.databaseContext, stagingArea, current, false) - if err != nil { - return nil, err - } - current = currentGHOSTDAGData.SelectedParent() - } - commonAncestor := current - - // Walk down from the toBlockHash to the common ancestor - var added []*externalapi.DomainHash - current = toBlockHash - for !current.Equal(commonAncestor) { - added = append(added, current) - currentGHOSTDAGData, err := dtm.ghostdagDataStore.Get(dtm.databaseContext, stagingArea, current, false) - if err != nil { - return nil, err - } - current = currentGHOSTDAGData.SelectedParent() - } - - // Reverse the order of `added` so that it's sorted from low hash to high hash - for i, j := 0, len(added)-1; i < j; i, j = i+1, j-1 { - added[i], added[j] = added[j], added[i] - } - - return &externalapi.SelectedChainPath{ - Added: added, - Removed: removed, - }, nil -} diff --git a/consensus/dagtraversalmanager/dagtraversalmanager_test.go b/consensus/dagtraversalmanager/dagtraversalmanager_test.go deleted file mode 100644 index 72cc82df..00000000 --- a/consensus/dagtraversalmanager/dagtraversalmanager_test.go +++ /dev/null @@ -1,117 +0,0 @@ -package dagtraversalmanager_test - -import ( - "testing" - - "github.com/Qitmeer/qng/consensus/model" - - "github.com/Qitmeer/qng/consensus" - "github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/Qitmeer/qng/consensus/utils/testutils" -) - -func TestLowestChainBlockAboveOrEqualToBlueScore(t *testing.T) { - testutils.ForAllNets(t, true, func(t *testing.T, consensusConfig *consensus.Config) { - consensusConfig.FinalityDuration = 10 * consensusConfig.TargetTimePerBlock - factory := consensus.NewFactory() - tc, tearDown, err := factory.NewTestConsensus(consensusConfig, - "TestLowestChainBlockAboveOrEqualToBlueScore") - if err != nil { - t.Fatalf("NewTestConsensus: %s", err) - } - defer tearDown(false) - - stagingArea := model.NewStagingArea() - - checkExpectedBlock := func(highHash *externalapi.DomainHash, blueScore uint64, expected *externalapi.DomainHash) { - blockHash, err := tc.DAGTraversalManager().LowestChainBlockAboveOrEqualToBlueScore(stagingArea, highHash, blueScore) - if err != nil { - t.Fatalf("LowestChainBlockAboveOrEqualToBlueScore: %+v", err) - } - - if !blockHash.Equal(expected) { - t.Fatalf("Expected block %s but got %s", expected, blockHash) - } - } - - checkBlueScore := func(blockHash *externalapi.DomainHash, expectedBlueScore uint64) { - ghostdagData, err := tc.GHOSTDAGDataStore().Get(tc.DatabaseContext(), stagingArea, blockHash, false) - if err != nil { - t.Fatalf("GHOSTDAGDataStore().Get: %+v", err) - } - - if ghostdagData.BlueScore() != expectedBlueScore { - t.Fatalf("Expected blue score %d but got %d", expectedBlueScore, ghostdagData.BlueScore()) - } - } - - chain := []*externalapi.DomainHash{consensusConfig.GenesisHash} - tipHash := consensusConfig.GenesisHash - for i := 0; i < 9; i++ { - var err error - tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{tipHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", 
err) - } - - chain = append(chain, tipHash) - } - - sideChain1TipHash, _, err := tc.AddBlock([]*externalapi.DomainHash{consensusConfig.GenesisHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - - tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{sideChain1TipHash, tipHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - - chain = append(chain, tipHash) - blueScore11BlockHash := tipHash - checkBlueScore(blueScore11BlockHash, 11) - - for i := 0; i < 5; i++ { - var err error - tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{tipHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - - chain = append(chain, tipHash) - } - - sideChain2TipHash, _, err := tc.AddBlock([]*externalapi.DomainHash{consensusConfig.GenesisHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - - tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{sideChain2TipHash, tipHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - chain = append(chain, tipHash) - - blueScore18BlockHash := tipHash - checkBlueScore(blueScore18BlockHash, 18) - - for i := 0; i < 3; i++ { - var err error - tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{tipHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - - chain = append(chain, tipHash) - } - - // Check by exact blue score - checkExpectedBlock(tipHash, 0, consensusConfig.GenesisHash) - checkExpectedBlock(tipHash, 5, chain[5]) - checkExpectedBlock(tipHash, 19, chain[len(chain)-3]) - - // Check by non exact blue score - checkExpectedBlock(tipHash, 17, blueScore18BlockHash) - checkExpectedBlock(tipHash, 10, blueScore11BlockHash) - }) -} diff --git a/consensus/dagtraversalmanager/selected_child_iterator.go b/consensus/dagtraversalmanager/selected_child_iterator.go deleted file mode 100644 index 92e9cf57..00000000 --- a/consensus/dagtraversalmanager/selected_child_iterator.go +++ /dev/null @@ -1,109 +0,0 @@ -package dagtraversalmanager - -import ( - "github.com/Qitmeer/qng/consensus/model" - "github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/pkg/errors" -) - -type selectedChildIterator struct { - dagTraversalManager model.DAGTraversalManager - - includeLowHash bool - highHash, lowHash *externalapi.DomainHash - current *externalapi.DomainHash - err error - isClosed bool - stagingArea *model.StagingArea -} - -func (s *selectedChildIterator) First() bool { - if s.isClosed { - panic("Tried using a closed SelectedChildIterator") - } - s.current = s.lowHash - if s.includeLowHash { - return true - } - - return s.Next() -} - -func (s *selectedChildIterator) Next() bool { - if s.isClosed { - panic("Tried using a closed SelectedChildIterator") - } - if s.err != nil { - return true - } - - selectedChild, err := s.dagTraversalManager.SelectedChild(s.stagingArea, s.highHash, s.current) - if errors.Is(err, errNoSelectedChild) { - return false - } - if err != nil { - s.current = nil - s.err = err - return true - } - - s.current = selectedChild - return true -} - -func (s *selectedChildIterator) Get() (*externalapi.DomainHash, error) { - if s.isClosed { - return nil, errors.New("Tried using a closed SelectedChildIterator") - } - return s.current, s.err -} - -func (s *selectedChildIterator) Close() error { - if s.isClosed { - return errors.New("Tried using a closed SelectedChildIterator") - } - s.isClosed = true - s.highHash = nil - s.lowHash = nil - s.current = nil - s.err = nil - return nil -} - -// SelectedChildIterator returns a BlockIterator that iterates 
from lowHash (exclusive) to highHash (inclusive) over -// highHash's selected parent chain -func (dtm *dagTraversalManager) SelectedChildIterator(stagingArea *model.StagingArea, - highHash, lowHash *externalapi.DomainHash, includeLowHash bool) (model.BlockIterator, error) { - - isLowHashInSelectedParentChainOfHighHash, err := dtm.dagTopologyManager.IsInSelectedParentChainOf( - stagingArea, lowHash, highHash) - if err != nil { - return nil, err - } - - if !isLowHashInSelectedParentChainOfHighHash { - return nil, errors.Errorf("%s is not in the selected parent chain of %s", highHash, lowHash) - } - return &selectedChildIterator{ - dagTraversalManager: dtm, - includeLowHash: includeLowHash, - highHash: highHash, - lowHash: lowHash, - current: lowHash, - stagingArea: stagingArea, - }, nil -} - -var errNoSelectedChild = errors.New("errNoSelectedChild") - -func (dtm *dagTraversalManager) SelectedChild(stagingArea *model.StagingArea, - highHash, lowHash *externalapi.DomainHash) (*externalapi.DomainHash, error) { - - // The selected child is in fact the next reachability tree nextAncestor - nextAncestor, err := dtm.reachabilityManager.FindNextAncestor(stagingArea, highHash, lowHash) - if err != nil { - return nil, errors.Wrapf(errNoSelectedChild, "no selected child for %s from the point of view of %s", - lowHash, highHash) - } - return nextAncestor, nil -} diff --git a/consensus/dagtraversalmanager/window.go b/consensus/dagtraversalmanager/window.go deleted file mode 100644 index ffd2cde3..00000000 --- a/consensus/dagtraversalmanager/window.go +++ /dev/null @@ -1,200 +0,0 @@ -package dagtraversalmanager - -import ( - "github.com/Qitmeer/qng/consensus/model" - "github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/kaspanet/kaspad/infrastructure/db/database" -) - -func (dtm *dagTraversalManager) DAABlockWindow(stagingArea *model.StagingArea, highHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) { - return dtm.BlockWindow(stagingArea, highHash, dtm.difficultyAdjustmentWindowSize) -} - -// BlockWindow returns a blockWindow of the given size that contains the -// blocks in the past of highHash, the sorting is unspecified. 
-// If the number of blocks in the past of startingNode is less then windowSize, -func (dtm *dagTraversalManager) BlockWindow(stagingArea *model.StagingArea, highHash *externalapi.DomainHash, - windowSize int) ([]*externalapi.DomainHash, error) { - - windowHeap, err := dtm.blockWindowHeap(stagingArea, highHash, windowSize) - if err != nil { - return nil, err - } - - window := make([]*externalapi.DomainHash, 0, len(windowHeap.impl.slice)) - for _, b := range windowHeap.impl.slice { - window = append(window, b.Hash) - } - return window, nil -} - -func (dtm *dagTraversalManager) blockWindowHeap(stagingArea *model.StagingArea, - highHash *externalapi.DomainHash, windowSize int) (*sizedUpBlockHeap, error) { - windowHeapSlice, err := dtm.windowHeapSliceStore.Get(stagingArea, highHash, windowSize) - sliceNotCached := database.IsNotFoundError(err) - if !sliceNotCached && err != nil { - return nil, err - } - if !sliceNotCached { - return dtm.newSizedUpHeapFromSlice(stagingArea, windowHeapSlice), nil - } - - heap, err := dtm.calculateBlockWindowHeap(stagingArea, highHash, windowSize) - if err != nil { - return nil, err - } - - if !highHash.Equal(model.VirtualBlockHash) { - dtm.windowHeapSliceStore.Stage(stagingArea, highHash, windowSize, heap.impl.slice) - } - return heap, nil -} - -func (dtm *dagTraversalManager) calculateBlockWindowHeap(stagingArea *model.StagingArea, - highHash *externalapi.DomainHash, windowSize int) (*sizedUpBlockHeap, error) { - - windowHeap := dtm.newSizedUpHeap(stagingArea, windowSize) - if highHash.Equal(dtm.genesisHash) { - return windowHeap, nil - } - if windowSize == 0 { - return windowHeap, nil - } - - current := highHash - currentGHOSTDAGData, err := dtm.ghostdagDataStore.Get(dtm.databaseContext, stagingArea, highHash, false) - if err != nil { - return nil, err - } - - // If the block has a trusted DAA window attached, we just take it as is and don't use cache of selected parent to - // build the window. This is because tryPushMergeSet might not be able to find all the GHOSTDAG data that is - // associated with the block merge set. - _, err = dtm.daaWindowStore.DAAWindowBlock(dtm.databaseContext, stagingArea, current, 0) - isNonTrustedBlock := database.IsNotFoundError(err) - if !isNonTrustedBlock && err != nil { - return nil, err - } - - if isNonTrustedBlock && currentGHOSTDAGData.SelectedParent() != nil { - windowHeapSlice, err := dtm.windowHeapSliceStore.Get(stagingArea, currentGHOSTDAGData.SelectedParent(), windowSize) - selectedParentNotCached := database.IsNotFoundError(err) - if !selectedParentNotCached && err != nil { - return nil, err - } - if !selectedParentNotCached { - windowHeap := dtm.newSizedUpHeapFromSlice(stagingArea, windowHeapSlice) - if !currentGHOSTDAGData.SelectedParent().Equal(dtm.genesisHash) { - selectedParentGHOSTDAGData, err := dtm.ghostdagDataStore.Get( - dtm.databaseContext, stagingArea, currentGHOSTDAGData.SelectedParent(), false) - if err != nil { - return nil, err - } - - _, err = dtm.tryPushMergeSet(windowHeap, currentGHOSTDAGData, selectedParentGHOSTDAGData) - if err != nil { - return nil, err - } - } - - return windowHeap, nil - } - } - - // Walk down the chain until you finish or find a trusted block and then take complete the rest - // of the window with the trusted window. 
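// A minimal, self-contained sketch of the control flow of the loop below, assuming a
// hypothetical chainBlock type and a bounded-heap tryPush callback that reports whether a
// candidate still fits; the real code goes through the GHOSTDAG store and staging area,
// and additionally short-circuits at genesis and on trusted DAA windows.
type chainBlock struct {
	selectedParent *chainBlock
	mergeSet       []*chainBlock // ordered by blue work, lowest first
}

func fillWindowSketch(tip *chainBlock, tryPush func(*chainBlock) bool) {
	for current := tip; current.selectedParent != nil; current = current.selectedParent {
		// Push the selected parent first. If even it does not fit, every block
		// further down the chain has lower blue work and cannot fit either.
		if !tryPush(current.selectedParent) {
			return
		}
		// Then walk the merge set from highest blue work downwards; once one
		// candidate is rejected, everything after it is smaller and is skipped too.
		for i := len(current.mergeSet) - 1; i >= 0; i-- {
			if !tryPush(current.mergeSet[i]) {
				break
			}
		}
	}
}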
- for { - if currentGHOSTDAGData.SelectedParent().Equal(dtm.genesisHash) { - break - } - - _, err := dtm.daaWindowStore.DAAWindowBlock(dtm.databaseContext, stagingArea, current, 0) - currentIsNonTrustedBlock := database.IsNotFoundError(err) - if !currentIsNonTrustedBlock && err != nil { - return nil, err - } - - if !currentIsNonTrustedBlock { - for i := uint64(0); ; i++ { - daaBlock, err := dtm.daaWindowStore.DAAWindowBlock(dtm.databaseContext, stagingArea, current, i) - if database.IsNotFoundError(err) { - break - } - if err != nil { - return nil, err - } - - _, err = windowHeap.tryPushWithGHOSTDAGData(daaBlock.Hash, daaBlock.GHOSTDAGData) - if err != nil { - return nil, err - } - - // Right now we go over all of the window of `current` and filter blocks on the fly. - // We can optimize it if we make sure that daaWindowStore stores sorted windows, and - // then return from this function once one block was not added to the heap. - } - break - } - - selectedParentGHOSTDAGData, err := dtm.ghostdagDataStore.Get( - dtm.databaseContext, stagingArea, currentGHOSTDAGData.SelectedParent(), false) - if err != nil { - return nil, err - } - - done, err := dtm.tryPushMergeSet(windowHeap, currentGHOSTDAGData, selectedParentGHOSTDAGData) - if err != nil { - return nil, err - } - if done { - break - } - - current = currentGHOSTDAGData.SelectedParent() - currentGHOSTDAGData = selectedParentGHOSTDAGData - } - - return windowHeap, nil -} - -func (dtm *dagTraversalManager) tryPushMergeSet(windowHeap *sizedUpBlockHeap, currentGHOSTDAGData, selectedParentGHOSTDAGData *externalapi.BlockGHOSTDAGData) (bool, error) { - added, err := windowHeap.tryPushWithGHOSTDAGData(currentGHOSTDAGData.SelectedParent(), selectedParentGHOSTDAGData) - if err != nil { - return false, err - } - - // If the window is full and the selected parent is less than the minimum then we break - // because this means that there cannot be any more blocks in the past with higher blueWork - if !added { - return true, nil - } - - // Now we go over the merge set. - // Remove the SP from the blue merge set because we already added it. - mergeSetBlues := currentGHOSTDAGData.MergeSetBlues()[1:] - // Go over the merge set in reverse because it's ordered in reverse by blueWork. - for i := len(mergeSetBlues) - 1; i >= 0; i-- { - added, err := windowHeap.tryPush(mergeSetBlues[i]) - if err != nil { - return false, err - } - // If it's smaller than minimum then we won't be able to add the rest because they're even smaller. - if !added { - break - } - } - - mergeSetReds := currentGHOSTDAGData.MergeSetReds() - for i := len(mergeSetReds) - 1; i >= 0; i-- { - added, err := windowHeap.tryPush(mergeSetReds[i]) - if err != nil { - return false, err - } - // If it's smaller than minimum then we won't be able to add the rest because they're even smaller. 
- if !added { - break - } - } - - return false, nil -} diff --git a/consensus/dagtraversalmanager/window_test.go b/consensus/dagtraversalmanager/window_test.go deleted file mode 100644 index c10dac30..00000000 --- a/consensus/dagtraversalmanager/window_test.go +++ /dev/null @@ -1,369 +0,0 @@ -package dagtraversalmanager_test - -import ( - "reflect" - "sort" - "testing" - - "github.com/Qitmeer/qng/consensus/model" - - "github.com/Qitmeer/qng/consensus" - "github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/Qitmeer/qng/consensus/utils/hashset" - "github.com/Qitmeer/qng/consensus/utils/testutils" - "github.com/kaspanet/kaspad/domain/dagconfig" - "github.com/pkg/errors" -) - -func TestBlockWindow(t *testing.T) { - tests := map[string][]*struct { - parents []string - id string //id is a virtual entity that is used only for tests so we can define relations between blocks without knowing their hash - expectedWindow []string - }{ - dagconfig.MainnetParams.Name: { - { - parents: []string{"A"}, - id: "B", - expectedWindow: []string{}, - }, - { - parents: []string{"B"}, - id: "C", - expectedWindow: []string{"B"}, - }, - { - parents: []string{"B"}, - id: "D", - expectedWindow: []string{"B"}, - }, - { - parents: []string{"C", "D"}, - id: "E", - expectedWindow: []string{"C", "D", "B"}, - }, - { - parents: []string{"C", "D"}, - id: "F", - expectedWindow: []string{"C", "D", "B"}, - }, - { - parents: []string{"A"}, - id: "G", - expectedWindow: []string{}, - }, - { - parents: []string{"G"}, - id: "H", - expectedWindow: []string{"G"}, - }, - { - parents: []string{"H", "F"}, - id: "I", - expectedWindow: []string{"F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"I"}, - id: "J", - expectedWindow: []string{"I", "F", "C", "H", "D", "B", "G"}, - }, - // - { - parents: []string{"J"}, - id: "K", - expectedWindow: []string{"J", "I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"K"}, - id: "L", - expectedWindow: []string{"K", "J", "I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"L"}, - id: "M", - expectedWindow: []string{"L", "K", "J", "I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"M"}, - id: "N", - expectedWindow: []string{"M", "L", "K", "J", "I", "F", "C", "H", "D", "B"}, - }, - { - parents: []string{"N"}, - id: "O", - expectedWindow: []string{"N", "M", "L", "K", "J", "I", "F", "C", "H", "D"}, - }, - }, - dagconfig.TestnetParams.Name: { - { - parents: []string{"A"}, - id: "B", - expectedWindow: []string{}, - }, - { - parents: []string{"B"}, - id: "C", - expectedWindow: []string{"B"}, - }, - { - parents: []string{"B"}, - id: "D", - expectedWindow: []string{"B"}, - }, - { - parents: []string{"C", "D"}, - id: "E", - expectedWindow: []string{"C", "D", "B"}, - }, - { - parents: []string{"C", "D"}, - id: "F", - expectedWindow: []string{"C", "D", "B"}, - }, - { - parents: []string{"A"}, - id: "G", - expectedWindow: []string{}, - }, - { - parents: []string{"G"}, - id: "H", - expectedWindow: []string{"G"}, - }, - { - parents: []string{"H", "F"}, - id: "I", - expectedWindow: []string{"F", "C", "D", "H", "B", "G"}, - }, - { - parents: []string{"I"}, - id: "J", - expectedWindow: []string{"I", "F", "C", "D", "H", "B", "G"}, - }, - { - parents: []string{"J"}, - id: "K", - expectedWindow: []string{"J", "I", "F", "C", "D", "H", "B", "G"}, - }, - { - parents: []string{"K"}, - id: "L", - expectedWindow: []string{"K", "J", "I", "F", "C", "D", "H", "B", "G"}, - }, - { - parents: []string{"L"}, - id: "M", - expectedWindow: []string{"L", "K", "J", "I", "F", "C", 
"D", "H", "B", "G"}, - }, - { - parents: []string{"M"}, - id: "N", - expectedWindow: []string{"M", "L", "K", "J", "I", "F", "C", "D", "H", "B"}, - }, - { - parents: []string{"N"}, - id: "O", - expectedWindow: []string{"N", "M", "L", "K", "J", "I", "F", "C", "D", "H"}, - }, - }, - dagconfig.DevnetParams.Name: { - { - parents: []string{"A"}, - id: "B", - expectedWindow: []string{}, - }, - { - parents: []string{"B"}, - id: "C", - expectedWindow: []string{"B"}, - }, - { - parents: []string{"B"}, - id: "D", - expectedWindow: []string{"B"}, - }, - { - parents: []string{"C", "D"}, - id: "E", - expectedWindow: []string{"C", "D", "B"}, - }, - { - parents: []string{"C", "D"}, - id: "F", - expectedWindow: []string{"C", "D", "B"}, - }, - { - parents: []string{"A"}, - id: "G", - expectedWindow: []string{}, - }, - { - parents: []string{"G"}, - id: "H", - expectedWindow: []string{"G"}, - }, - { - parents: []string{"H", "F"}, - id: "I", - expectedWindow: []string{"F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"I"}, - id: "J", - expectedWindow: []string{"I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"J"}, - id: "K", - expectedWindow: []string{"J", "I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"K"}, - id: "L", - expectedWindow: []string{"K", "J", "I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"L"}, - id: "M", - expectedWindow: []string{"L", "K", "J", "I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"M"}, - id: "N", - expectedWindow: []string{"M", "L", "K", "J", "I", "F", "C", "H", "D", "B"}, - }, - { - parents: []string{"N"}, - id: "O", - expectedWindow: []string{"N", "M", "L", "K", "J", "I", "F", "C", "H", "D"}, - }, - }, - dagconfig.SimnetParams.Name: { - { - parents: []string{"A"}, - id: "B", - expectedWindow: []string{}, - }, - { - parents: []string{"B"}, - id: "C", - expectedWindow: []string{"B"}, - }, - { - parents: []string{"B"}, - id: "D", - expectedWindow: []string{"B"}, - }, - { - parents: []string{"C", "D"}, - id: "E", - expectedWindow: []string{"C", "D", "B"}, - }, - { - parents: []string{"C", "D"}, - id: "F", - expectedWindow: []string{"C", "D", "B"}, - }, - { - parents: []string{"A"}, - id: "G", - expectedWindow: []string{}, - }, - { - parents: []string{"G"}, - id: "H", - expectedWindow: []string{"G"}, - }, - { - parents: []string{"H", "F"}, - id: "I", - expectedWindow: []string{"F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"I"}, - id: "J", - expectedWindow: []string{"I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"J"}, - id: "K", - expectedWindow: []string{"J", "I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"K"}, - id: "L", - expectedWindow: []string{"K", "J", "I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"L"}, - id: "M", - expectedWindow: []string{"L", "K", "J", "I", "F", "C", "H", "D", "B", "G"}, - }, - { - parents: []string{"M"}, - id: "N", - expectedWindow: []string{"M", "L", "K", "J", "I", "F", "C", "H", "D", "B"}, - }, - { - parents: []string{"N"}, - id: "O", - expectedWindow: []string{"N", "M", "L", "K", "J", "I", "F", "C", "H", "D"}, - }, - }, - } - testutils.ForAllNets(t, true, func(t *testing.T, consensusConfig *consensus.Config) { - consensusConfig.K = 1 - factory := consensus.NewFactory() - tc, tearDown, err := factory.NewTestConsensus(consensusConfig, "TestBlockWindow") - if err != nil { - t.Fatalf("NewTestConsensus: %s", err) - } - defer tearDown(false) - - windowSize := 10 - blockByIDMap := 
make(map[string]*externalapi.DomainHash) - idByBlockMap := make(map[externalapi.DomainHash]string) - blockByIDMap["A"] = consensusConfig.GenesisHash - idByBlockMap[*consensusConfig.GenesisHash] = "A" - - blocksData := tests[consensusConfig.Name] - - for _, blockData := range blocksData { - parents := hashset.New() - for _, parentID := range blockData.parents { - parent := blockByIDMap[parentID] - parents.Add(parent) - } - - block, _, err := tc.AddBlock(parents.ToSlice(), nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - - blockByIDMap[blockData.id] = block - idByBlockMap[*block] = blockData.id - - stagingArea := model.NewStagingArea() - - window, err := tc.DAGTraversalManager().BlockWindow(stagingArea, block, windowSize) - if err != nil { - t.Fatalf("BlockWindow: %s", err) - } - sort.Sort(testutils.NewTestGhostDAGSorter(stagingArea, window, tc, t)) - if err := checkWindowIDs(window, blockData.expectedWindow, idByBlockMap); err != nil { - t.Errorf("Unexpected values for window for block %s: %s", blockData.id, err) - } - } - }) -} - -func checkWindowIDs(window []*externalapi.DomainHash, expectedIDs []string, idByBlockMap map[externalapi.DomainHash]string) error { - ids := make([]string, len(window)) - for i, node := range window { - ids[i] = idByBlockMap[*node] - } - if !reflect.DeepEqual(ids, expectedIDs) { - return errors.Errorf("window expected to have blocks %s but got %s", expectedIDs, ids) - } - return nil -} diff --git a/consensus/difficultymanager/blockwindow.go b/consensus/difficultymanager/blockwindow.go deleted file mode 100644 index aa3f3f13..00000000 --- a/consensus/difficultymanager/blockwindow.go +++ /dev/null @@ -1,103 +0,0 @@ -package difficultymanager - -import ( - "math" - "math/big" - - "github.com/Qitmeer/qng/consensus/model" - "github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/Qitmeer/qng/core/types/pow" -) - -type difficultyBlock struct { - timeInMilliseconds int64 - Bits uint32 - hash *externalapi.DomainHash - blueWork *big.Int -} - -type blockWindow []difficultyBlock - -func (dm *difficultyManager) getDifficultyBlock( - stagingArea *model.StagingArea, blockHash *externalapi.DomainHash) (difficultyBlock, error) { - - header, err := dm.headerStore.BlockHeader(dm.databaseContext, stagingArea, blockHash) - if err != nil { - return difficultyBlock{}, err - } - return difficultyBlock{ - timeInMilliseconds: header.TimeInMilliseconds(), - Bits: header.Bits(), - hash: blockHash, - blueWork: header.BlueWork(), - }, nil -} - -// blockWindow returns a blockWindow of the given size that contains the -// blocks in the past of startingNode, the sorting is unspecified. -// If the number of blocks in the past of startingNode is less then windowSize, -// the window will be padded by genesis blocks to achieve a size of windowSize. 
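// Each difficultyBlock collected by the blockWindow helper declared below carries its
// header's Bits field, the compact ("nBits") encoding of the proof-of-work target that
// averageTarget later decodes via pow.CompactToBigWithDestination. For reference only,
// a self-contained sketch of the standard compact decoding (math/big assumed imported);
// it is not this package's own implementation.
func compactToBigSketch(compact uint32) *big.Int {
	mantissa := compact & 0x007fffff
	exponent := uint(compact >> 24)
	negative := compact&0x00800000 != 0

	var target *big.Int
	if exponent <= 3 {
		target = big.NewInt(int64(mantissa >> (8 * (3 - exponent))))
	} else {
		target = big.NewInt(int64(mantissa))
		target.Lsh(target, 8*(exponent-3))
	}
	if negative {
		target.Neg(target)
	}
	return target
}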
-func (dm *difficultyManager) blockWindow(stagingArea *model.StagingArea, startingNode *externalapi.DomainHash, windowSize int) (blockWindow, - []*externalapi.DomainHash, error) { - - window := make(blockWindow, 0, windowSize) - windowHashes, err := dm.dagTraversalManager.BlockWindow(stagingArea, startingNode, windowSize) - if err != nil { - return nil, nil, err - } - - for _, hash := range windowHashes { - block, err := dm.getDifficultyBlock(stagingArea, hash) - if err != nil { - return nil, nil, err - } - window = append(window, block) - } - return window, windowHashes, nil -} - -func ghostdagLess(blockA *difficultyBlock, blockB *difficultyBlock) bool { - switch blockA.blueWork.Cmp(blockB.blueWork) { - case -1: - return true - case 1: - return false - case 0: - return blockA.hash.Less(blockB.hash) - default: - panic("big.Int.Cmp is defined to always return -1/1/0 and nothing else") - } -} - -func (window blockWindow) minMaxTimestamps() (min, max int64, minIndex int) { - min = math.MaxInt64 - minIndex = 0 - max = 0 - for i, block := range window { - // If timestamps are equal we ghostdag compare in order to reach consensus on `minIndex` - if block.timeInMilliseconds < min || - (block.timeInMilliseconds == min && ghostdagLess(&block, &window[minIndex])) { - min = block.timeInMilliseconds - minIndex = i - } - if block.timeInMilliseconds > max { - max = block.timeInMilliseconds - } - } - return -} - -func (window *blockWindow) remove(n int) { - (*window)[n] = (*window)[len(*window)-1] - *window = (*window)[:len(*window)-1] -} - -func (window blockWindow) averageTarget() *big.Int { - averageTarget := new(big.Int) - targetTmp := new(big.Int) - for _, block := range window { - pow.CompactToBigWithDestination(block.Bits, targetTmp) - averageTarget.Add(averageTarget, targetTmp) - } - return averageTarget.Div(averageTarget, big.NewInt(int64(len(window)))) -} diff --git a/consensus/difficultymanager/difficultymanager.go b/consensus/difficultymanager/difficultymanager.go index e5c737bc..4b3c101b 100644 --- a/consensus/difficultymanager/difficultymanager.go +++ b/consensus/difficultymanager/difficultymanager.go @@ -8,72 +8,38 @@ import ( "github.com/Qitmeer/qng/consensus/model" "github.com/Qitmeer/qng/core/types/pow" "github.com/Qitmeer/qng/params" - - "github.com/Qitmeer/qng/consensus/model/externalapi" ) // DifficultyManager provides a method to resolve the // difficulty value of a block type difficultyManager struct { - databaseContext model.DBReader - ghostdagManager model.GHOSTDAGManager - ghostdagStore model.GHOSTDAGDataStore - headerStore model.BlockHeaderStore - daaBlocksStore model.DAABlocksStore - dagTopologyManager model.DAGTopologyManager - dagTraversalManager model.DAGTraversalManager - genesisHash *externalapi.DomainHash powMax *big.Int difficultyAdjustmentWindowSize int disableDifficultyAdjustment bool targetTimePerBlock time.Duration genesisBits uint32 - cfg *params.Params } // New instantiates a new DifficultyManager func New(cfg *params.Params) model.DifficultyManager { return &difficultyManager{ - cfg: cfg, + powMax: cfg.PowConfig.MeerXKeccakV1PowLimit, + difficultyAdjustmentWindowSize: int(cfg.WorkDiffWindowSize), + disableDifficultyAdjustment: false, + targetTimePerBlock: cfg.TargetTimePerBlock, + genesisBits: cfg.PowConfig.MeerXKeccakV1PowLimitBits, } } -// StageDAADataAndReturnRequiredDifficulty calculates the DAA window, stages the DAA score and DAA added -// blocks, and returns the required difficulty for the given block. 
-// The reason this function both stages DAA data and returns the difficulty is because in order to calculate -// both of them we need to calculate the DAA window, which is a relatively heavy operation, so we reuse the -// block window instead of recalculating it for the two purposes. -// For cases where no staging should happen and the caller only needs to know the difficulty he should -// use RequiredDifficulty. -func (dm *difficultyManager) StageDAADataAndReturnRequiredDifficulty( - stagingArea *model.StagingArea, - blockHash *externalapi.DomainHash, - isBlockWithTrustedData bool) (uint32, error) { - - targetsWindow, windowHashes, err := dm.blockWindow(stagingArea, blockHash, dm.difficultyAdjustmentWindowSize) - if err != nil { - return 0, err - } - - err = dm.stageDAAScoreAndAddedBlocks(stagingArea, blockHash, windowHashes, isBlockWithTrustedData) - if err != nil { - return 0, err - } - - return dm.requiredDifficultyFromTargetsWindow(targetsWindow) -} - // RequiredDifficulty returns the difficulty required for some block -func (dm *difficultyManager) RequiredDifficulty(stagingArea *model.StagingArea, blockHash *externalapi.DomainHash) (uint32, error) { - targetsWindow, _, err := dm.blockWindow(stagingArea, blockHash, dm.difficultyAdjustmentWindowSize) - if err != nil { - return 0, err +func (dm *difficultyManager) RequiredDifficulty(targetsWindow model.BlockWindow, powInstance pow.IPow) (uint32, error) { + if powInstance.GetPowType() != pow.MEERXKECCAKV1 || len(targetsWindow) < 1 { + return dm.genesisBits, nil } - return dm.requiredDifficultyFromTargetsWindow(targetsWindow) } -func (dm *difficultyManager) requiredDifficultyFromTargetsWindow(targetsWindow blockWindow) (uint32, error) { +func (dm *difficultyManager) requiredDifficultyFromTargetsWindow(targetsWindow model.BlockWindow) (uint32, error) { if dm.disableDifficultyAdjustment { return dm.genesisBits, nil } @@ -89,16 +55,16 @@ func (dm *difficultyManager) requiredDifficultyFromTargetsWindow(targetsWindow b return dm.genesisBits, nil } - windowMinTimestamp, windowMaxTimeStamp, windowMinIndex := targetsWindow.minMaxTimestamps() + windowMinTimestamp, windowMaxTimeStamp, windowMinIndex := targetsWindow.MinMaxTimestamps() // Remove the last block from the window so to calculate the average target of dag.difficultyAdjustmentWindowSize blocks - targetsWindow.remove(windowMinIndex) + targetsWindow.Remove(windowMinIndex) // Calculate new target difficulty as: // averageWindowTarget * (windowMinTimestamp / (targetTimePerBlock * windowSize)) // The result uses integer division which means it will be slightly // rounded down. div := new(big.Int) - newTarget := targetsWindow.averageTarget() + newTarget := targetsWindow.AverageTarget() newTarget. // We need to clamp the timestamp difference to 1 so that we'll never get a 0 target. Mul(newTarget, div.SetInt64(math.MaxInt64(windowMaxTimeStamp-windowMinTimestamp, 1))). 
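// For reference, a self-contained sketch of the retarget arithmetic performed above,
// assuming millisecond timestamps and passing the window state in as plain values
// instead of manager fields (math/big assumed imported); the compact re-encoding of
// the result is left to the unchanged lines that follow.
func retargetSketch(averageTarget, powMax *big.Int,
	windowMinTs, windowMaxTs, targetTimePerBlockMs int64, windowSize int) *big.Int {

	// Clamp the elapsed time to 1ms so the new target can never collapse to zero.
	elapsed := windowMaxTs - windowMinTs
	if elapsed < 1 {
		elapsed = 1
	}

	// newTarget = averageTarget * elapsed / (targetTimePerBlock * windowSize),
	// in integer arithmetic, so the result is rounded slightly down.
	newTarget := new(big.Int).Mul(averageTarget, big.NewInt(elapsed))
	newTarget.Div(newTarget, big.NewInt(targetTimePerBlockMs))
	newTarget.Div(newTarget, big.NewInt(int64(windowSize)))

	// Never allow a target easier than the network's proof-of-work limit.
	if newTarget.Cmp(powMax) > 0 {
		return new(big.Int).Set(powMax)
	}
	return newTarget
}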
@@ -110,74 +76,3 @@ func (dm *difficultyManager) requiredDifficultyFromTargetsWindow(targetsWindow b newTargetBits := pow.BigToCompact(newTarget) return newTargetBits, nil } - -func (dm *difficultyManager) stageDAAScoreAndAddedBlocks(stagingArea *model.StagingArea, - blockHash *externalapi.DomainHash, - windowHashes []*externalapi.DomainHash, - isBlockWithTrustedData bool) error { - - daaScore, addedBlocks, err := dm.calculateDaaScoreAndAddedBlocks(stagingArea, blockHash, windowHashes, isBlockWithTrustedData) - if err != nil { - return err - } - - dm.daaBlocksStore.StageDAAScore(stagingArea, blockHash, daaScore) - dm.daaBlocksStore.StageBlockDAAAddedBlocks(stagingArea, blockHash, addedBlocks) - return nil -} - -func (dm *difficultyManager) calculateDaaScoreAndAddedBlocks(stagingArea *model.StagingArea, - blockHash *externalapi.DomainHash, - windowHashes []*externalapi.DomainHash, - isBlockWithTrustedData bool) (uint64, []*externalapi.DomainHash, error) { - - if blockHash.Equal(dm.genesisHash) { - genesisHeader, err := dm.headerStore.BlockHeader(dm.databaseContext, stagingArea, dm.genesisHash) - if err != nil { - return 0, nil, err - } - return genesisHeader.DAAScore(), nil, nil - } - - ghostdagData, err := dm.ghostdagStore.Get(dm.databaseContext, stagingArea, blockHash, false) - if err != nil { - return 0, nil, err - } - mergeSetLength := len(ghostdagData.MergeSetBlues()) + len(ghostdagData.MergeSetReds()) - mergeSet := make(map[externalapi.DomainHash]struct{}, mergeSetLength) - for _, hash := range ghostdagData.MergeSetBlues() { - mergeSet[*hash] = struct{}{} - } - - for _, hash := range ghostdagData.MergeSetReds() { - mergeSet[*hash] = struct{}{} - } - - // TODO: Consider optimizing by breaking the loop once you arrive to the - // window block with blue work higher than all non-added merge set blocks. 
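// Minimal sketch of the DAA-score rule implemented by the loop below for the
// non-trusted-data case, with string hashes and plain maps standing in for the store
// lookups: a block's score is its selected parent's score plus however many of its
// merge-set blocks landed inside the difficulty (DAA) window.
func daaScoreSketch(selectedParentScore uint64, mergeSet map[string]bool,
	windowHashes []string) (daaScore uint64, daaAddedBlocks []string) {

	for _, hash := range windowHashes {
		if mergeSet[hash] {
			daaAddedBlocks = append(daaAddedBlocks, hash)
		}
	}
	return selectedParentScore + uint64(len(daaAddedBlocks)), daaAddedBlocks
}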
- daaAddedBlocks := make([]*externalapi.DomainHash, 0, len(mergeSet)) - for _, hash := range windowHashes { - if _, exists := mergeSet[*hash]; exists { - daaAddedBlocks = append(daaAddedBlocks, hash) - if len(daaAddedBlocks) == len(mergeSet) { - break - } - } - } - - var daaScore uint64 - if isBlockWithTrustedData { - daaScore, err = dm.daaBlocksStore.DAAScore(dm.databaseContext, stagingArea, blockHash) - if err != nil { - return 0, nil, err - } - } else { - selectedParentDAAScore, err := dm.daaBlocksStore.DAAScore(dm.databaseContext, stagingArea, ghostdagData.SelectedParent()) - if err != nil { - return 0, nil, err - } - daaScore = selectedParentDAAScore + uint64(len(daaAddedBlocks)) - } - - return daaScore, daaAddedBlocks, nil -} diff --git a/consensus/difficultymanager/difficultymanager_test.go b/consensus/difficultymanager/difficultymanager_test.go deleted file mode 100644 index bd9134e9..00000000 --- a/consensus/difficultymanager/difficultymanager_test.go +++ /dev/null @@ -1,357 +0,0 @@ -package difficultymanager_test - -import ( - "testing" - "time" - - "github.com/Qitmeer/qng/core/types/pow" - "github.com/kaspanet/kaspad/util/mstime" - - "github.com/Qitmeer/qng/consensus/utils/consensushashing" - - "github.com/Qitmeer/qng/consensus" - "github.com/Qitmeer/qng/consensus/model" - "github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/Qitmeer/qng/consensus/utils/testutils" - "github.com/kaspanet/kaspad/domain/dagconfig" -) - -func TestDifficulty(t *testing.T) { - testutils.ForAllNets(t, true, func(t *testing.T, consensusConfig *consensus.Config) { - if consensusConfig.DisableDifficultyAdjustment { - return - } - // This test generates 3066 blocks above genesis with at least 1 second between each block, amounting to - // a bit less then an hour of timestamps. - // To prevent rejected blocks due to timestamps in the future, the following safeguard makes sure - // the genesis block is at least 1 hour in the past. - if consensusConfig.GenesisBlock.Header.TimeInMilliseconds() > mstime.ToMSTime(time.Now().Add(-time.Hour)).UnixMilliseconds() { - t.Fatalf("TestDifficulty requires the GenesisBlock to be at least 1 hour old to pass") - } - - consensusConfig.K = 1 - consensusConfig.DifficultyAdjustmentWindowSize = 140 - - factory := consensus.NewFactory() - tc, teardown, err := factory.NewTestConsensus(consensusConfig, "TestDifficulty") - if err != nil { - t.Fatalf("Error setting up consensus: %+v", err) - } - defer teardown(false) - - stagingArea := model.NewStagingArea() - - addBlock := func(blockTime int64, parents ...*externalapi.DomainHash) (*externalapi.DomainBlock, *externalapi.DomainHash) { - bluestParent, err := tc.GHOSTDAGManager().ChooseSelectedParent(stagingArea, parents...) 
- if err != nil { - t.Fatalf("ChooseSelectedParent: %+v", err) - } - - if blockTime == 0 { - header, err := tc.BlockHeaderStore().BlockHeader(tc.DatabaseContext(), stagingArea, bluestParent) - if err != nil { - t.Fatalf("BlockHeader: %+v", err) - } - - blockTime = header.TimeInMilliseconds() + consensusConfig.TargetTimePerBlock.Milliseconds() - } - - block, _, err := tc.BuildBlockWithParents(parents, nil, nil) - if err != nil { - t.Fatalf("BuildBlockWithParents: %+v", err) - } - - newHeader := block.Header.ToMutable() - newHeader.SetTimeInMilliseconds(blockTime) - block.Header = newHeader.ToImmutable() - err = tc.ValidateAndInsertBlock(block, true) - if err != nil { - t.Fatalf("ValidateAndInsertBlock: %+v", err) - } - - return block, consensushashing.BlockHash(block) - } - - minimumTime := func(parents ...*externalapi.DomainHash) int64 { - var tempHash externalapi.DomainHash - stagingArea := model.NewStagingArea() - tc.BlockRelationStore().StageBlockRelation(stagingArea, &tempHash, &model.BlockRelations{ - Parents: parents, - Children: nil, - }) - - err = tc.GHOSTDAGManager().GHOSTDAG(stagingArea, &tempHash) - if err != nil { - t.Fatalf("GHOSTDAG: %+v", err) - } - - pastMedianTime, err := tc.PastMedianTimeManager().PastMedianTime(stagingArea, &tempHash) - if err != nil { - t.Fatalf("PastMedianTime: %+v", err) - } - - return pastMedianTime + 1 - } - - addBlockWithMinimumTime := func(parents ...*externalapi.DomainHash) (*externalapi.DomainBlock, *externalapi.DomainHash) { - minTime := minimumTime(parents...) - return addBlock(minTime, parents...) - } - - tipHash := consensusConfig.GenesisHash - tip := consensusConfig.GenesisBlock - for i := 0; i < consensusConfig.DifficultyAdjustmentWindowSize; i++ { - tip, tipHash = addBlock(0, tipHash) - if tip.Header.Bits() != consensusConfig.GenesisBlock.Header.Bits() { - t.Fatalf("As long as the block blue score is less then the difficulty adjustment " + - "window size, the difficulty should be the same as genesis'") - } - } - for i := 0; i < consensusConfig.DifficultyAdjustmentWindowSize+10; i++ { - tip, tipHash = addBlock(0, tipHash) - if tip.Header.Bits() != consensusConfig.GenesisBlock.Header.Bits() { - t.Fatalf("As long as the block rate remains the same, the difficulty shouldn't change") - } - } - - blockInThePast, tipHash := addBlockWithMinimumTime(tipHash) - if blockInThePast.Header.Bits() != tip.Header.Bits() { - t.Fatalf("The difficulty should only change when blockInThePast is in the past of a block") - } - tip = blockInThePast - - tip, tipHash = addBlock(0, tipHash) - if compareBits(tip.Header.Bits(), blockInThePast.Header.Bits()) >= 0 { - t.Fatalf("tip.bits should be smaller than blockInThePast.bits because blockInThePast increased the " + - "block rate, so the difficulty should increase as well") - } - - var expectedBits uint32 - switch consensusConfig.Name { - case dagconfig.TestnetParams.Name: - expectedBits = uint32(0x1e7f1441) - case dagconfig.DevnetParams.Name: - expectedBits = uint32(0x1f4e54ab) - case dagconfig.MainnetParams.Name: - expectedBits = uint32(0x1d02c50f) - } - - if tip.Header.Bits() != expectedBits { - t.Errorf("tip.bits was expected to be %x but got %x", expectedBits, tip.Header.Bits()) - } - - // Increase block rate to increase difficulty - for i := 0; i < consensusConfig.DifficultyAdjustmentWindowSize; i++ { - tip, tipHash = addBlockWithMinimumTime(tipHash) - tipGHOSTDAGData, err := tc.GHOSTDAGDataStore().Get(tc.DatabaseContext(), stagingArea, tipHash, false) - if err != nil { - t.Fatalf("GHOSTDAGDataStore: %+v", err) 
- } - - selectedParentHeader, err := - tc.BlockHeaderStore().BlockHeader(tc.DatabaseContext(), stagingArea, tipGHOSTDAGData.SelectedParent()) - if err != nil { - t.Fatalf("BlockHeader: %+v", err) - } - - if compareBits(tip.Header.Bits(), selectedParentHeader.Bits()) > 0 { - t.Fatalf("Because we're increasing the block rate, the difficulty can't decrease") - } - } - - // Add blocks until difficulty stabilizes - lastBits := tip.Header.Bits() - sameBitsCount := 0 - for sameBitsCount < consensusConfig.DifficultyAdjustmentWindowSize+1 { - tip, tipHash = addBlock(0, tipHash) - if tip.Header.Bits() == lastBits { - sameBitsCount++ - } else { - lastBits = tip.Header.Bits() - sameBitsCount = 0 - } - } - - slowBlockTime := tip.Header.TimeInMilliseconds() + consensusConfig.TargetTimePerBlock.Milliseconds() + 1000 - slowBlock, tipHash := addBlock(slowBlockTime, tipHash) - if slowBlock.Header.Bits() != tip.Header.Bits() { - t.Fatalf("The difficulty should only change when slowBlock is in the past of a block") - } - - tip = slowBlock - - tip, tipHash = addBlock(0, tipHash) - if compareBits(tip.Header.Bits(), slowBlock.Header.Bits()) <= 0 { - t.Fatalf("tip.bits should be smaller than slowBlock.bits because slowBlock decreased the block" + - " rate, so the difficulty should decrease as well") - } - - // Here we create two chains: a chain of blue blocks, and a chain of red blocks with - // very low timestamps. Because the red blocks should be part of the difficulty - // window, their low timestamps should lower the difficulty, and we check it by - // comparing the bits of two blocks with the same blue score, one with the red - // blocks in its past and one without. - splitBlockHash := tipHash - blueTipHash := splitBlockHash - for i := 0; i < consensusConfig.DifficultyAdjustmentWindowSize; i++ { - _, blueTipHash = addBlock(0, blueTipHash) - } - - redChainTipHash := splitBlockHash - const redChainLength = 10 - for i := 0; i < redChainLength; i++ { - _, redChainTipHash = addBlockWithMinimumTime(redChainTipHash) - } - tipWithRedPast, _ := addBlock(0, redChainTipHash, blueTipHash) - tipWithoutRedPast, _ := addBlock(0, blueTipHash) - if tipWithRedPast.Header.Bits() <= tipWithoutRedPast.Header.Bits() { - t.Fatalf("tipWithRedPast.bits should be greater than tipWithoutRedPast.bits because the red blocks" + - " blocks have very low timestamp and should lower the difficulty") - } - - // We repeat the test, but now we make the blue chain longer in order to filter - // out the red blocks from the window, and check that the red blocks don't - // affect the difficulty. 
- blueTipHash = splitBlockHash - for i := 0; i < consensusConfig.DifficultyAdjustmentWindowSize+redChainLength+1; i++ { - _, blueTipHash = addBlock(0, blueTipHash) - } - - redChainTipHash = splitBlockHash - for i := 0; i < redChainLength; i++ { - _, redChainTipHash = addBlockWithMinimumTime(redChainTipHash) - } - tipWithRedPast, _ = addBlock(0, redChainTipHash, blueTipHash) - tipWithoutRedPast, _ = addBlock(0, blueTipHash) - if tipWithRedPast.Header.Bits() != tipWithoutRedPast.Header.Bits() { - t.Fatalf("tipWithoutRedPast.bits should be the same as tipWithRedPast.bits because the red blocks" + - " are not part of the difficulty window") - } - }) -} - -func TestDAAScore(t *testing.T) { - testutils.ForAllNets(t, true, func(t *testing.T, consensusConfig *consensus.Config) { - consensusConfig.DifficultyAdjustmentWindowSize = 86 - - stagingArea := model.NewStagingArea() - - factory := consensus.NewFactory() - tc, teardown, err := factory.NewTestConsensus(consensusConfig, "TestDAAScore") - if err != nil { - t.Fatalf("Error setting up consensus: %+v", err) - } - defer teardown(false) - - // We create a small DAG in order to skip from block with blue score of 1 directly to 3 - split1Hash, _, err := tc.AddBlock([]*externalapi.DomainHash{consensusConfig.GenesisHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - block, _, err := tc.AddBlock([]*externalapi.DomainHash{consensusConfig.GenesisHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - - blockBlueScore3, _, err := tc.AddBlock([]*externalapi.DomainHash{split1Hash, block}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - - tipHash := blockBlueScore3 - blockBlueScore3DAAScore, err := tc.DAABlocksStore().DAAScore(tc.DatabaseContext(), stagingArea, tipHash) - if err != nil { - t.Fatalf("DAAScore: %+v", err) - } - - blockBlueScore3ExpectedDAAScore := uint64(2) + consensusConfig.GenesisBlock.Header.DAAScore() - if blockBlueScore3DAAScore != blockBlueScore3ExpectedDAAScore { - t.Fatalf("DAA score is expected to be %d but got %d", blockBlueScore3ExpectedDAAScore, blockBlueScore3DAAScore) - } - tipDAAScore := blockBlueScore3ExpectedDAAScore - - for i := uint64(0); i < 10; i++ { - tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{tipHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - tipDAAScore, err = tc.DAABlocksStore().DAAScore(tc.DatabaseContext(), stagingArea, tipHash) - if err != nil { - t.Fatalf("DAAScore: %+v", err) - } - - expectedDAAScore := blockBlueScore3ExpectedDAAScore + i + 1 - if tipDAAScore != expectedDAAScore { - t.Fatalf("DAA score is expected to be %d but got %d", expectedDAAScore, tipDAAScore) - } - } - - split2Hash := tipHash - split2DAAScore := tipDAAScore - for i := uint64(0); i < uint64(consensusConfig.DifficultyAdjustmentWindowSize)-1; i++ { - tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{tipHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - tipDAAScore, err = tc.DAABlocksStore().DAAScore(tc.DatabaseContext(), stagingArea, tipHash) - if err != nil { - t.Fatalf("DAAScore: %+v", err) - } - - expectedDAAScore := split2DAAScore + i + 1 - if tipDAAScore != expectedDAAScore { - t.Fatalf("DAA score is expected to be %d but got %d", expectedDAAScore, split2DAAScore) - } - } - - // This block should have blue score of 2 so it shouldn't be added to the DAA window of a merging block - blockAboveSplit1, _, err := tc.AddBlock([]*externalapi.DomainHash{split1Hash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) 
- } - - // This block is in the anticone of consensusConfig.DifficultyAdjustmentWindowSize-1 blocks, so it must be part - // of the DAA window of a merging block - blockAboveSplit2, _, err := tc.AddBlock([]*externalapi.DomainHash{split2Hash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - - currentSelectedTipDAAScore := tipDAAScore - currentSelectedTip := tipHash - tipHash, _, err = tc.AddBlock([]*externalapi.DomainHash{blockAboveSplit1, blockAboveSplit2, tipHash}, nil, nil) - if err != nil { - t.Fatalf("AddBlock: %+v", err) - } - - tipDAAScore, err = tc.DAABlocksStore().DAAScore(tc.DatabaseContext(), stagingArea, tipHash) - if err != nil { - t.Fatalf("DAAScore: %+v", err) - } - - // The DAA score should be increased only by 2, because 1 of the 3 merged blocks - // is not in the DAA window - expectedDAAScore := currentSelectedTipDAAScore + 2 - if tipDAAScore != expectedDAAScore { - t.Fatalf("DAA score is expected to be %d but got %d", expectedDAAScore, tipDAAScore) - } - - tipDAAAddedBlocks, err := tc.DAABlocksStore().DAAAddedBlocks(tc.DatabaseContext(), stagingArea, tipHash) - if err != nil { - t.Fatalf("DAAScore: %+v", err) - } - - // blockAboveSplit2 should be excluded from the DAA added blocks because it's not in the tip's - // DAA window. - expectedDAABlocks := []*externalapi.DomainHash{blockAboveSplit2, currentSelectedTip} - if !externalapi.HashesEqual(tipDAAAddedBlocks, expectedDAABlocks) { - t.Fatalf("DAA added blocks are expected to be %s but got %s", expectedDAABlocks, tipDAAAddedBlocks) - } - }) -} - -func compareBits(a uint32, b uint32) int { - aTarget := pow.CompactToBig(a) - bTarget := pow.CompactToBig(b) - return aTarget.Cmp(bTarget) -} diff --git a/consensus/difficultymanager/hashrate.go b/consensus/difficultymanager/hashrate.go deleted file mode 100644 index 29555655..00000000 --- a/consensus/difficultymanager/hashrate.go +++ /dev/null @@ -1,74 +0,0 @@ -package difficultymanager - -import ( - "math/big" - - "github.com/Qitmeer/qng/consensus/model" - "github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/kaspanet/kaspad/infrastructure/logger" - "github.com/pkg/errors" -) - -func (dm *difficultyManager) EstimateNetworkHashesPerSecond(startHash *externalapi.DomainHash, windowSize int) (uint64, error) { - onEnd := logger.LogAndMeasureExecutionTime(log, "EstimateNetworkHashesPerSecond") - defer onEnd() - - stagingArea := model.NewStagingArea() - return dm.estimateNetworkHashesPerSecond(stagingArea, startHash, windowSize) -} - -func (dm *difficultyManager) estimateNetworkHashesPerSecond(stagingArea *model.StagingArea, - startHash *externalapi.DomainHash, windowSize int) (uint64, error) { - - const minWindowSize = 1000 - if windowSize < minWindowSize { - return 0, errors.Errorf("windowSize must be equal to or greater than %d", minWindowSize) - } - - blockWindow, windowHashes, err := dm.blockWindow(stagingArea, startHash, windowSize) - if err != nil { - return 0, err - } - - // return 0 if no blocks had been mined yet - if len(windowHashes) == 0 { - return 0, nil - } - - minWindowTimestamp, maxWindowTimestamp, _ := blockWindow.minMaxTimestamps() - if minWindowTimestamp == maxWindowTimestamp { - return 0, errors.Errorf("min window timestamp is equal to the max window timestamp") - } - - firstHash := windowHashes[0] - firstBlockGHOSTDAGData, err := dm.ghostdagStore.Get(dm.databaseContext, stagingArea, firstHash, false) - if err != nil { - return 0, err - } - firstBlockBlueWork := firstBlockGHOSTDAGData.BlueWork() - minWindowBlueWork := 
firstBlockBlueWork - maxWindowBlueWork := firstBlockBlueWork - for _, hash := range windowHashes[1:] { - blockGHOSTDAGData, err := dm.ghostdagStore.Get(dm.databaseContext, stagingArea, hash, false) - if err != nil { - return 0, err - } - blockBlueWork := blockGHOSTDAGData.BlueWork() - if blockBlueWork.Cmp(minWindowBlueWork) < 0 { - minWindowBlueWork = blockBlueWork - } - if blockBlueWork.Cmp(maxWindowBlueWork) > 0 { - maxWindowBlueWork = blockBlueWork - } - } - - windowsDiff := (maxWindowTimestamp - minWindowTimestamp) / 1000 // Divided by 1000 to convert milliseconds to seconds - if windowsDiff == 0 { - return 0, nil - } - - nominator := new(big.Int).Sub(maxWindowBlueWork, minWindowBlueWork) - denominator := big.NewInt(windowsDiff) - networkHashesPerSecondBigInt := new(big.Int).Div(nominator, denominator) - return networkHashesPerSecondBigInt.Uint64(), nil -} diff --git a/consensus/model/block_heap.go b/consensus/model/block_heap.go deleted file mode 100644 index c8cba85c..00000000 --- a/consensus/model/block_heap.go +++ /dev/null @@ -1,12 +0,0 @@ -package model - -import "github.com/Qitmeer/qng/consensus/model/externalapi" - -// BlockHeap represents a heap of block hashes, providing a priority-queue functionality -type BlockHeap interface { - Push(blockHash *externalapi.DomainHash) error - PushSlice(blockHash []*externalapi.DomainHash) error - Pop() *externalapi.DomainHash - Len() int - ToSlice() []*externalapi.DomainHash -} diff --git a/consensus/model/blockiterator.go b/consensus/model/blockiterator.go deleted file mode 100644 index bf35519c..00000000 --- a/consensus/model/blockiterator.go +++ /dev/null @@ -1,11 +0,0 @@ -package model - -import "github.com/Qitmeer/qng/consensus/model/externalapi" - -// BlockIterator is an iterator over blocks according to some order. -type BlockIterator interface { - First() bool - Next() bool - Get() (*externalapi.DomainHash, error) - Close() error -} diff --git a/consensus/model/externalapi/acceptancedata.go b/consensus/model/externalapi/acceptancedata.go deleted file mode 100644 index 4b85741f..00000000 --- a/consensus/model/externalapi/acceptancedata.go +++ /dev/null @@ -1,145 +0,0 @@ -package externalapi - -// AcceptanceData stores data about which transactions were accepted by a block. -// It's ordered in the same way as the block merge set blues. -type AcceptanceData []*BlockAcceptanceData - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. -var _ AcceptanceData = []*BlockAcceptanceData{} - -// Equal returns whether ad equals to other -func (ad AcceptanceData) Equal(other AcceptanceData) bool { - if len(ad) != len(other) { - return false - } - - for i, blockAcceptanceData := range ad { - if !blockAcceptanceData.Equal(other[i]) { - return false - } - } - - return true -} - -// Clone clones the AcceptanceData -func (ad AcceptanceData) Clone() AcceptanceData { - clone := make(AcceptanceData, len(ad)) - for i, blockAcceptanceData := range ad { - clone[i] = blockAcceptanceData.Clone() - } - - return clone -} - -// BlockAcceptanceData stores all transactions in a block with an indication -// if they were accepted or not by some other block -type BlockAcceptanceData struct { - BlockHash *DomainHash - TransactionAcceptanceData []*TransactionAcceptanceData -} - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. 
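// The "var _ = ..." lines in this file are compile-time guards: constructing the struct
// with one positional value per field stops compiling the moment a field is added or
// removed, which is the reminder to revisit Equal and Clone. The same idiom with a
// hypothetical two-field type, for illustration only:
type pointSketch struct{ X, Y int }

// Adding a Z field to pointSketch breaks this line until it is updated.
var _ = pointSketch{0, 0}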
-var _ = &BlockAcceptanceData{&DomainHash{}, []*TransactionAcceptanceData{}} - -// Equal returns whether bad equals to other -func (bad *BlockAcceptanceData) Equal(other *BlockAcceptanceData) bool { - if bad == nil || other == nil { - return bad == other - } - - if !bad.BlockHash.Equal(other.BlockHash) { - return false - } - - if len(bad.TransactionAcceptanceData) != len(other.TransactionAcceptanceData) { - return false - } - - for i, acceptanceData := range bad.TransactionAcceptanceData { - if !acceptanceData.Equal(other.TransactionAcceptanceData[i]) { - return false - } - } - - return true -} - -// Clone returns a clone of BlockAcceptanceData -func (bad *BlockAcceptanceData) Clone() *BlockAcceptanceData { - if bad == nil { - return nil - } - - clone := &BlockAcceptanceData{ - BlockHash: bad.BlockHash, - TransactionAcceptanceData: make([]*TransactionAcceptanceData, len(bad.TransactionAcceptanceData)), - } - for i, acceptanceData := range bad.TransactionAcceptanceData { - clone.TransactionAcceptanceData[i] = acceptanceData.Clone() - } - - return clone -} - -// TransactionAcceptanceData stores a transaction together with an indication -// if it was accepted or not by some block -type TransactionAcceptanceData struct { - Transaction *DomainTransaction - Fee uint64 - IsAccepted bool - TransactionInputUTXOEntries []UTXOEntry -} - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. -var _ = &TransactionAcceptanceData{&DomainTransaction{}, 0, false, []UTXOEntry{}} - -// Equal returns whether tad equals to other -func (tad *TransactionAcceptanceData) Equal(other *TransactionAcceptanceData) bool { - if tad == nil || other == nil { - return tad == other - } - - if !tad.Transaction.Equal(other.Transaction) { - return false - } - - if tad.Fee != other.Fee { - return false - } - - if tad.IsAccepted != other.IsAccepted { - return false - } - - if len(tad.TransactionInputUTXOEntries) != len(other.TransactionInputUTXOEntries) { - return false - } - - for i, thisUTXOEntry := range tad.TransactionInputUTXOEntries { - otherUTXOEntry := other.TransactionInputUTXOEntries[i] - if !thisUTXOEntry.Equal(otherUTXOEntry) { - return false - } - } - - return true -} - -// Clone returns a clone of TransactionAcceptanceData -func (tad *TransactionAcceptanceData) Clone() *TransactionAcceptanceData { - cloneTransactionInputUTXOEntries := make([]UTXOEntry, len(tad.TransactionInputUTXOEntries)) - for i, utxoEntry := range tad.TransactionInputUTXOEntries { - cloneTransactionInputUTXOEntries[i] = utxoEntry - } - - return &TransactionAcceptanceData{ - Transaction: tad.Transaction.Clone(), - Fee: tad.Fee, - IsAccepted: tad.IsAccepted, - TransactionInputUTXOEntries: cloneTransactionInputUTXOEntries, - } -} diff --git a/consensus/model/externalapi/block.go b/consensus/model/externalapi/block.go deleted file mode 100644 index a65d452e..00000000 --- a/consensus/model/externalapi/block.go +++ /dev/null @@ -1,84 +0,0 @@ -package externalapi - -import "math/big" - -// DomainBlock represents a Kaspa block -type DomainBlock struct { - Header BlockHeader - Transactions []*DomainTransaction -} - -// Clone returns a clone of DomainBlock -func (block *DomainBlock) Clone() *DomainBlock { - transactionClone := make([]*DomainTransaction, len(block.Transactions)) - for i, tx := range block.Transactions { - transactionClone[i] = tx.Clone() - } - - return &DomainBlock{ - Header: block.Header, - Transactions: transactionClone, - } -} - -// If this doesn't 
compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. -var _ = DomainBlock{nil, []*DomainTransaction{}} - -// Equal returns whether block equals to other -func (block *DomainBlock) Equal(other *DomainBlock) bool { - if block == nil || other == nil { - return block == other - } - - if len(block.Transactions) != len(other.Transactions) { - return false - } - - if !block.Header.Equal(other.Header) { - return false - } - - for i, tx := range block.Transactions { - if !tx.Equal(other.Transactions[i]) { - return false - } - } - - return true -} - -// BlockHeader represents an immutable block header. -type BlockHeader interface { - BaseBlockHeader - ToMutable() MutableBlockHeader -} - -// BaseBlockHeader represents the header part of a Kaspa block -type BaseBlockHeader interface { - Version() uint16 - Parents() []BlockLevelParents - DirectParents() BlockLevelParents - HashMerkleRoot() *DomainHash - AcceptedIDMerkleRoot() *DomainHash - UTXOCommitment() *DomainHash - TimeInMilliseconds() int64 - Bits() uint32 - Nonce() uint64 - DAAScore() uint64 - BlueScore() uint64 - BlueWork() *big.Int - PruningPoint() *DomainHash - BlockLevel(maxBlockLevel int) int - Equal(other BaseBlockHeader) bool -} - -// MutableBlockHeader represents a block header that can be mutated, but only -// the fields that are relevant to mining (Nonce and TimeInMilliseconds). -type MutableBlockHeader interface { - BaseBlockHeader - ToImmutable() BlockHeader - SetNonce(nonce uint64) - SetTimeInMilliseconds(timeInMilliseconds int64) - SetHashMerkleRoot(hashMerkleRoot *DomainHash) -} diff --git a/consensus/model/externalapi/block_equal_clone_test.go b/consensus/model/externalapi/block_equal_clone_test.go deleted file mode 100644 index 8138cee9..00000000 --- a/consensus/model/externalapi/block_equal_clone_test.go +++ /dev/null @@ -1,499 +0,0 @@ -package externalapi_test - -import ( - "math/big" - "reflect" - "testing" - - "github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/Qitmeer/qng/consensus/utils/blockheader" -) - -type blockToCompare struct { - block *externalapi.DomainBlock - expectedResult bool -} - -type TestBlockStruct struct { - baseBlock *externalapi.DomainBlock - blocksToCompareTo []blockToCompare -} - -func initTestBaseTransactions() []*externalapi.DomainTransaction { - - testTx := []*externalapi.DomainTransaction{{ - Version: 1, - Inputs: []*externalapi.DomainTransactionInput{}, - Outputs: []*externalapi.DomainTransactionOutput{}, - LockTime: 1, - SubnetworkID: externalapi.DomainSubnetworkID{0x01}, - Gas: 1, - Payload: []byte{0x01}, - Fee: 0, - Mass: 1, - ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }} - return testTx -} - -func initTestAnotherTransactions() []*externalapi.DomainTransaction { - - testTx := []*externalapi.DomainTransaction{{ - Version: 1, - Inputs: []*externalapi.DomainTransactionInput{}, - Outputs: []*externalapi.DomainTransactionOutput{}, - LockTime: 1, - SubnetworkID: externalapi.DomainSubnetworkID{0x01}, - Gas: 1, - Payload: []byte{0x02}, - Fee: 0, - Mass: 1, - ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), - }} - return testTx -} - -func initTestTwoTransactions() []*externalapi.DomainTransaction { - - testTx := []*externalapi.DomainTransaction{{ - Version: 1, - Inputs: []*externalapi.DomainTransactionInput{}, - Outputs: []*externalapi.DomainTransactionOutput{}, - LockTime: 1, - SubnetworkID: externalapi.DomainSubnetworkID{0x01}, - Gas: 1, - Payload: []byte{0x01}, - Fee: 0, - Mass: 1, - ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), - }, { - Version: 1, - Inputs: []*externalapi.DomainTransactionInput{}, - Outputs: []*externalapi.DomainTransactionOutput{}, - LockTime: 1, - SubnetworkID: externalapi.DomainSubnetworkID{0x01}, - Gas: 1, - Payload: []byte{0x01}, - Fee: 0, - Mass: 1, - ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), - }} - return testTx -} - -func initTestBlockStructsForClone() []*externalapi.DomainBlock { - tests := []*externalapi.DomainBlock{ - { - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{0})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - 4, - 5, - 6, - 7, - 8, - big.NewInt(9), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{10}), - ), - initTestBaseTransactions(), - }, { - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - 4, - 5, - 6, - 7, - 8, - big.NewInt(9), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{10}), - ), - initTestBaseTransactions(), - }, - } - - return tests -} - -func initTestBlockStructsForEqual() *[]TestBlockStruct { - tests := []TestBlockStruct{ - { - baseBlock: nil, - blocksToCompareTo: []blockToCompare{ - { - block: nil, - expectedResult: true, - }, - { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{0})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - 4, - 5, - 6, - 7, - 8, - big.NewInt(9), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{10}), - ), - initTestBaseTransactions()}, - expectedResult: false, - }, - }, - }, { - baseBlock: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - 
[]externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - blocksToCompareTo: []blockToCompare{ - { - block: nil, - expectedResult: false, - }, - { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestAnotherTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: true, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{ - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - }}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{100})}}, // Changed - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestTwoTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - 
[]externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{100}), // Changed - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{100}), // Changed - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{100}), // Changed - 5, - 6, - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 100, // Changed - 6, - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 100, // Changed - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - 
externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 100, // Changed - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 100, // Changed - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 8, - 100, // Changed - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 8, - 9, - big.NewInt(100), // Changed - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{11}), - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, { - block: &externalapi.DomainBlock{ - blockheader.NewImmutableBlockHeader( - 0, - []externalapi.BlockLevelParents{[]*externalapi.DomainHash{externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{1})}}, - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{2}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{3}), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{4}), - 5, - 6, - 7, - 8, - 9, - big.NewInt(10), - externalapi.NewDomainHashFromByteArray(&[externalapi.DomainHashSize]byte{100}), // Changed - ), - initTestBaseTransactions(), - }, - expectedResult: false, - }, - }, - }, - } - - return &tests -} - -func TestDomainBlock_Equal(t *testing.T) { - - blockTests := initTestBlockStructsForEqual() - for i, test := range *blockTests { - for j, subTest := range test.blocksToCompareTo { - result1 := test.baseBlock.Equal(subTest.block) - if result1 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) - } - result2 := 
subTest.block.Equal(test.baseBlock) - if result2 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) - } - } - } - -} - -func TestDomainBlock_Clone(t *testing.T) { - - blocks := initTestBlockStructsForClone() - for i, block := range blocks { - blockClone := block.Clone() - if !blockClone.Equal(block) { - t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) - } - if !reflect.DeepEqual(block, blockClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) - } - } -} diff --git a/consensus/model/externalapi/block_with_trusted_data.go b/consensus/model/externalapi/block_with_trusted_data.go deleted file mode 100644 index 48ab0fb9..00000000 --- a/consensus/model/externalapi/block_with_trusted_data.go +++ /dev/null @@ -1,23 +0,0 @@ -package externalapi - -// BlockWithTrustedData is a block with pre-filled data -// that is not validated by the consensus. -// This is used when bring the pruning point and its -// anticone on a pruned-headers node. -type BlockWithTrustedData struct { - Block *DomainBlock - DAAWindow []*TrustedDataDataDAAHeader - GHOSTDAGData []*BlockGHOSTDAGDataHashPair -} - -// TrustedDataDataDAAHeader is a block that belongs to BlockWithTrustedData.DAAWindow -type TrustedDataDataDAAHeader struct { - Header BlockHeader - GHOSTDAGData *BlockGHOSTDAGData -} - -// BlockGHOSTDAGDataHashPair is a pair of a block hash and its ghostdag data -type BlockGHOSTDAGDataHashPair struct { - Hash *DomainHash - GHOSTDAGData *BlockGHOSTDAGData -} diff --git a/consensus/model/externalapi/blockinfo.go b/consensus/model/externalapi/blockinfo.go deleted file mode 100644 index 43c914f9..00000000 --- a/consensus/model/externalapi/blockinfo.go +++ /dev/null @@ -1,37 +0,0 @@ -package externalapi - -import "math/big" - -// BlockInfo contains various information about a specific block -type BlockInfo struct { - Exists bool - BlockStatus BlockStatus - BlueScore uint64 - BlueWork *big.Int - SelectedParent *DomainHash - MergeSetBlues []*DomainHash - MergeSetReds []*DomainHash -} - -// HasHeader returns whether the block exists and has a valid header -func (bi *BlockInfo) HasHeader() bool { - return bi.Exists && bi.BlockStatus != StatusInvalid -} - -// HasBody returns whether the block exists and has a valid body -func (bi *BlockInfo) HasBody() bool { - return bi.Exists && bi.BlockStatus != StatusInvalid && bi.BlockStatus != StatusHeaderOnly -} - -// Clone returns a clone of BlockInfo -func (bi *BlockInfo) Clone() *BlockInfo { - return &BlockInfo{ - Exists: bi.Exists, - BlockStatus: bi.BlockStatus.Clone(), - BlueScore: bi.BlueScore, - BlueWork: new(big.Int).Set(bi.BlueWork), - SelectedParent: bi.SelectedParent, - MergeSetBlues: CloneHashes(bi.MergeSetBlues), - MergeSetReds: CloneHashes(bi.MergeSetReds), - } -} diff --git a/consensus/model/externalapi/blockinfo_clone_test.go b/consensus/model/externalapi/blockinfo_clone_test.go deleted file mode 100644 index 003d25ee..00000000 --- a/consensus/model/externalapi/blockinfo_clone_test.go +++ /dev/null @@ -1,108 +0,0 @@ -package externalapi - -import ( - "math/big" - "reflect" - "testing" -) - -func initTestBlockInfoStructsForClone() []*BlockInfo { - - tests := []*BlockInfo{ - { - true, - BlockStatus(0x01), - 0, - big.NewInt(0), - nil, - []*DomainHash{}, - []*DomainHash{}, - }, { - true, - BlockStatus(0x02), - 0, - big.NewInt(0), - nil, - []*DomainHash{}, - []*DomainHash{}, - }, { - true, - 1, - 1, - big.NewInt(0), - nil, - []*DomainHash{}, - []*DomainHash{}, - 
}, { - true, - 255, - 2, - big.NewInt(0), - nil, - []*DomainHash{}, - []*DomainHash{}, - }, { - true, - 0, - 3, - big.NewInt(0), - nil, - []*DomainHash{}, - []*DomainHash{}, - }, { - true, - BlockStatus(0x01), - 0, - big.NewInt(1), - nil, - []*DomainHash{}, - []*DomainHash{}, - }, { - false, - BlockStatus(0x01), - 0, - big.NewInt(1), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), - []*DomainHash{ - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), - }, - []*DomainHash{ - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04}), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05}), - }, - }, - } - return tests -} - -func TestBlockInfo_Clone(t *testing.T) { - - blockInfos := initTestBlockInfoStructsForClone() - for i, blockInfo := range blockInfos { - blockInfoClone := blockInfo.Clone() - if !reflect.DeepEqual(blockInfo, blockInfoClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) - } - } -} diff --git a/consensus/model/externalapi/blocklevelparents.go b/consensus/model/externalapi/blocklevelparents.go deleted file mode 100644 index a4768e49..00000000 --- a/consensus/model/externalapi/blocklevelparents.go +++ /dev/null @@ -1,63 +0,0 @@ -package externalapi - -// BlockLevelParents represent the parents within a single super-block level -// See https://github.com/kaspanet/research/issues/3 for further details -type BlockLevelParents []*DomainHash - -// Equal returns true if this BlockLevelParents is equal to `other` -func (sl BlockLevelParents) Equal(other BlockLevelParents) bool { - if len(sl) != len(other) { - return false - } - for _, thisHash := range sl { - found := false - for _, otherHash := range other { - if thisHash.Equal(otherHash) { - found = true - break - } - } - if !found { - return false - } - } - return true -} - -// Clone creates a clone of this BlockLevelParents -func (sl BlockLevelParents) Clone() BlockLevelParents { - return CloneHashes(sl) -} - -// Contains returns true if this BlockLevelParents contains the given blockHash -func (sl BlockLevelParents) Contains(blockHash *DomainHash) bool { - for _, blockLevelParent := range sl { - if blockLevelParent.Equal(blockHash) { - return true - } - } - return false -} - -// ParentsEqual returns true if all the BlockLevelParents in `a` and `b` are -// equal pairwise -func ParentsEqual(a, b []BlockLevelParents) bool { - if len(a) != len(b) { - return false - } - for i, blockLevelParents := range a { - if !blockLevelParents.Equal(b[i]) { - return false - } - } - 
return true -} - -// CloneParents creates a clone of the given BlockLevelParents slice -func CloneParents(parents []BlockLevelParents) []BlockLevelParents { - clone := make([]BlockLevelParents, len(parents)) - for i, blockLevelParents := range parents { - clone[i] = blockLevelParents.Clone() - } - return clone -} diff --git a/consensus/model/externalapi/blocklocator.go b/consensus/model/externalapi/blocklocator.go deleted file mode 100644 index b6dd1ca2..00000000 --- a/consensus/model/externalapi/blocklocator.go +++ /dev/null @@ -1,24 +0,0 @@ -package externalapi - -// BlockLocator is used to help locate a specific block. The algorithm for -// building the block locator is to add block hashes in reverse order on the -// block's selected parent chain until the desired stop block is reached. -// In order to keep the list of locator hashes to a reasonable number of entries, -// the step between each entry is doubled each loop iteration to exponentially -// decrease the number of hashes as a function of the distance from the block -// being located. -// -// For example, assume a selected parent chain with IDs as depicted below, and the -// stop block is genesis: -// -// genesis -> 1 -> 2 -> ... -> 15 -> 16 -> 17 -> 18 -// -// The block locator for block 17 would be the hashes of blocks: -// -// [17 16 14 11 7 2 genesis] -type BlockLocator []*DomainHash - -// Clone returns a clone of BlockLocator -func (locator BlockLocator) Clone() BlockLocator { - return CloneHashes(locator) -} diff --git a/consensus/model/externalapi/blocklocator_clone_test.go b/consensus/model/externalapi/blocklocator_clone_test.go deleted file mode 100644 index 28e90768..00000000 --- a/consensus/model/externalapi/blocklocator_clone_test.go +++ /dev/null @@ -1,76 +0,0 @@ -package externalapi - -import ( - "reflect" - "testing" -) - -func initTestBlockLocatorForClone() []*BlockLocator { - - tests := []*BlockLocator{{ - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04}), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF}), - }, { - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF}), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 2}), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 1}), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 1, 1}), - NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 2, 1}), - }, - } - return tests -} - -func TestBlockLocator_Clone(t *testing.T) { - - testBlockLocator := initTestBlockLocatorForClone() - for i, blockLocator := range testBlockLocator { - blockLocatorClone := blockLocator.Clone() - if !reflect.DeepEqual(blockLocator, &blockLocatorClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) - } - } -} diff --git a/consensus/model/externalapi/blockstatus.go b/consensus/model/externalapi/blockstatus.go deleted file mode 100644 index 7358bafd..00000000 --- a/consensus/model/externalapi/blockstatus.go +++ /dev/null @@ -1,49 +0,0 @@ -package externalapi - -// BlockStatus represents the validation state of the block. -type BlockStatus byte - -// Clone returns a clone of BlockStatus -func (bs BlockStatus) Clone() BlockStatus { - return bs -} - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. -var _ BlockStatus = 0 - -// Equal returns whether bs equals to other -func (bs BlockStatus) Equal(other BlockStatus) bool { - return bs == other -} - -const ( - // StatusInvalid indicates that the block is invalid. - StatusInvalid BlockStatus = iota - - // StatusUTXOValid indicates the block is valid from any UTXO related aspects and has passed all the other validations as well. - StatusUTXOValid - - // StatusUTXOPendingVerification indicates that the block is pending verification against its past UTXO-Set, either - // because it was not yet verified since the block was never in the selected parent chain, or if the - // block violates finality. - StatusUTXOPendingVerification - - // StatusDisqualifiedFromChain indicates that the block is not eligible to be a selected parent. 
- StatusDisqualifiedFromChain - - // StatusHeaderOnly indicates that the block transactions are not held (pruned or wasn't added yet) - StatusHeaderOnly -) - -var blockStatusStrings = map[BlockStatus]string{ - StatusInvalid: "Invalid", - StatusUTXOValid: "Valid", - StatusUTXOPendingVerification: "UTXOPendingVerification", - StatusDisqualifiedFromChain: "DisqualifiedFromChain", - StatusHeaderOnly: "HeaderOnly", -} - -func (bs BlockStatus) String() string { - return blockStatusStrings[bs] -} diff --git a/consensus/model/externalapi/blockstatus_equal_clone_test.go b/consensus/model/externalapi/blockstatus_equal_clone_test.go deleted file mode 100644 index 7737296c..00000000 --- a/consensus/model/externalapi/blockstatus_equal_clone_test.go +++ /dev/null @@ -1,87 +0,0 @@ -package externalapi - -import ( - "reflect" - "testing" -) - -func initTestBlockStatusForClone() []BlockStatus { - - tests := []BlockStatus{1, 2, 0xFF, 0} - - return tests -} - -type TestBlockStatusToCompare struct { - blockStatus BlockStatus - expectedResult bool -} - -type TestBlockStatusStruct struct { - baseBlockStatus BlockStatus - blockStatusesToCompareTo []TestBlockStatusToCompare -} - -func initTestBlockStatusForEqual() []TestBlockStatusStruct { - tests := []TestBlockStatusStruct{ - { - baseBlockStatus: 0, - blockStatusesToCompareTo: []TestBlockStatusToCompare{ - { - blockStatus: 1, - expectedResult: false, - }, - { - blockStatus: 0, - expectedResult: true, - }, - }, - }, { - baseBlockStatus: 255, - blockStatusesToCompareTo: []TestBlockStatusToCompare{ - { - blockStatus: 1, - expectedResult: false, - }, - { - blockStatus: 255, - expectedResult: true, - }, - }, - }, - } - return tests -} - -func TestBlockStatus_Equal(t *testing.T) { - - testBlockStatus := initTestBlockStatusForEqual() - - for i, test := range testBlockStatus { - for j, subTest := range test.blockStatusesToCompareTo { - result1 := test.baseBlockStatus.Equal(subTest.blockStatus) - if result1 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) - } - - result2 := subTest.blockStatus.Equal(test.baseBlockStatus) - if result2 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) - } - } - } -} - -func TestBlockStatus_Clone(t *testing.T) { - - testBlockStatus := initTestBlockStatusForClone() - for i, blockStatus := range testBlockStatus { - blockStatusClone := blockStatus.Clone() - if !blockStatusClone.Equal(blockStatus) { - t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) - } - if !reflect.DeepEqual(blockStatus, blockStatusClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) - } - } -} diff --git a/consensus/model/externalapi/blocktemplate.go b/consensus/model/externalapi/blocktemplate.go deleted file mode 100644 index 5456ef2d..00000000 --- a/consensus/model/externalapi/blocktemplate.go +++ /dev/null @@ -1,19 +0,0 @@ -package externalapi - -// DomainBlockTemplate contains a Block plus metadata related to its generation -type DomainBlockTemplate struct { - Block *DomainBlock - CoinbaseData *DomainCoinbaseData - CoinbaseHasRedReward bool - IsNearlySynced bool -} - -// Clone returns a clone of DomainBlockTemplate -func (bt *DomainBlockTemplate) Clone() *DomainBlockTemplate { - return &DomainBlockTemplate{ - Block: bt.Block.Clone(), - CoinbaseData: bt.CoinbaseData.Clone(), - CoinbaseHasRedReward: bt.CoinbaseHasRedReward, - IsNearlySynced: bt.IsNearlySynced, - } -} diff --git 
a/consensus/model/externalapi/coinbase.go b/consensus/model/externalapi/coinbase.go deleted file mode 100644 index 39dbfb98..00000000 --- a/consensus/model/externalapi/coinbase.go +++ /dev/null @@ -1,38 +0,0 @@ -package externalapi - -import "bytes" - -// DomainCoinbaseData contains data by which a coinbase transaction -// is built -type DomainCoinbaseData struct { - ScriptPublicKey *ScriptPublicKey - ExtraData []byte -} - -// Clone returns a clone of DomainCoinbaseData -func (dcd *DomainCoinbaseData) Clone() *DomainCoinbaseData { - - scriptPubKeyClone := make([]byte, len(dcd.ScriptPublicKey.Script)) - copy(scriptPubKeyClone, dcd.ScriptPublicKey.Script) - - extraDataClone := make([]byte, len(dcd.ExtraData)) - copy(extraDataClone, dcd.ExtraData) - - return &DomainCoinbaseData{ - ScriptPublicKey: &ScriptPublicKey{Script: scriptPubKeyClone, Version: dcd.ScriptPublicKey.Version}, - ExtraData: extraDataClone, - } -} - -// Equal returns whether dcd equals to other -func (dcd *DomainCoinbaseData) Equal(other *DomainCoinbaseData) bool { - if dcd == nil || other == nil { - return dcd == other - } - - if !bytes.Equal(dcd.ExtraData, other.ExtraData) { - return false - } - - return dcd.ScriptPublicKey.Equal(other.ScriptPublicKey) -} diff --git a/consensus/model/externalapi/coinbase_clone_test.go b/consensus/model/externalapi/coinbase_clone_test.go deleted file mode 100644 index df465d2d..00000000 --- a/consensus/model/externalapi/coinbase_clone_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package externalapi - -import ( - "reflect" - "testing" -) - -func initTestCoinbaseDataStructsForClone() []*DomainCoinbaseData { - - tests := []*DomainCoinbaseData{ - { - &ScriptPublicKey{Script: []byte{1, 2, 3, 4, 5, 6}, Version: 0}, - []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF}, - }, { - &ScriptPublicKey{Script: []byte{0, 0, 0, 0, 55}, Version: 0}, - []byte{0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF}, - }, - } - return tests -} - -func TestDomainCoinbaseData_Clone(t *testing.T) { - - coinbaseData := initTestCoinbaseDataStructsForClone() - for i, coinbase := range coinbaseData { - coinbaseClone := coinbase.Clone() - if !reflect.DeepEqual(coinbase, coinbaseClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) - } - } -} diff --git a/consensus/model/externalapi/consensus.go b/consensus/model/externalapi/consensus.go deleted file mode 100644 index da065adb..00000000 --- a/consensus/model/externalapi/consensus.go +++ /dev/null @@ -1,59 +0,0 @@ -package externalapi - -// Consensus maintains the 
current core state of the node -type Consensus interface { - Init(skipAddingGenesis bool) error - BuildBlock(coinbaseData *DomainCoinbaseData, transactions []*DomainTransaction) (*DomainBlock, error) - BuildBlockTemplate(coinbaseData *DomainCoinbaseData, transactions []*DomainTransaction) (*DomainBlockTemplate, error) - ValidateAndInsertBlock(block *DomainBlock, updateVirtual bool) error - ValidateAndInsertBlockWithTrustedData(block *BlockWithTrustedData, validateUTXO bool) error - ValidateTransactionAndPopulateWithConsensusData(transaction *DomainTransaction) error - ImportPruningPoints(pruningPoints []BlockHeader) error - BuildPruningPointProof() (*PruningPointProof, error) - ValidatePruningPointProof(pruningPointProof *PruningPointProof) error - ApplyPruningPointProof(pruningPointProof *PruningPointProof) error - - GetBlock(blockHash *DomainHash) (*DomainBlock, bool, error) - GetBlockEvenIfHeaderOnly(blockHash *DomainHash) (*DomainBlock, error) - GetBlockHeader(blockHash *DomainHash) (BlockHeader, error) - GetBlockInfo(blockHash *DomainHash) (*BlockInfo, error) - GetBlockRelations(blockHash *DomainHash) (parents []*DomainHash, children []*DomainHash, err error) - GetBlockAcceptanceData(blockHash *DomainHash) (AcceptanceData, error) - GetBlocksAcceptanceData(blockHashes []*DomainHash) ([]AcceptanceData, error) - - GetHashesBetween(lowHash, highHash *DomainHash, maxBlocks uint64) (hashes []*DomainHash, actualHighHash *DomainHash, err error) - GetAnticone(blockHash, contextHash *DomainHash, maxBlocks uint64) (hashes []*DomainHash, err error) - GetMissingBlockBodyHashes(highHash *DomainHash) ([]*DomainHash, error) - GetPruningPointUTXOs(expectedPruningPointHash *DomainHash, fromOutpoint *DomainOutpoint, limit int) ([]*OutpointAndUTXOEntryPair, error) - GetVirtualUTXOs(expectedVirtualParents []*DomainHash, fromOutpoint *DomainOutpoint, limit int) ([]*OutpointAndUTXOEntryPair, error) - PruningPoint() (*DomainHash, error) - PruningPointHeaders() ([]BlockHeader, error) - PruningPointAndItsAnticone() ([]*DomainHash, error) - ClearImportedPruningPointData() error - AppendImportedPruningPointUTXOs(outpointAndUTXOEntryPairs []*OutpointAndUTXOEntryPair) error - ValidateAndInsertImportedPruningPoint(newPruningPoint *DomainHash) error - GetVirtualSelectedParent() (*DomainHash, error) - CreateBlockLocatorFromPruningPoint(highHash *DomainHash, limit uint32) (BlockLocator, error) - CreateHeadersSelectedChainBlockLocator(lowHash, highHash *DomainHash) (BlockLocator, error) - CreateFullHeadersSelectedChainBlockLocator() (BlockLocator, error) - GetSyncInfo() (*SyncInfo, error) - Tips() ([]*DomainHash, error) - GetVirtualInfo() (*VirtualInfo, error) - GetVirtualDAAScore() (uint64, error) - IsValidPruningPoint(blockHash *DomainHash) (bool, error) - ArePruningPointsViolatingFinality(pruningPoints []BlockHeader) (bool, error) - GetVirtualSelectedParentChainFromBlock(blockHash *DomainHash) (*SelectedChainPath, error) - IsInSelectedParentChainOf(blockHashA *DomainHash, blockHashB *DomainHash) (bool, error) - GetHeadersSelectedTip() (*DomainHash, error) - Anticone(blockHash *DomainHash) ([]*DomainHash, error) - EstimateNetworkHashesPerSecond(startHash *DomainHash, windowSize int) (uint64, error) - PopulateMass(transaction *DomainTransaction) - ResolveVirtual(progressReportCallback func(uint64, uint64)) error - BlockDAAWindowHashes(blockHash *DomainHash) ([]*DomainHash, error) - TrustedDataDataDAAHeader(trustedBlockHash, daaBlockHash *DomainHash, daaBlockWindowIndex uint64) (*TrustedDataDataDAAHeader, error) - 
TrustedBlockAssociatedGHOSTDAGDataBlockHashes(blockHash *DomainHash) ([]*DomainHash, error) - TrustedGHOSTDAGData(blockHash *DomainHash) (*BlockGHOSTDAGData, error) - IsChainBlock(blockHash *DomainHash) (bool, error) - VirtualMergeDepthRoot() (*DomainHash, error) - IsNearlySynced() (bool, error) -} diff --git a/consensus/model/externalapi/consensus_events.go b/consensus/model/externalapi/consensus_events.go deleted file mode 100644 index 54c01584..00000000 --- a/consensus/model/externalapi/consensus_events.go +++ /dev/null @@ -1,30 +0,0 @@ -package externalapi - -// ConsensusEvent is an interface type that is implemented by all events raised by consensus -type ConsensusEvent interface { - isConsensusEvent() -} - -// BlockAdded is an event raised by consensus when a block was added to the dag -type BlockAdded struct { - Block *DomainBlock -} - -func (*BlockAdded) isConsensusEvent() {} - -// VirtualChangeSet is an event raised by consensus when virtual changes -type VirtualChangeSet struct { - VirtualSelectedParentChainChanges *SelectedChainPath - VirtualUTXODiff UTXODiff - VirtualParents []*DomainHash - VirtualSelectedParentBlueScore uint64 - VirtualDAAScore uint64 -} - -func (*VirtualChangeSet) isConsensusEvent() {} - -// SelectedChainPath is a path the of the selected chains between two blocks. -type SelectedChainPath struct { - Added []*DomainHash - Removed []*DomainHash -} diff --git a/consensus/model/externalapi/ghostdag.go b/consensus/model/externalapi/ghostdag.go deleted file mode 100644 index d92918db..00000000 --- a/consensus/model/externalapi/ghostdag.go +++ /dev/null @@ -1,67 +0,0 @@ -package externalapi - -import ( - "math/big" -) - -// KType defines the size of GHOSTDAG consensus algorithm K parameter. -type KType byte - -// BlockGHOSTDAGData represents GHOSTDAG data for some block -type BlockGHOSTDAGData struct { - blueScore uint64 - blueWork *big.Int - selectedParent *DomainHash - mergeSetBlues []*DomainHash - mergeSetReds []*DomainHash - bluesAnticoneSizes map[DomainHash]KType -} - -// NewBlockGHOSTDAGData creates a new instance of BlockGHOSTDAGData -func NewBlockGHOSTDAGData( - blueScore uint64, - blueWork *big.Int, - selectedParent *DomainHash, - mergeSetBlues []*DomainHash, - mergeSetReds []*DomainHash, - bluesAnticoneSizes map[DomainHash]KType) *BlockGHOSTDAGData { - - return &BlockGHOSTDAGData{ - blueScore: blueScore, - blueWork: blueWork, - selectedParent: selectedParent, - mergeSetBlues: mergeSetBlues, - mergeSetReds: mergeSetReds, - bluesAnticoneSizes: bluesAnticoneSizes, - } -} - -// BlueScore returns the BlueScore of the block -func (bgd *BlockGHOSTDAGData) BlueScore() uint64 { - return bgd.blueScore -} - -// BlueWork returns the BlueWork of the block -func (bgd *BlockGHOSTDAGData) BlueWork() *big.Int { - return bgd.blueWork -} - -// SelectedParent returns the SelectedParent of the block -func (bgd *BlockGHOSTDAGData) SelectedParent() *DomainHash { - return bgd.selectedParent -} - -// MergeSetBlues returns the MergeSetBlues of the block (not a copy) -func (bgd *BlockGHOSTDAGData) MergeSetBlues() []*DomainHash { - return bgd.mergeSetBlues -} - -// MergeSetReds returns the MergeSetReds of the block (not a copy) -func (bgd *BlockGHOSTDAGData) MergeSetReds() []*DomainHash { - return bgd.mergeSetReds -} - -// BluesAnticoneSizes returns a map between the blocks in its MergeSetBlues and the size of their anticone -func (bgd *BlockGHOSTDAGData) BluesAnticoneSizes() map[DomainHash]KType { - return bgd.bluesAnticoneSizes -} diff --git a/consensus/model/externalapi/hash.go 
b/consensus/model/externalapi/hash.go deleted file mode 100644 index 1ea8a900..00000000 --- a/consensus/model/externalapi/hash.go +++ /dev/null @@ -1,123 +0,0 @@ -package externalapi - -import ( - "bytes" - "encoding/hex" - - "github.com/pkg/errors" -) - -// DomainHashSize of array used to store hashes. -const DomainHashSize = 32 - -// DomainHash is the domain representation of a Hash -type DomainHash struct { - hashArray [DomainHashSize]byte -} - -// NewZeroHash returns a DomainHash that represents the zero value (0x000000...000) -func NewZeroHash() *DomainHash { - return &DomainHash{hashArray: [32]byte{}} -} - -// NewDomainHashFromByteArray constructs a new DomainHash out of a byte array -func NewDomainHashFromByteArray(hashBytes *[DomainHashSize]byte) *DomainHash { - return &DomainHash{ - hashArray: *hashBytes, - } -} - -// NewDomainHashFromByteSlice constructs a new DomainHash out of a byte slice. -// Returns an error if the length of the byte slice is not exactly `DomainHashSize` -func NewDomainHashFromByteSlice(hashBytes []byte) (*DomainHash, error) { - if len(hashBytes) != DomainHashSize { - return nil, errors.Errorf("invalid hash size. Want: %d, got: %d", - DomainHashSize, len(hashBytes)) - } - domainHash := DomainHash{ - hashArray: [DomainHashSize]byte{}, - } - copy(domainHash.hashArray[:], hashBytes) - return &domainHash, nil -} - -// NewDomainHashFromString constructs a new DomainHash out of a hex-encoded string. -// Returns an error if the length of the string is not exactly `DomainHashSize * 2` -func NewDomainHashFromString(hashString string) (*DomainHash, error) { - expectedLength := DomainHashSize * 2 - // Return error if hash string is too long. - if len(hashString) != expectedLength { - return nil, errors.Errorf("hash string length is %d, while it should be be %d", - len(hashString), expectedLength) - } - - hashBytes, err := hex.DecodeString(hashString) - if err != nil { - return nil, errors.WithStack(err) - } - - return NewDomainHashFromByteSlice(hashBytes) -} - -// String returns the Hash as the hexadecimal string of the hash. -func (hash DomainHash) String() string { - return hex.EncodeToString(hash.hashArray[:]) -} - -// ByteArray returns the bytes in this hash represented as a byte array. -// The hash bytes are cloned, therefore it is safe to modify the resulting array. -func (hash *DomainHash) ByteArray() *[DomainHashSize]byte { - arrayClone := hash.hashArray - return &arrayClone -} - -// ByteSlice returns the bytes in this hash represented as a byte slice. -// The hash bytes are cloned, therefore it is safe to modify the resulting slice. -func (hash *DomainHash) ByteSlice() []byte { - return hash.ByteArray()[:] -} - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. -var _ DomainHash = DomainHash{hashArray: [DomainHashSize]byte{}} - -// Equal returns whether hash equals to other -func (hash *DomainHash) Equal(other *DomainHash) bool { - if hash == nil || other == nil { - return hash == other - } - - return hash.hashArray == other.hashArray -} - -// Less returns true if hash is less than other -func (hash *DomainHash) Less(other *DomainHash) bool { - return bytes.Compare(hash.hashArray[:], other.hashArray[:]) < 0 -} - -// LessOrEqual returns true if hash is smaller or equal to other -func (hash *DomainHash) LessOrEqual(other *DomainHash) bool { - return bytes.Compare(hash.hashArray[:], other.hashArray[:]) <= 0 -} - -// CloneHashes returns a clone of the given hashes slice. 
-// Note: since DomainHash is a read-only type, the clone is shallow -func CloneHashes(hashes []*DomainHash) []*DomainHash { - clone := make([]*DomainHash, len(hashes)) - copy(clone, hashes) - return clone -} - -// HashesEqual returns whether the given hash slices are equal. -func HashesEqual(a, b []*DomainHash) bool { - if len(a) != len(b) { - return false - } - - for i, hash := range a { - if !hash.Equal(b[i]) { - return false - } - } - return true -} diff --git a/consensus/model/externalapi/hash_clone_equal_test.go b/consensus/model/externalapi/hash_clone_equal_test.go deleted file mode 100644 index 7e47b035..00000000 --- a/consensus/model/externalapi/hash_clone_equal_test.go +++ /dev/null @@ -1,79 +0,0 @@ -package externalapi - -import ( - "testing" -) - -type testHashToCompare struct { - hash *DomainHash - expectedResult bool -} - -type testHashStruct struct { - baseHash *DomainHash - hashesToCompareTo []testHashToCompare -} - -func initTestDomainHashForEqual() []*testHashStruct { - tests := []*testHashStruct{ - { - baseHash: nil, - hashesToCompareTo: []testHashToCompare{ - { - hash: nil, - expectedResult: true, - }, { - hash: NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}), - expectedResult: false, - }, - }, - }, { - baseHash: NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF}), - hashesToCompareTo: []testHashToCompare{ - { - hash: nil, - expectedResult: false, - }, { - hash: NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}), - expectedResult: false, - }, { - hash: NewDomainHashFromByteArray(&[DomainHashSize]byte{ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF}), - expectedResult: true, - }, - }, - }, - } - return tests -} - -func TestDomainHash_Equal(t *testing.T) { - hashTests := initTestDomainHashForEqual() - for i, test := range hashTests { - for j, subTest := range test.hashesToCompareTo { - result1 := test.baseHash.Equal(subTest.hash) - if result1 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) - } - result2 := subTest.hash.Equal(test.baseHash) - if result2 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) - } - } - } -} diff --git a/consensus/model/externalapi/pruning_point_proof.go b/consensus/model/externalapi/pruning_point_proof.go deleted file mode 100644 index bee119c4..00000000 --- a/consensus/model/externalapi/pruning_point_proof.go +++ /dev/null @@ -1,6 +0,0 @@ -package externalapi - -// PruningPointProof is the data structure holding the pruning point proof -type PruningPointProof struct { - Headers [][]BlockHeader -} diff --git a/consensus/model/externalapi/readonlyutxoset.go b/consensus/model/externalapi/readonlyutxoset.go deleted file mode 100644 index 61b1827e..00000000 --- 
a/consensus/model/externalapi/readonlyutxoset.go +++ /dev/null @@ -1,10 +0,0 @@ -package externalapi - -// ReadOnlyUTXOSetIterator is an iterator over all entries in a -// ReadOnlyUTXOSet -type ReadOnlyUTXOSetIterator interface { - First() bool - Next() bool - Get() (outpoint *DomainOutpoint, utxoEntry UTXOEntry, err error) - Close() error -} diff --git a/consensus/model/externalapi/subnetworkid.go b/consensus/model/externalapi/subnetworkid.go deleted file mode 100644 index 06069c63..00000000 --- a/consensus/model/externalapi/subnetworkid.go +++ /dev/null @@ -1,33 +0,0 @@ -package externalapi - -import "encoding/hex" - -// DomainSubnetworkIDSize is the size of the array used to store subnetwork IDs. -const DomainSubnetworkIDSize = 20 - -// DomainSubnetworkID is the domain representation of a Subnetwork ID -type DomainSubnetworkID [DomainSubnetworkIDSize]byte - -// String stringifies a subnetwork ID. -func (id DomainSubnetworkID) String() string { - return hex.EncodeToString(id[:]) -} - -// Clone returns a clone of DomainSubnetworkID -func (id *DomainSubnetworkID) Clone() *DomainSubnetworkID { - idClone := *id - return &idClone -} - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. -var _ DomainSubnetworkID = [DomainSubnetworkIDSize]byte{} - -// Equal returns whether id equals to other -func (id *DomainSubnetworkID) Equal(other *DomainSubnetworkID) bool { - if id == nil || other == nil { - return id == other - } - - return *id == *other -} diff --git a/consensus/model/externalapi/subnetworkid_clone_equal_test.go b/consensus/model/externalapi/subnetworkid_clone_equal_test.go deleted file mode 100644 index dc6d7c5f..00000000 --- a/consensus/model/externalapi/subnetworkid_clone_equal_test.go +++ /dev/null @@ -1,99 +0,0 @@ -package externalapi - -import ( - "reflect" - "testing" -) - -func initTestDomainSubnetworkIDForClone() []*DomainSubnetworkID { - - tests := []*DomainSubnetworkID{{1, 0, 0xFF, 0}, {0, 1, 0xFF, 1}, - {0, 1, 0xFF, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}} - return tests -} - -type testDomainSubnetworkIDToCompare struct { - domainSubnetworkID *DomainSubnetworkID - expectedResult bool -} - -type testDomainSubnetworkIDStruct struct { - baseDomainSubnetworkID *DomainSubnetworkID - domainSubnetworkIDToCompareTo []testDomainSubnetworkIDToCompare -} - -func initTestDomainSubnetworkIDForEqual() []testDomainSubnetworkIDStruct { - tests := []testDomainSubnetworkIDStruct{ - { - baseDomainSubnetworkID: nil, - domainSubnetworkIDToCompareTo: []testDomainSubnetworkIDToCompare{ - { - domainSubnetworkID: &DomainSubnetworkID{255, 255, 0xFF, 0}, - expectedResult: false, - }, - { - domainSubnetworkID: nil, - expectedResult: true, - }, - }, - }, { - baseDomainSubnetworkID: &DomainSubnetworkID{0}, - domainSubnetworkIDToCompareTo: []testDomainSubnetworkIDToCompare{ - { - domainSubnetworkID: &DomainSubnetworkID{255, 254, 0xFF, 0}, - expectedResult: false, - }, - { - domainSubnetworkID: &DomainSubnetworkID{0}, - expectedResult: true, - }, - }, - }, { - baseDomainSubnetworkID: &DomainSubnetworkID{0, 1, 0xFF, 1, 1, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}, - domainSubnetworkIDToCompareTo: []testDomainSubnetworkIDToCompare{ - { - domainSubnetworkID: &DomainSubnetworkID{0, 1, 0xFF, 1, 1, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, - 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF}, - expectedResult: true, - }, - { - domainSubnetworkID: &DomainSubnetworkID{0, 
10, 0xFF, 0}, - expectedResult: false, - }, - }, - }, - } - return tests -} - -func TestDomainSubnetworkID_Equal(t *testing.T) { - - domainSubnetworkIDs := initTestDomainSubnetworkIDForEqual() - for i, test := range domainSubnetworkIDs { - for j, subTest := range test.domainSubnetworkIDToCompareTo { - result1 := test.baseDomainSubnetworkID.Equal(subTest.domainSubnetworkID) - if result1 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) - } - result2 := subTest.domainSubnetworkID.Equal(test.baseDomainSubnetworkID) - if result2 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) - } - } - } -} - -func TestDomainSubnetworkID_Clone(t *testing.T) { - - domainSubnetworkIDs := initTestDomainSubnetworkIDForClone() - for i, domainSubnetworkID := range domainSubnetworkIDs { - domainSubnetworkIDClone := domainSubnetworkID.Clone() - if !domainSubnetworkIDClone.Equal(domainSubnetworkID) { - t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) - } - if !reflect.DeepEqual(domainSubnetworkID, domainSubnetworkIDClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) - } - } -} diff --git a/consensus/model/externalapi/sync.go b/consensus/model/externalapi/sync.go deleted file mode 100644 index 797401e9..00000000 --- a/consensus/model/externalapi/sync.go +++ /dev/null @@ -1,36 +0,0 @@ -package externalapi - -// SyncInfo holds info about the current sync state of the consensus -type SyncInfo struct { - HeaderCount uint64 - BlockCount uint64 -} - -// Clone returns a clone of SyncInfo -func (si *SyncInfo) Clone() *SyncInfo { - return &SyncInfo{ - HeaderCount: si.HeaderCount, - BlockCount: si.BlockCount, - } -} - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. 
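// For example (a sketch, assuming only the SyncInfo definition above; the added
// field name is hypothetical): introducing a third field such as `DAAScore uint64`
// would make the positional literal below stop compiling, which is the cue to
// revisit Equal and Clone as well:
//
//	type SyncInfo struct{ HeaderCount, BlockCount, DAAScore uint64 }
//	var _ = SyncInfo{0, 0} // compile error: too few values in struct literal
//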
-var _ = SyncInfo{0, 0} - -// Equal returns whether si equals to other -func (si *SyncInfo) Equal(other *SyncInfo) bool { - if si == nil || other == nil { - return si == other - } - - if si.HeaderCount != other.HeaderCount { - return false - } - - if si.BlockCount != other.BlockCount { - return false - } - - return true -} diff --git a/consensus/model/externalapi/sync_equal_clone_test.go b/consensus/model/externalapi/sync_equal_clone_test.go deleted file mode 100644 index d4959aca..00000000 --- a/consensus/model/externalapi/sync_equal_clone_test.go +++ /dev/null @@ -1,99 +0,0 @@ -package externalapi - -import ( - "reflect" - "testing" -) - -func initTestSyncInfoForClone() []*SyncInfo { - - tests := []*SyncInfo{{ - 0xF, - 0xF}} - return tests -} - -type testSyncInfoToCompare struct { - syncInfo *SyncInfo - expectedResult bool -} - -type testSyncInfoStruct struct { - baseSyncInfo *SyncInfo - syncInfoToCompareTo []testSyncInfoToCompare -} - -func initTestSyncInfoForEqual() []*testSyncInfoStruct { - tests := []*testSyncInfoStruct{ - { - baseSyncInfo: nil, - syncInfoToCompareTo: []testSyncInfoToCompare{ - { - syncInfo: &SyncInfo{ - 0xF, - 0xF}, - expectedResult: false, - }, { - syncInfo: nil, - expectedResult: true, - }, - }}, { - baseSyncInfo: &SyncInfo{ - 0xF, - 0xF}, - syncInfoToCompareTo: []testSyncInfoToCompare{ - { - syncInfo: &SyncInfo{ - 0xF, - 0xF}, - expectedResult: true, - }, - { - syncInfo: &SyncInfo{ - 0xF1, - 0xF}, - expectedResult: false, - }, { - syncInfo: nil, - expectedResult: false, - }, { - syncInfo: &SyncInfo{ - 0xF, - 0xF1}, - expectedResult: false}, - }, - }, - } - return tests -} - -func TestSyncInfo_Equal(t *testing.T) { - - testSyncState := initTestSyncInfoForEqual() - for i, test := range testSyncState { - for j, subTest := range test.syncInfoToCompareTo { - result1 := test.baseSyncInfo.Equal(subTest.syncInfo) - if result1 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) - } - result2 := subTest.syncInfo.Equal(test.baseSyncInfo) - if result2 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) - } - } - } -} - -func TestSyncInfo_Clone(t *testing.T) { - - testSyncInfo := initTestSyncInfoForClone() - for i, syncInfo := range testSyncInfo { - syncStateClone := syncInfo.Clone() - if !syncStateClone.Equal(syncInfo) { - t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) - } - if !reflect.DeepEqual(syncInfo, syncStateClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) - } - } -} diff --git a/consensus/model/externalapi/transaction.go b/consensus/model/externalapi/transaction.go deleted file mode 100644 index b12baec4..00000000 --- a/consensus/model/externalapi/transaction.go +++ /dev/null @@ -1,363 +0,0 @@ -package externalapi - -import ( - "bytes" - "encoding/binary" - "fmt" - - "github.com/pkg/errors" -) - -// DomainTransaction represents a Kaspa transaction -type DomainTransaction struct { - Version uint16 - Inputs []*DomainTransactionInput - Outputs []*DomainTransactionOutput - LockTime uint64 - SubnetworkID DomainSubnetworkID - Gas uint64 - Payload []byte - - Fee uint64 - Mass uint64 - - // ID is a field that is used to cache the transaction ID. 
- // Always use consensushashing.TransactionID instead of accessing this field directly - ID *DomainTransactionID -} - -// Clone returns a clone of DomainTransaction -func (tx *DomainTransaction) Clone() *DomainTransaction { - payloadClone := make([]byte, len(tx.Payload)) - copy(payloadClone, tx.Payload) - - inputsClone := make([]*DomainTransactionInput, len(tx.Inputs)) - for i, input := range tx.Inputs { - inputsClone[i] = input.Clone() - } - - outputsClone := make([]*DomainTransactionOutput, len(tx.Outputs)) - for i, output := range tx.Outputs { - outputsClone[i] = output.Clone() - } - - var idClone *DomainTransactionID - if tx.ID != nil { - idClone = tx.ID.Clone() - } - - return &DomainTransaction{ - Version: tx.Version, - Inputs: inputsClone, - Outputs: outputsClone, - LockTime: tx.LockTime, - SubnetworkID: *tx.SubnetworkID.Clone(), - Gas: tx.Gas, - Payload: payloadClone, - Fee: tx.Fee, - Mass: tx.Mass, - ID: idClone, - } -} - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. -var _ = DomainTransaction{0, []*DomainTransactionInput{}, []*DomainTransactionOutput{}, 0, - DomainSubnetworkID{}, 0, []byte{}, 0, 0, - &DomainTransactionID{}} - -// Equal returns whether tx equals to other -func (tx *DomainTransaction) Equal(other *DomainTransaction) bool { - if tx == nil || other == nil { - return tx == other - } - - if tx.Version != other.Version { - return false - } - - if len(tx.Inputs) != len(other.Inputs) { - return false - } - - for i, input := range tx.Inputs { - if !input.Equal(other.Inputs[i]) { - return false - } - } - - if len(tx.Outputs) != len(other.Outputs) { - return false - } - - for i, output := range tx.Outputs { - if !output.Equal(other.Outputs[i]) { - return false - } - } - - if tx.LockTime != other.LockTime { - return false - } - - if !tx.SubnetworkID.Equal(&other.SubnetworkID) { - return false - } - - if tx.Gas != other.Gas { - return false - } - - if !bytes.Equal(tx.Payload, other.Payload) { - return false - } - - if tx.Fee != 0 && other.Fee != 0 && tx.Fee != other.Fee { - panic(errors.New("identical transactions should always have the same fee")) - } - - if tx.Mass != 0 && other.Mass != 0 && tx.Mass != other.Mass { - panic(errors.New("identical transactions should always have the same mass")) - } - - if tx.ID != nil && other.ID != nil && !tx.ID.Equal(other.ID) { - panic(errors.New("identical transactions should always have the same ID")) - } - - return true -} - -// DomainTransactionInput represents a Kaspa transaction input -type DomainTransactionInput struct { - PreviousOutpoint DomainOutpoint - SignatureScript []byte - Sequence uint64 - SigOpCount byte - - UTXOEntry UTXOEntry -} - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. 
-var _ = &DomainTransactionInput{DomainOutpoint{}, []byte{}, 0, 0, nil} - -// Equal returns whether input equals to other -func (input *DomainTransactionInput) Equal(other *DomainTransactionInput) bool { - if input == nil || other == nil { - return input == other - } - - if !input.PreviousOutpoint.Equal(&other.PreviousOutpoint) { - return false - } - - if !bytes.Equal(input.SignatureScript, other.SignatureScript) { - return false - } - - if input.Sequence != other.Sequence { - return false - } - - if input.SigOpCount != other.SigOpCount { - return false - } - - if input.UTXOEntry != nil && other.UTXOEntry != nil && !input.UTXOEntry.Equal(other.UTXOEntry) { - panic(errors.New("identical inputs should always have the same UTXO entry")) - } - - return true -} - -// Clone returns a clone of DomainTransactionInput -func (input *DomainTransactionInput) Clone() *DomainTransactionInput { - signatureScriptClone := make([]byte, len(input.SignatureScript)) - copy(signatureScriptClone, input.SignatureScript) - - return &DomainTransactionInput{ - PreviousOutpoint: *input.PreviousOutpoint.Clone(), - SignatureScript: signatureScriptClone, - Sequence: input.Sequence, - SigOpCount: input.SigOpCount, - UTXOEntry: input.UTXOEntry, - } -} - -// DomainOutpoint represents a Kaspa transaction outpoint -type DomainOutpoint struct { - TransactionID DomainTransactionID - Index uint32 -} - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. -var _ = DomainOutpoint{DomainTransactionID{}, 0} - -// Equal returns whether op equals to other -func (op *DomainOutpoint) Equal(other *DomainOutpoint) bool { - if op == nil || other == nil { - return op == other - } - - return *op == *other -} - -// Clone returns a clone of DomainOutpoint -func (op *DomainOutpoint) Clone() *DomainOutpoint { - return &DomainOutpoint{ - TransactionID: *op.TransactionID.Clone(), - Index: op.Index, - } -} - -// String stringifies an outpoint. -func (op DomainOutpoint) String() string { - return fmt.Sprintf("(%s: %d)", op.TransactionID, op.Index) -} - -// NewDomainOutpoint instantiates a new DomainOutpoint with the given id and index -func NewDomainOutpoint(id *DomainTransactionID, index uint32) *DomainOutpoint { - return &DomainOutpoint{ - TransactionID: *id, - Index: index, - } -} - -// ScriptPublicKey represents a Kaspad ScriptPublicKey -type ScriptPublicKey struct { - Script []byte - Version uint16 -} - -// Equal returns whether spk equals to other -func (spk *ScriptPublicKey) Equal(other *ScriptPublicKey) bool { - if spk == nil || other == nil { - return spk == other - } - - if spk.Version != other.Version { - return false - } - - return bytes.Equal(spk.Script, other.Script) -} - -// String stringifies a ScriptPublicKey. 
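// The string form is simply the two little-endian version bytes followed by the
// raw script bytes, so it round-trips through NewScriptPublicKeyFromString. A
// minimal sketch, assuming only the types and functions in this file:
//
//	spk := &ScriptPublicKey{Script: []byte{0xAA, 0xBB}, Version: 1}
//	s := spk.String()                       // "\x01\x00\xaa\xbb"
//	back := NewScriptPublicKeyFromString(s) // decodes Version: 1, Script: {0xAA, 0xBB}
//	_ = back.Equal(spk)                     // true
//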
-func (spk *ScriptPublicKey) String() string { - var versionBytes = make([]byte, 2) // uint16 - binary.LittleEndian.PutUint16(versionBytes, spk.Version) - versionString := string(versionBytes) - scriptString := string(spk.Script) - return versionString + scriptString -} - -// NewScriptPublicKeyFromString converts the given string to a scriptPublicKey -func NewScriptPublicKeyFromString(ScriptPublicKeyString string) *ScriptPublicKey { - bytes := []byte(ScriptPublicKeyString) - version := binary.LittleEndian.Uint16(bytes[:2]) - script := bytes[2:] - return &ScriptPublicKey{Script: script, Version: version} -} - -// DomainTransactionOutput represents a Kaspad transaction output -type DomainTransactionOutput struct { - Value uint64 - ScriptPublicKey *ScriptPublicKey -} - -// If this doesn't compile, it means the type definition has been changed, so it's -// an indication to update Equal and Clone accordingly. -var _ = DomainTransactionOutput{0, &ScriptPublicKey{Script: []byte{}, Version: 0}} - -// Equal returns whether output equals to other -func (output *DomainTransactionOutput) Equal(other *DomainTransactionOutput) bool { - if output == nil || other == nil { - return output == other - } - - if output.Value != other.Value { - return false - } - - return output.ScriptPublicKey.Equal(other.ScriptPublicKey) -} - -// Clone returns a clone of DomainTransactionOutput -func (output *DomainTransactionOutput) Clone() *DomainTransactionOutput { - scriptPublicKeyClone := &ScriptPublicKey{ - Script: make([]byte, len(output.ScriptPublicKey.Script)), - Version: output.ScriptPublicKey.Version} - copy(scriptPublicKeyClone.Script, output.ScriptPublicKey.Script) - - return &DomainTransactionOutput{ - Value: output.Value, - ScriptPublicKey: scriptPublicKeyClone, - } -} - -// DomainTransactionID represents the ID of a Kaspa transaction -type DomainTransactionID DomainHash - -// NewDomainTransactionIDFromByteArray constructs a new TransactionID out of a byte array -func NewDomainTransactionIDFromByteArray(transactionIDBytes *[DomainHashSize]byte) *DomainTransactionID { - return (*DomainTransactionID)(NewDomainHashFromByteArray(transactionIDBytes)) -} - -// NewDomainTransactionIDFromByteSlice constructs a new TransactionID out of a byte slice -// Returns an error if the length of the byte slice is not exactly `DomainHashSize` -func NewDomainTransactionIDFromByteSlice(transactionIDBytes []byte) (*DomainTransactionID, error) { - hash, err := NewDomainHashFromByteSlice(transactionIDBytes) - if err != nil { - return nil, err - } - return (*DomainTransactionID)(hash), nil -} - -// NewDomainTransactionIDFromString constructs a new TransactionID out of a string -// Returns an error if the length of the string is not exactly `DomainHashSize * 2` -func NewDomainTransactionIDFromString(transactionIDString string) (*DomainTransactionID, error) { - hash, err := NewDomainHashFromString(transactionIDString) - if err != nil { - return nil, err - } - return (*DomainTransactionID)(hash), nil -} - -// String stringifies a transaction ID. 
-func (id DomainTransactionID) String() string { - return DomainHash(id).String() -} - -// Clone returns a clone of DomainTransactionID -func (id *DomainTransactionID) Clone() *DomainTransactionID { - idClone := *id - return &idClone -} - -// Equal returns whether id equals to other -func (id *DomainTransactionID) Equal(other *DomainTransactionID) bool { - return (*DomainHash)(id).Equal((*DomainHash)(other)) -} - -// Less returns true if id is less than other -func (id *DomainTransactionID) Less(other *DomainTransactionID) bool { - return (*DomainHash)(id).Less((*DomainHash)(other)) -} - -// LessOrEqual returns true if id is smaller or equal to other -func (id *DomainTransactionID) LessOrEqual(other *DomainTransactionID) bool { - return (*DomainHash)(id).LessOrEqual((*DomainHash)(other)) -} - -// ByteArray returns the bytes in this transactionID represented as a byte array. -// The transactionID bytes are cloned, therefore it is safe to modify the resulting array. -func (id *DomainTransactionID) ByteArray() *[DomainHashSize]byte { - return (*DomainHash)(id).ByteArray() -} - -// ByteSlice returns the bytes in this transactionID represented as a byte slice. -// The transactionID bytes are cloned, therefore it is safe to modify the resulting slice. -func (id *DomainTransactionID) ByteSlice() []byte { - return (*DomainHash)(id).ByteSlice() -} diff --git a/consensus/model/externalapi/transaction_equal_clone_test.go b/consensus/model/externalapi/transaction_equal_clone_test.go deleted file mode 100644 index 3534ae07..00000000 --- a/consensus/model/externalapi/transaction_equal_clone_test.go +++ /dev/null @@ -1,1107 +0,0 @@ -package externalapi_test - -import ( - "reflect" - "testing" - - "github.com/Qitmeer/qng/consensus/model/externalapi" - "github.com/Qitmeer/qng/consensus/utils/utxo" -) - -// Changed fields of a test struct compared to a base test struct marked as "changed" and -// pointing in some cases name changed struct field - -type transactionToCompare struct { - tx *externalapi.DomainTransaction - expectedResult bool - expectsPanic bool -} - -type testDomainTransactionStruct struct { - baseTx *externalapi.DomainTransaction - transactionToCompareTo []*transactionToCompare -} - -type transactionInputToCompare struct { - tx *externalapi.DomainTransactionInput - expectedResult bool - expectsPanic bool -} - -type testDomainTransactionInputStruct struct { - baseTx *externalapi.DomainTransactionInput - transactionInputToCompareTo []*transactionInputToCompare -} - -type transactionOutputToCompare struct { - tx *externalapi.DomainTransactionOutput - expectedResult bool -} - -type testDomainTransactionOutputStruct struct { - baseTx *externalapi.DomainTransactionOutput - transactionOutputToCompareTo []*transactionOutputToCompare -} - -type domainOutpointToCompare struct { - domainOutpoint *externalapi.DomainOutpoint - expectedResult bool -} - -type testDomainOutpointStruct struct { - baseDomainOutpoint *externalapi.DomainOutpoint - domainOutpointToCompareTo []*domainOutpointToCompare -} - -type domainTransactionIDToCompare struct { - domainTransactionID *externalapi.DomainTransactionID - expectedResult bool -} - -type testDomainTransactionIDStruct struct { - baseDomainTransactionID *externalapi.DomainTransactionID - domainTransactionIDToCompareTo []*domainTransactionIDToCompare -} - -func initTestBaseTransaction() *externalapi.DomainTransaction { - - testTx := &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - 
*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, - {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - } - return testTx -} - -func initTestTransactionToCompare() []*transactionToCompare { - - testTx := []*transactionToCompare{{ - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}, //Changed - {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, - {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01, 0x02}, //Changed - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, - {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - 
[]byte{0x01, 0x02}, //Changed - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: true, - }, - { - // ID changed - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), - }, - expectsPanic: true, - }, - { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, - {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 1000000000, //Changed - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: true, - }, { - tx: &externalapi.DomainTransaction{ - 2, //Changed - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 
3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 2, //Changed - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01}), - }, - expectsPanic: true, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 2, //Changed - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}, - {externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, 
Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}, {uint64(0xFFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2, 3}, Version: 0}}}, //changed Outputs - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - nil, //changed - }, - expectedResult: true, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFF0), // Changed sequence - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 3, // Changed SigOpCount - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, 
Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: false, - }, - { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{{externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}}, - []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}, - {uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 3}, Version: 0}}}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 2, // Changed - []byte{0x01}, - 0, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - expectedResult: false, - }, - } - return testTx -} - -func initTestDomainTransactionForClone() []*externalapi.DomainTransaction { - - tests := []*externalapi.DomainTransaction{ - { - Version: 1, - Inputs: []*externalapi.DomainTransactionInput{ - {externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2)}, - }, - Outputs: []*externalapi.DomainTransactionOutput{{uint64(0xFFFF), - &externalapi.ScriptPublicKey{Script: []byte{1, 2}, Version: 0}}}, - LockTime: 1, - SubnetworkID: externalapi.DomainSubnetworkID{0x01}, - Gas: 1, - Payload: []byte{0x01}, - Fee: 5555555555, - Mass: 1, - ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, { - Version: 1, - Inputs: []*externalapi.DomainTransactionInput{}, - Outputs: []*externalapi.DomainTransactionOutput{}, - LockTime: 1, - SubnetworkID: externalapi.DomainSubnetworkID{0x01}, - Gas: 1, - Payload: []byte{0x01}, - Fee: 0, - Mass: 1, - ID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{}), - }, - } - return tests -} - -func initTestDomainTransactionForEqual() []testDomainTransactionStruct { - - tests := []testDomainTransactionStruct{ - { - baseTx: initTestBaseTransaction(), - transactionToCompareTo: initTestTransactionToCompare(), - }, - { - baseTx: nil, - transactionToCompareTo: []*transactionToCompare{{ - tx: nil, - expectedResult: true}}, - }, { - baseTx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{}, - []*externalapi.DomainTransactionOutput{}, - 1, - 
externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 1, - 1, - externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - }, - transactionToCompareTo: []*transactionToCompare{{ - tx: nil, - expectedResult: false, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{}, - []*externalapi.DomainTransactionOutput{}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 0, - []byte{0x01}, - 1, - 1, - nil, - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{}, - []*externalapi.DomainTransactionOutput{}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 1, - 1, - nil, - }, - expectedResult: true, - }, { - tx: &externalapi.DomainTransaction{ - 1, - []*externalapi.DomainTransactionInput{}, - []*externalapi.DomainTransactionOutput{}, - 1, - externalapi.DomainSubnetworkID{0x01}, - 1, - []byte{0x01}, - 2, // Changed fee - 1, - nil, - }, - expectsPanic: true, - }}, - }, - } - return tests -} - -func initTestBaseDomainTransactionInput() *externalapi.DomainTransactionInput { - basetxInput := &externalapi.DomainTransactionInput{ - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), - } - return basetxInput -} - -func initTestDomainTxInputToCompare() []*transactionInputToCompare { - txInput := []*transactionInputToCompare{{ - tx: &externalapi.DomainTransactionInput{ - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), - }, - expectedResult: true, - }, { - tx: &externalapi.DomainTransactionInput{ - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, false, 2), // Changed - }, - expectsPanic: true, - }, { - tx: &externalapi.DomainTransactionInput{ - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - nil, // Changed - }, - expectedResult: true, - }, { - tx: &externalapi.DomainTransactionInput{ - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFF0), // Changed - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionInput{ - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFF0), - 5, // Changed - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), - }, - expectedResult: false, - }, { - tx: 
&externalapi.DomainTransactionInput{ - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3, 4}, // Changed - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionInput{ - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01, 0x02}), 0xFFFF}, // Changed - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionInput{ - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01, 0x02}), 0xFFFF}, // Changed - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(2 /* Changed */, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), // Changed - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionInput{ - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01, 0x02}), 0xFFFF}, // Changed - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(3 /* Changed */, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 3), // Changed - }, - expectedResult: false, - }, { - tx: nil, - expectedResult: false, - }} - return txInput - -} - -func initTestDomainTransactionInputForClone() []*externalapi.DomainTransactionInput { - txInput := []*externalapi.DomainTransactionInput{ - { - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), - }, { - - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFFF), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), - }, { - - externalapi.DomainOutpoint{*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x01}), 0xFFFF}, - []byte{1, 2, 3}, - uint64(0xFFFFFFF0), - 1, - utxo.NewUTXOEntry(1, &externalapi.ScriptPublicKey{Script: []byte{0, 1, 2, 3}, Version: 0}, true, 2), - }} - return txInput -} - -func initTestBaseDomainTransactionOutput() *externalapi.DomainTransactionOutput { - basetxOutput := &externalapi.DomainTransactionOutput{ - 0xFFFFFFFF, - &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}, - } - return basetxOutput -} - -func initTestDomainTransactionOutputForClone() []*externalapi.DomainTransactionOutput { - txInput := []*externalapi.DomainTransactionOutput{ - { - 0xFFFFFFFF, - &externalapi.ScriptPublicKey{Script: []byte{0xF0, 0xFF}, Version: 0}, - }, { - 0xFFFFFFF1, - &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}, - }} - return txInput -} - -func initTestDomainTransactionOutputForEqual() []testDomainTransactionOutputStruct { - tests := []testDomainTransactionOutputStruct{ - { - baseTx: initTestBaseDomainTransactionOutput(), - transactionOutputToCompareTo: []*transactionOutputToCompare{{ - tx: &externalapi.DomainTransactionOutput{ - 0xFFFFFFFF, - 
&externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}}, - expectedResult: true, - }, { - tx: &externalapi.DomainTransactionOutput{ - 0xFFFFFFFF, - &externalapi.ScriptPublicKey{Script: []byte{0xF0, 0xFF}, Version: 0}, // Changed - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionOutput{ - 0xFFFFFFF0, // Changed - &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}, - }, - expectedResult: false, - }, { - tx: nil, - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionOutput{ - 0xFFFFFFF0, // Changed - &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF, 0x01}, Version: 0}}, // Changed - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionOutput{ - 0xFFFFFFF0, // Changed - &externalapi.ScriptPublicKey{Script: []byte{}, Version: 0}, // Changed - }, - expectedResult: false, - }}, - }, - { - baseTx: nil, - transactionOutputToCompareTo: []*transactionOutputToCompare{{ - tx: nil, - expectedResult: true, - }, { - tx: &externalapi.DomainTransactionOutput{ - 0xFFFFFFFF, - &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}}, - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionOutput{ - 0xFFFFFFFF, - &externalapi.ScriptPublicKey{Script: []byte{0xF0, 0xFF}, Version: 0}, // Changed - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionOutput{ - 0xFFFFFFF0, // Changed - &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF}, Version: 0}, - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionOutput{ - 0xFFFFFFF0, - &externalapi.ScriptPublicKey{Script: []byte{0xFF, 0xFF, 0x01}, Version: 0}, // Changed - }, - expectedResult: false, - }, { - tx: &externalapi.DomainTransactionOutput{ - 0xFFFFFFF0, - &externalapi.ScriptPublicKey{Script: []byte{}, Version: 0}, // Changed - }, - expectedResult: false, - }}, - }, - } - return tests -} - -func initTestDomainTransactionInputForEqual() []testDomainTransactionInputStruct { - - tests := []testDomainTransactionInputStruct{ - { - baseTx: initTestBaseDomainTransactionInput(), - transactionInputToCompareTo: initTestDomainTxInputToCompare(), - }, - } - return tests -} - -func TestDomainTransaction_Equal(t *testing.T) { - - txTests := initTestDomainTransactionForEqual() - for i, test := range txTests { - for j, subTest := range test.transactionToCompareTo { - func() { - defer func() { - r := recover() - panicked := r != nil - if panicked != subTest.expectsPanic { - t.Fatalf("Test #%d:%d: panicked expected to be %t but got %t: %s", i, j, subTest.expectsPanic, panicked, r) - } - }() - result1 := test.baseTx.Equal(subTest.tx) - if result1 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) - } - }() - func() { - defer func() { - r := recover() - panicked := r != nil - if panicked != subTest.expectsPanic { - t.Fatalf("Test #%d:%d: panicked expected to be %t but got %t: %s", i, j, subTest.expectsPanic, panicked, r) - } - }() - result2 := subTest.tx.Equal(test.baseTx) - if result2 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) - } - }() - } - } -} - -func TestDomainTransaction_Clone(t *testing.T) { - - txs := initTestDomainTransactionForClone() - for i, tx := range txs { - txClone := tx.Clone() - if !txClone.Equal(tx) { - t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) - } - if !reflect.DeepEqual(tx, txClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the 
original", i) - } - } -} - -func TestDomainTransactionInput_Equal(t *testing.T) { - - txTests := initTestDomainTransactionInputForEqual() - for i, test := range txTests { - for j, subTest := range test.transactionInputToCompareTo { - func() { - defer func() { - r := recover() - panicked := r != nil - if panicked != subTest.expectsPanic { - t.Fatalf("Test #%d:%d: panicked expected to be %t but got %t: %s", i, j, subTest.expectsPanic, panicked, r) - } - }() - result1 := test.baseTx.Equal(subTest.tx) - if result1 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) - } - }() - func() { - defer func() { - r := recover() - panicked := r != nil - if panicked != subTest.expectsPanic { - t.Fatalf("Test #%d:%d: panicked expected to be %t but got %t: %s", i, j, subTest.expectsPanic, panicked, r) - } - }() - result2 := subTest.tx.Equal(test.baseTx) - if result2 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) - } - }() - } - } -} - -func TestDomainTransactionInput_Clone(t *testing.T) { - - txInputs := initTestDomainTransactionInputForClone() - for i, txInput := range txInputs { - txInputClone := txInput.Clone() - if !txInputClone.Equal(txInput) { - t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) - } - if !reflect.DeepEqual(txInput, txInputClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) - } - } -} - -func TestDomainTransactionOutput_Equal(t *testing.T) { - - txTests := initTestDomainTransactionOutputForEqual() - for i, test := range txTests { - for j, subTest := range test.transactionOutputToCompareTo { - result1 := test.baseTx.Equal(subTest.tx) - if result1 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) - } - result2 := subTest.tx.Equal(test.baseTx) - if result2 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) - } - } - } -} - -func TestDomainTransactionOutput_Clone(t *testing.T) { - - txInputs := initTestDomainTransactionOutputForClone() - for i, txOutput := range txInputs { - txOutputClone := txOutput.Clone() - if !txOutputClone.Equal(txOutput) { - t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) - } - if !reflect.DeepEqual(txOutput, txOutputClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) - } - } -} - -func initTestDomainOutpointForClone() []*externalapi.DomainOutpoint { - outpoint := []*externalapi.DomainOutpoint{{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), - 1}, - } - return outpoint -} - -func initTestDomainOutpointForEqual() []testDomainOutpointStruct { - - var outpoint = []*domainOutpointToCompare{{ - domainOutpoint: &externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - 1}, - expectedResult: true, - }, { - domainOutpoint: &externalapi.DomainOutpoint{ - 
*externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), - 1}, - expectedResult: false, - }, { - domainOutpoint: &externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0}), - 2}, - expectedResult: false, - }} - tests := []testDomainOutpointStruct{ - { - baseDomainOutpoint: &externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - 1}, - domainOutpointToCompareTo: outpoint, - }, {baseDomainOutpoint: &externalapi.DomainOutpoint{ - *externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - 1}, - domainOutpointToCompareTo: []*domainOutpointToCompare{{domainOutpoint: nil, expectedResult: false}}, - }, {baseDomainOutpoint: nil, - domainOutpointToCompareTo: []*domainOutpointToCompare{{domainOutpoint: nil, expectedResult: true}}, - }, - } - return tests -} - -func TestDomainOutpoint_Equal(t *testing.T) { - - domainOutpoints := initTestDomainOutpointForEqual() - for i, test := range domainOutpoints { - for j, subTest := range test.domainOutpointToCompareTo { - result1 := test.baseDomainOutpoint.Equal(subTest.domainOutpoint) - if result1 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) - } - result2 := subTest.domainOutpoint.Equal(test.baseDomainOutpoint) - if result2 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) - } - } - } -} - -func TestDomainOutpoint_Clone(t *testing.T) { - - domainOutpoints := initTestDomainOutpointForClone() - for i, outpoint := range domainOutpoints { - outpointClone := outpoint.Clone() - if !outpointClone.Equal(outpoint) { - t.Fatalf("Test #%d:[Equal] clone should be equal to the original", i) - } - if !reflect.DeepEqual(outpoint, outpointClone) { - t.Fatalf("Test #%d:[DeepEqual] clone should be equal to the original", i) - } - } -} - -func initTestDomainTransactionIDForEqual() []testDomainTransactionIDStruct { - - var outpoint = []*domainTransactionIDToCompare{{ - domainTransactionID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - expectedResult: true, - }, { - domainTransactionID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), - 
expectedResult: false, - }, { - domainTransactionID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0}), - expectedResult: false, - }} - tests := []testDomainTransactionIDStruct{ - { - baseDomainTransactionID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02}), - domainTransactionIDToCompareTo: outpoint, - }, { - baseDomainTransactionID: nil, - domainTransactionIDToCompareTo: []*domainTransactionIDToCompare{{ - domainTransactionID: externalapi.NewDomainTransactionIDFromByteArray(&[externalapi.DomainHashSize]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03}), - expectedResult: false, - }}, - }, - } - return tests -} - -func TestDomainTransactionID_Equal(t *testing.T) { - domainDomainTransactionIDs := initTestDomainTransactionIDForEqual() - for i, test := range domainDomainTransactionIDs { - for j, subTest := range test.domainTransactionIDToCompareTo { - result1 := test.baseDomainTransactionID.Equal(subTest.domainTransactionID) - if result1 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result1) - } - result2 := subTest.domainTransactionID.Equal(test.baseDomainTransactionID) - if result2 != subTest.expectedResult { - t.Fatalf("Test #%d:%d: Expected %t but got %t", i, j, subTest.expectedResult, result2) - } - } - } -} diff --git a/consensus/model/externalapi/utxodiff.go b/consensus/model/externalapi/utxodiff.go deleted file mode 100644 index 30d7e1f3..00000000 --- a/consensus/model/externalapi/utxodiff.go +++ /dev/null @@ -1,32 +0,0 @@ -package externalapi - -// UTXOCollection represents a collection of UTXO entries, indexed by their outpoint -type UTXOCollection interface { - Iterator() ReadOnlyUTXOSetIterator - Get(outpoint *DomainOutpoint) (UTXOEntry, bool) - Contains(outpoint *DomainOutpoint) bool - Len() int -} - -// UTXODiff represents the diff between two UTXO sets -type UTXODiff interface { - ToAdd() UTXOCollection - ToRemove() UTXOCollection - WithDiff(other UTXODiff) (UTXODiff, error) - DiffFrom(other UTXODiff) (UTXODiff, error) - Reversed() UTXODiff - CloneMutable() MutableUTXODiff -} - -// MutableUTXODiff represents a UTXO-Diff that can be mutated -type MutableUTXODiff interface { - ToImmutable() UTXODiff - - WithDiff(other UTXODiff) (UTXODiff, error) - DiffFrom(other UTXODiff) (UTXODiff, error) - ToAdd() UTXOCollection - ToRemove() UTXOCollection - - WithDiffInPlace(other UTXODiff) error - AddTransaction(transaction *DomainTransaction, blockDAAScore uint64) error -} diff --git a/consensus/model/externalapi/utxoentry.go b/consensus/model/externalapi/utxoentry.go deleted file mode 100644 index fb628b60..00000000 --- a/consensus/model/externalapi/utxoentry.go +++ /dev/null @@ -1,20 +0,0 @@ -package externalapi - -// UTXOEntry houses details about an individual transaction output in a utxo -// set such as whether or not it was contained in a coinbase tx, the daa -// score of the block that accepts the tx, its public key 
script, and how -// much it pays. -type UTXOEntry interface { - Amount() uint64 // Utxo amount in Sompis - ScriptPublicKey() *ScriptPublicKey // The public key script for the output. - BlockDAAScore() uint64 // Daa score of the block accepting the tx. - IsCoinbase() bool - Equal(other UTXOEntry) bool -} - -// OutpointAndUTXOEntryPair is an outpoint along with its -// respective UTXO entry -type OutpointAndUTXOEntryPair struct { - Outpoint *DomainOutpoint - UTXOEntry UTXOEntry -} diff --git a/consensus/model/externalapi/virtual.go b/consensus/model/externalapi/virtual.go deleted file mode 100644 index 42eb975b..00000000 --- a/consensus/model/externalapi/virtual.go +++ /dev/null @@ -1,10 +0,0 @@ -package externalapi - -// VirtualInfo represents information about the virtual block needed by external components -type VirtualInfo struct { - ParentHashes []*DomainHash - Bits uint32 - PastMedianTime int64 - BlueScore uint64 - DAAScore uint64 -} diff --git a/consensus/model/interface_datastructures_blockheaderstore.go b/consensus/model/interface_datastructures_blockheaderstore.go deleted file mode 100644 index 6c683084..00000000 --- a/consensus/model/interface_datastructures_blockheaderstore.go +++ /dev/null @@ -1,15 +0,0 @@ -package model - -import "github.com/Qitmeer/qng/consensus/model/externalapi" - -// BlockHeaderStore represents a store of block headers -type BlockHeaderStore interface { - Store - Stage(stagingArea *StagingArea, blockHash *externalapi.DomainHash, blockHeader externalapi.BlockHeader) - IsStaged(stagingArea *StagingArea) bool - BlockHeader(dbContext DBReader, stagingArea *StagingArea, blockHash *externalapi.DomainHash) (externalapi.BlockHeader, error) - HasBlockHeader(dbContext DBReader, stagingArea *StagingArea, blockHash *externalapi.DomainHash) (bool, error) - BlockHeaders(dbContext DBReader, stagingArea *StagingArea, blockHashes []*externalapi.DomainHash) ([]externalapi.BlockHeader, error) - Delete(stagingArea *StagingArea, blockHash *externalapi.DomainHash) - Count(stagingArea *StagingArea) uint64 -} diff --git a/consensus/model/interface_datastructures_daablocksstore.go b/consensus/model/interface_datastructures_daablocksstore.go deleted file mode 100644 index d750c995..00000000 --- a/consensus/model/interface_datastructures_daablocksstore.go +++ /dev/null @@ -1,14 +0,0 @@ -package model - -import "github.com/Qitmeer/qng/consensus/model/externalapi" - -// DAABlocksStore represents a store of ??? 
-type DAABlocksStore interface { - Store - StageDAAScore(stagingArea *StagingArea, blockHash *externalapi.DomainHash, daaScore uint64) - StageBlockDAAAddedBlocks(stagingArea *StagingArea, blockHash *externalapi.DomainHash, addedBlocks []*externalapi.DomainHash) - IsStaged(stagingArea *StagingArea) bool - DAAAddedBlocks(dbContext DBReader, stagingArea *StagingArea, blockHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) - DAAScore(dbContext DBReader, stagingArea *StagingArea, blockHash *externalapi.DomainHash) (uint64, error) - Delete(stagingArea *StagingArea, blockHash *externalapi.DomainHash) -} diff --git a/consensus/model/interface_datastructures_ghostdagdatastore.go b/consensus/model/interface_datastructures_ghostdagdatastore.go deleted file mode 100644 index 80a4e1fc..00000000 --- a/consensus/model/interface_datastructures_ghostdagdatastore.go +++ /dev/null @@ -1,12 +0,0 @@ -package model - -import "github.com/Qitmeer/qng/domain/consensus/model/externalapi" - -// GHOSTDAGDataStore represents a store of BlockGHOSTDAGData -type GHOSTDAGDataStore interface { - Store - Stage(stagingArea *StagingArea, blockHash *externalapi.DomainHash, blockGHOSTDAGData *externalapi.BlockGHOSTDAGData, isTrustedData bool) - IsStaged(stagingArea *StagingArea) bool - Get(dbContext DBReader, stagingArea *StagingArea, blockHash *externalapi.DomainHash, isTrustedData bool) (*externalapi.BlockGHOSTDAGData, error) - UnstageAll(stagingArea *StagingArea) -} diff --git a/consensus/model/interface_processes_dagtopologymanager.go b/consensus/model/interface_processes_dagtopologymanager.go deleted file mode 100644 index 81b496d6..00000000 --- a/consensus/model/interface_processes_dagtopologymanager.go +++ /dev/null @@ -1,19 +0,0 @@ -package model - -import "github.com/Qitmeer/qng/consensus/model/externalapi" - -// DAGTopologyManager exposes methods for querying relationships -// between blocks in the DAG -type DAGTopologyManager interface { - Parents(stagingArea *StagingArea, blockHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) - Children(stagingArea *StagingArea, blockHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) - IsParentOf(stagingArea *StagingArea, blockHashA *externalapi.DomainHash, blockHashB *externalapi.DomainHash) (bool, error) - IsChildOf(stagingArea *StagingArea, blockHashA *externalapi.DomainHash, blockHashB *externalapi.DomainHash) (bool, error) - IsAncestorOf(stagingArea *StagingArea, blockHashA *externalapi.DomainHash, blockHashB *externalapi.DomainHash) (bool, error) - IsAncestorOfAny(stagingArea *StagingArea, blockHash *externalapi.DomainHash, potentialDescendants []*externalapi.DomainHash) (bool, error) - IsAnyAncestorOf(stagingArea *StagingArea, potentialAncestors []*externalapi.DomainHash, blockHash *externalapi.DomainHash) (bool, error) - IsInSelectedParentChainOf(stagingArea *StagingArea, blockHashA *externalapi.DomainHash, blockHashB *externalapi.DomainHash) (bool, error) - ChildInSelectedParentChainOf(stagingArea *StagingArea, lowHash, highHash *externalapi.DomainHash) (*externalapi.DomainHash, error) - - SetParents(stagingArea *StagingArea, blockHash *externalapi.DomainHash, parentHashes []*externalapi.DomainHash) error -} diff --git a/consensus/model/interface_processes_dagtraversalmanager.go b/consensus/model/interface_processes_dagtraversalmanager.go deleted file mode 100644 index c1597832..00000000 --- a/consensus/model/interface_processes_dagtraversalmanager.go +++ /dev/null @@ -1,21 +0,0 @@ -package model - -import 
"github.com/Qitmeer/qng/consensus/model/externalapi" - -// DAGTraversalManager exposes methods for traversing blocks -// in the DAG -type DAGTraversalManager interface { - LowestChainBlockAboveOrEqualToBlueScore(stagingArea *StagingArea, highHash *externalapi.DomainHash, blueScore uint64) (*externalapi.DomainHash, error) - // SelectedChildIterator should return a BlockIterator that iterates - // from lowHash (exclusive) to highHash (inclusive) over highHash's selected parent chain - SelectedChildIterator(stagingArea *StagingArea, highHash, lowHash *externalapi.DomainHash, includeLowHash bool) (BlockIterator, error) - SelectedChild(stagingArea *StagingArea, highHash, lowHash *externalapi.DomainHash) (*externalapi.DomainHash, error) - AnticoneFromBlocks(stagingArea *StagingArea, tips []*externalapi.DomainHash, blockHash *externalapi.DomainHash, maxTraversalAllowed uint64) ([]*externalapi.DomainHash, error) - AnticoneFromVirtualPOV(stagingArea *StagingArea, blockHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) - BlockWindow(stagingArea *StagingArea, highHash *externalapi.DomainHash, windowSize int) ([]*externalapi.DomainHash, error) - DAABlockWindow(stagingArea *StagingArea, highHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) - NewDownHeap(stagingArea *StagingArea) BlockHeap - NewUpHeap(stagingArea *StagingArea) BlockHeap - CalculateChainPath(stagingArea *StagingArea, fromBlockHash, toBlockHash *externalapi.DomainHash) ( - *externalapi.SelectedChainPath, error) -} diff --git a/consensus/model/interface_processes_difficultymanager.go b/consensus/model/interface_processes_difficultymanager.go index 9deafac0..cc6222d8 100644 --- a/consensus/model/interface_processes_difficultymanager.go +++ b/consensus/model/interface_processes_difficultymanager.go @@ -1,13 +1,60 @@ package model import ( - "github.com/Qitmeer/qng/consensus/model/externalapi" + "math" + "math/big" + + "github.com/Qitmeer/qng/common/hash" + "github.com/Qitmeer/qng/core/types/pow" ) +type DifficultyBlock struct { + TimeInMilliseconds int64 + Bits uint32 + Hash hash.Hash + BlueWork bool +} +type BlockWindow []DifficultyBlock + +func ghostdagLess(blockA *DifficultyBlock, blockB *DifficultyBlock) bool { + return blockA.BlueWork == blockB.BlueWork +} + +func (window BlockWindow) MinMaxTimestamps() (min, max int64, minIndex int) { + min = math.MaxInt64 + minIndex = 0 + max = 0 + for i, block := range window { + // If timestamps are equal we ghostdag compare in order to reach consensus on `minIndex` + if block.TimeInMilliseconds < min || + (block.TimeInMilliseconds == min && ghostdagLess(&block, &window[minIndex])) { + min = block.TimeInMilliseconds + minIndex = i + } + if block.TimeInMilliseconds > max { + max = block.TimeInMilliseconds + } + } + return +} + +func (window *BlockWindow) Remove(n int) { + (*window)[n] = (*window)[len(*window)-1] + *window = (*window)[:len(*window)-1] +} + +func (window BlockWindow) AverageTarget() *big.Int { + averageTarget := new(big.Int) + targetTmp := new(big.Int) + for _, block := range window { + pow.CompactToBigWithDestination(block.Bits, targetTmp) + averageTarget.Add(averageTarget, targetTmp) + } + return averageTarget.Div(averageTarget, big.NewInt(int64(len(window)))) +} + // DifficultyManager provides a method to resolve the // difficulty value of a block type DifficultyManager interface { - StageDAADataAndReturnRequiredDifficulty(stagingArea *StagingArea, blockHash *externalapi.DomainHash, isBlockWithTrustedData bool) (uint32, error) - 
RequiredDifficulty(stagingArea *StagingArea, blockHash *externalapi.DomainHash) (uint32, error) - EstimateNetworkHashesPerSecond(startHash *externalapi.DomainHash, windowSize int) (uint64, error) + RequiredDifficulty(blocks BlockWindow, powInstance pow.IPow) (uint32, error) } diff --git a/consensus/model/interface_processes_ghostdagmanager.go b/consensus/model/interface_processes_ghostdagmanager.go deleted file mode 100644 index 5330a989..00000000 --- a/consensus/model/interface_processes_ghostdagmanager.go +++ /dev/null @@ -1,12 +0,0 @@ -package model - -import "github.com/Qitmeer/qng/consensus/model/externalapi" - -// GHOSTDAGManager resolves and manages GHOSTDAG block data -type GHOSTDAGManager interface { - GHOSTDAG(stagingArea *StagingArea, blockHash *externalapi.DomainHash) error - ChooseSelectedParent(stagingArea *StagingArea, blockHashes ...*externalapi.DomainHash) (*externalapi.DomainHash, error) - Less(blockHashA *externalapi.DomainHash, ghostdagDataA *externalapi.BlockGHOSTDAGData, - blockHashB *externalapi.DomainHash, ghostdagDataB *externalapi.BlockGHOSTDAGData) bool - GetSortedMergeSet(stagingArea *StagingArea, current *externalapi.DomainHash) ([]*externalapi.DomainHash, error) -} diff --git a/core/blockchain/blockchain.go b/core/blockchain/blockchain.go index d7205dd6..94388aa9 100644 --- a/core/blockchain/blockchain.go +++ b/core/blockchain/blockchain.go @@ -13,6 +13,7 @@ import ( "github.com/Qitmeer/qng/common/roughtime" "github.com/Qitmeer/qng/common/system" "github.com/Qitmeer/qng/common/util" + "github.com/Qitmeer/qng/consensus/difficultymanager" "github.com/Qitmeer/qng/consensus/model" "github.com/Qitmeer/qng/core/blockchain/token" "github.com/Qitmeer/qng/core/blockchain/utxo" @@ -137,7 +138,7 @@ type BlockChain struct { quit chan struct{} meerChain *meer.MeerChain - dm *model.DifficultyManager + dm model.DifficultyManager } func (b *BlockChain) Init() error { @@ -1055,6 +1056,7 @@ func New(consensus model.Consensus) (*BlockChain, error) { progressLogger: progresslog.NewBlockProgressLogger("Processed", log), msgChan: make(chan *processMsg), quit: make(chan struct{}), + dm: difficultymanager.New(par), } b.subsidyCache = NewSubsidyCache(0, b.params) diff --git a/core/blockchain/difficulty.go b/core/blockchain/difficulty.go index 12ff8c8a..91775ce4 100644 --- a/core/blockchain/difficulty.go +++ b/core/blockchain/difficulty.go @@ -8,12 +8,14 @@ package blockchain import ( "fmt" + "math/big" + "time" + "github.com/Qitmeer/qng/common/hash" + "github.com/Qitmeer/qng/consensus/model" "github.com/Qitmeer/qng/core/types" "github.com/Qitmeer/qng/core/types/pow" "github.com/Qitmeer/qng/meerdag" - "math/big" - "time" ) // bigZero is 0 represented as a big.Int. It is defined here to avoid @@ -92,6 +94,9 @@ func (b *BlockChain) findPrevTestNetDifficulty(startBlock meerdag.IBlock, powIns // the exported version uses the current best chain as the previous block node // while this function accepts any block node. func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTime time.Time, powInstance pow.IPow) (uint32, error) { + if powInstance.GetPowType() == pow.MEERXKECCAKV1 { + return b.dm.RequiredDifficulty(b.getBlockWindows(block, powInstance.GetPowType(), int(b.params.WorkDiffWindowSize)), powInstance) + } baseTarget := powInstance.GetSafeDiff(0) originCurrentBlock := block // Genesis block. 
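Note (illustration only, not part of this patch series): the concrete RequiredDifficulty implementation behind b.dm is not shown in this hunk. A kaspad-style DAA built from the BlockWindow helpers introduced above (MinMaxTimestamps, Remove, AverageTarget) could look roughly like the sketch below; the requiredDifficultySketch name, the targetTimePerBlock parameter, and the pow.BigToCompact call are assumptions made for the sketch, not taken from these patches.

package difficultymanager

import (
	"math/big"
	"time"

	"github.com/Qitmeer/qng/consensus/model"
	"github.com/Qitmeer/qng/core/types/pow"
)

// requiredDifficultySketch derives the next compact difficulty from a block
// window: average the window's targets, scale by the ratio of actual to
// expected time span, and clamp to the pow's safe (easiest) target.
// Sketch only; assumes pow.BigToCompact exists with this signature.
func requiredDifficultySketch(window model.BlockWindow, powInstance pow.IPow, targetTimePerBlock time.Duration) uint32 {
	maxTarget := powInstance.GetSafeDiff(0)
	// With too few blocks there is nothing to average; keep the safe difficulty.
	if len(window) < 2 {
		return pow.BigToCompact(maxTarget)
	}

	// Drop the min-timestamp block so the average covers the remaining blocks,
	// then measure the actual time span of the window.
	minTime, maxTime, minIndex := window.MinMaxTimestamps()
	window.Remove(minIndex)

	actualTimespan := maxTime - minTime
	if actualTimespan < 1 {
		actualTimespan = 1 // never let the target collapse to zero
	}

	// newTarget = averageTarget * actualTimespan / (targetTimePerBlock * len(window))
	newTarget := window.AverageTarget()
	newTarget.Mul(newTarget, big.NewInt(actualTimespan))
	newTarget.Div(newTarget, big.NewInt(targetTimePerBlock.Milliseconds()))
	newTarget.Div(newTarget, big.NewInt(int64(len(window))))

	// Blocks may never be easier to mine than the safe (maximum) target allows.
	if newTarget.Cmp(maxTarget) > 0 {
		newTarget.Set(maxTarget)
	}
	return pow.BigToCompact(newTarget)
}

Under this sketch, a window mined faster than targetTimePerBlock shrinks the target (raising difficulty), a slower window grows it, and the result is clamped at the safe target, which mirrors how the MEERXKECCAKV1 branch added to calcNextRequiredDifficulty above is expected to behave.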
@@ -302,6 +307,50 @@ func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTi return nextDiffBits, nil } +// blockWindow returns a blockWindow of the given size that contains the +// blocks in the past of startingNode, the sorting is unspecified. +// If the number of blocks in the past of startingNode is less then windowSize, +// the window will be padded by genesis blocks to achieve a size of windowSize. +func (b *BlockChain) getBlockWindows(oldBlock meerdag.IBlock, powType pow.PowType, windowSize int) model.BlockWindow { + windows := make(model.BlockWindow, 0, windowSize) + count := 0 + for i := uint64(0); ; i++ { + // Get the previous node while staying at the genesis block as + // needed. + if oldBlock == nil || !oldBlock.HasParents() { + break + } + + for id := range oldBlock.GetParents().GetMap() { + if count >= windowSize { + return windows + } + oldBlock = b.bd.GetBlockById(id) + if oldBlock == nil { + continue + } + oldBlock = b.getPowTypeNode(oldBlock, powType) + if oldBlock == nil { + continue + } + + on := b.GetBlockHeader(oldBlock) + if on == nil { + continue + } + windows = append(windows, model.DifficultyBlock{ + TimeInMilliseconds: on.Timestamp.UnixMilli(), + Bits: on.Difficulty, + Hash: on.BlockHash(), + BlueWork: b.BlockDAG().IsBlue(oldBlock.GetID()), + }) + count++ + } + + } + return windows +} + // stats current pow count in nodesToTraverse func (b *BlockChain) calcCurrentPowCount(block meerdag.IBlock, nodesToTraverse int64, powType pow.PowType) int64 { // Genesis block. diff --git a/params/params_mixnet.go b/params/params_mixnet.go index 4b8cfae6..c8c78df9 100644 --- a/params/params_mixnet.go +++ b/params/params_mixnet.go @@ -7,10 +7,11 @@ package params import ( - "github.com/Qitmeer/qng/core/types" "math/big" "time" + "github.com/Qitmeer/qng/core/types" + "github.com/Qitmeer/qng/common" "github.com/Qitmeer/qng/core/protocol" "github.com/Qitmeer/qng/core/types/pow" @@ -23,10 +24,10 @@ import ( var testMixNetPowLimit = new(big.Int).Sub(new(big.Int).Lsh(common.Big1, 242), common.Big1) // target time per block unit second(s) -const mixTargetTimePerBlock = 15 +const mixTargetTimePerBlock = 1 -// Difficulty check interval is about 30*15 = 7.5 mins -const mixWorkDiffWindowSize = 30 +// The DAA should take the median of 2640 blocks, so in order to do that we need 2641 window size. +const mixWorkDiffWindowSize = 2641 // testPowNetParams defines the network parameters for the test network. var MixNetParams = Params{ From 23d71e32641fda9c670bb128c2b347b551f39ea8 Mon Sep 17 00:00:00 2001 From: frankcrypto Date: Thu, 30 Nov 2023 00:30:13 +0800 Subject: [PATCH 04/15] remove unuse code --- consensus/model/database.go | 57 ------------------------------------- 1 file changed, 57 deletions(-) diff --git a/consensus/model/database.go b/consensus/model/database.go index 24466f28..989e52e4 100644 --- a/consensus/model/database.go +++ b/consensus/model/database.go @@ -83,60 +83,3 @@ type DataBase interface { StartTrack(info string) error StopTrack() error } - -// DBCursor iterates over database entries given some bucket. -type DBCursor interface { - // Next moves the iterator to the next key/value pair. It returns whether the - // iterator is exhausted. Panics if the cursor is closed. - Next() bool - - // First moves the iterator to the first key/value pair. It returns false if - // such a pair does not exist. Panics if the cursor is closed. 
- First() bool - - // Seek moves the iterator to the first key/value pair whose key is greater - // than or equal to the given key. It returns ErrNotFound if such pair does not - // exist. - Seek(key DBKey) error - - // Key returns the key of the current key/value pair, or ErrNotFound if done. - // The caller should not modify the contents of the returned key, and - // its contents may change on the next call to Next. - Key() (DBKey, error) - - // Value returns the value of the current key/value pair, or ErrNotFound if done. - // The caller should not modify the contents of the returned slice, and its - // contents may change on the next call to Next. - Value() ([]byte, error) - - // Close releases associated resources. - Close() error -} - -// DBReader defines a proxy over domain data access -type DBReader interface { - // Get gets the value for the given key. It returns - // ErrNotFound if the given key does not exist. - Get(key DBKey) ([]byte, error) - - // Has returns true if the database does contains the - // given key. - Has(key DBKey) (bool, error) - - // Cursor begins a new cursor over the given bucket. - Cursor(bucket DBBucket) (DBCursor, error) -} - -// DBKey is an interface for a database key -type DBKey interface { - Bytes() []byte - Bucket() DBBucket - Suffix() []byte -} - -// DBBucket is an interface for a database bucket -type DBBucket interface { - Bucket(bucketBytes []byte) DBBucket - Key(suffix []byte) DBKey - Path() []byte -} From e7caa82d4b9c29f1ac0a238b80e29e7365d3030b Mon Sep 17 00:00:00 2001 From: james Date: Thu, 30 Nov 2023 12:13:07 +0800 Subject: [PATCH 05/15] restruct --- core/blockchain/blockchain.go | 3 --- core/blockchain/difficulty.go | 11 ++++++----- .../types/pow}/difficultymanager/difficultymanager.go | 7 +++---- .../interface_processes_difficultymanager.go | 2 +- 4 files changed, 10 insertions(+), 13 deletions(-) rename {consensus => core/types/pow}/difficultymanager/difficultymanager.go (91%) rename {consensus/model => core/types/pow/difficultymanager}/interface_processes_difficultymanager.go (98%) diff --git a/core/blockchain/blockchain.go b/core/blockchain/blockchain.go index 94388aa9..ae5d7b22 100644 --- a/core/blockchain/blockchain.go +++ b/core/blockchain/blockchain.go @@ -13,7 +13,6 @@ import ( "github.com/Qitmeer/qng/common/roughtime" "github.com/Qitmeer/qng/common/system" "github.com/Qitmeer/qng/common/util" - "github.com/Qitmeer/qng/consensus/difficultymanager" "github.com/Qitmeer/qng/consensus/model" "github.com/Qitmeer/qng/core/blockchain/token" "github.com/Qitmeer/qng/core/blockchain/utxo" @@ -138,7 +137,6 @@ type BlockChain struct { quit chan struct{} meerChain *meer.MeerChain - dm model.DifficultyManager } func (b *BlockChain) Init() error { @@ -1056,7 +1054,6 @@ func New(consensus model.Consensus) (*BlockChain, error) { progressLogger: progresslog.NewBlockProgressLogger("Processed", log), msgChan: make(chan *processMsg), quit: make(chan struct{}), - dm: difficultymanager.New(par), } b.subsidyCache = NewSubsidyCache(0, b.params) diff --git a/core/blockchain/difficulty.go b/core/blockchain/difficulty.go index 91775ce4..2ec555b7 100644 --- a/core/blockchain/difficulty.go +++ b/core/blockchain/difficulty.go @@ -12,9 +12,10 @@ import ( "time" "github.com/Qitmeer/qng/common/hash" - "github.com/Qitmeer/qng/consensus/model" "github.com/Qitmeer/qng/core/types" "github.com/Qitmeer/qng/core/types/pow" + + "github.com/Qitmeer/qng/core/types/pow/difficultymanager" "github.com/Qitmeer/qng/meerdag" ) @@ -95,7 +96,7 @@ func (b *BlockChain) 
findPrevTestNetDifficulty(startBlock meerdag.IBlock, powIns // while this function accepts any block node. func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTime time.Time, powInstance pow.IPow) (uint32, error) { if powInstance.GetPowType() == pow.MEERXKECCAKV1 { - return b.dm.RequiredDifficulty(b.getBlockWindows(block, powInstance.GetPowType(), int(b.params.WorkDiffWindowSize)), powInstance) + return difficultymanager.New(b.params).RequiredDifficulty(b.getBlockWindows(block, powInstance.GetPowType(), int(b.params.WorkDiffWindowSize)), powInstance) } baseTarget := powInstance.GetSafeDiff(0) originCurrentBlock := block @@ -311,8 +312,8 @@ func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTi // blocks in the past of startingNode, the sorting is unspecified. // If the number of blocks in the past of startingNode is less then windowSize, // the window will be padded by genesis blocks to achieve a size of windowSize. -func (b *BlockChain) getBlockWindows(oldBlock meerdag.IBlock, powType pow.PowType, windowSize int) model.BlockWindow { - windows := make(model.BlockWindow, 0, windowSize) +func (b *BlockChain) getBlockWindows(oldBlock meerdag.IBlock, powType pow.PowType, windowSize int) difficultymanager.BlockWindow { + windows := make(difficultymanager.BlockWindow, 0, windowSize) count := 0 for i := uint64(0); ; i++ { // Get the previous node while staying at the genesis block as @@ -338,7 +339,7 @@ func (b *BlockChain) getBlockWindows(oldBlock meerdag.IBlock, powType pow.PowTyp if on == nil { continue } - windows = append(windows, model.DifficultyBlock{ + windows = append(windows, difficultymanager.DifficultyBlock{ TimeInMilliseconds: on.Timestamp.UnixMilli(), Bits: on.Difficulty, Hash: on.BlockHash(), diff --git a/consensus/difficultymanager/difficultymanager.go b/core/types/pow/difficultymanager/difficultymanager.go similarity index 91% rename from consensus/difficultymanager/difficultymanager.go rename to core/types/pow/difficultymanager/difficultymanager.go index 4b3c101b..dea65a36 100644 --- a/consensus/difficultymanager/difficultymanager.go +++ b/core/types/pow/difficultymanager/difficultymanager.go @@ -5,7 +5,6 @@ import ( "time" "github.com/Qitmeer/qng/common/util/math" - "github.com/Qitmeer/qng/consensus/model" "github.com/Qitmeer/qng/core/types/pow" "github.com/Qitmeer/qng/params" ) @@ -21,7 +20,7 @@ type difficultyManager struct { } // New instantiates a new DifficultyManager -func New(cfg *params.Params) model.DifficultyManager { +func New(cfg *params.Params) DifficultyManager { return &difficultyManager{ powMax: cfg.PowConfig.MeerXKeccakV1PowLimit, difficultyAdjustmentWindowSize: int(cfg.WorkDiffWindowSize), @@ -32,14 +31,14 @@ func New(cfg *params.Params) model.DifficultyManager { } // RequiredDifficulty returns the difficulty required for some block -func (dm *difficultyManager) RequiredDifficulty(targetsWindow model.BlockWindow, powInstance pow.IPow) (uint32, error) { +func (dm *difficultyManager) RequiredDifficulty(targetsWindow BlockWindow, powInstance pow.IPow) (uint32, error) { if powInstance.GetPowType() != pow.MEERXKECCAKV1 || len(targetsWindow) < 1 { return dm.genesisBits, nil } return dm.requiredDifficultyFromTargetsWindow(targetsWindow) } -func (dm *difficultyManager) requiredDifficultyFromTargetsWindow(targetsWindow model.BlockWindow) (uint32, error) { +func (dm *difficultyManager) requiredDifficultyFromTargetsWindow(targetsWindow BlockWindow) (uint32, error) { if dm.disableDifficultyAdjustment { return 
dm.genesisBits, nil } diff --git a/consensus/model/interface_processes_difficultymanager.go b/core/types/pow/difficultymanager/interface_processes_difficultymanager.go similarity index 98% rename from consensus/model/interface_processes_difficultymanager.go rename to core/types/pow/difficultymanager/interface_processes_difficultymanager.go index cc6222d8..887e43e0 100644 --- a/consensus/model/interface_processes_difficultymanager.go +++ b/core/types/pow/difficultymanager/interface_processes_difficultymanager.go @@ -1,4 +1,4 @@ -package model +package difficultymanager import ( "math" From 46c047d8479799ee68209178bd2a08503199bdf0 Mon Sep 17 00:00:00 2001 From: james Date: Thu, 30 Nov 2023 12:17:53 +0800 Subject: [PATCH 06/15] restruct --- common/{util/math/min.go => math/maxmin.go} | 2 +- common/util/math/min_test.go | 64 ------------------- .../difficultymanager/difficultymanager.go | 4 +- 3 files changed, 3 insertions(+), 67 deletions(-) rename common/{util/math/min.go => math/maxmin.go} (89%) delete mode 100644 common/util/math/min_test.go diff --git a/common/util/math/min.go b/common/math/maxmin.go similarity index 89% rename from common/util/math/min.go rename to common/math/maxmin.go index edbeac11..7d13dbfd 100644 --- a/common/util/math/min.go +++ b/common/math/maxmin.go @@ -9,7 +9,7 @@ func MinInt(x, y int) int { } // MaxInt64 returns the bigger of x or y. -func MaxInt64(x, y int64) int64 { +func MaxInt64Val(x, y int64) int64 { if x > y { return x } diff --git a/common/util/math/min_test.go b/common/util/math/min_test.go deleted file mode 100644 index 62d46f4d..00000000 --- a/common/util/math/min_test.go +++ /dev/null @@ -1,64 +0,0 @@ -package math_test - -import ( - utilMath "github.com/kaspanet/kaspad/util/math" - "math" - "testing" -) - -const ( - MaxInt = int(^uint(0) >> 1) - MinInt = -MaxInt - 1 -) - -func TestMinInt(t *testing.T) { - tests := []struct { - inputs [2]int - expected int - }{ - {[2]int{MaxInt, 0}, 0}, - {[2]int{1, 2}, 1}, - {[2]int{MaxInt, MaxInt}, MaxInt}, - {[2]int{MaxInt, MaxInt - 1}, MaxInt - 1}, - {[2]int{MaxInt, MinInt}, MinInt}, - {[2]int{MinInt, 0}, MinInt}, - {[2]int{MinInt, MinInt}, MinInt}, - {[2]int{0, MinInt + 1}, MinInt + 1}, - {[2]int{0, MinInt}, MinInt}, - } - - for i, test := range tests { - result := utilMath.MinInt(test.inputs[0], test.inputs[1]) - if result != test.expected { - t.Fatalf("%d: Expected %d, instead found: %d", i, test.expected, result) - } - reverseResult := utilMath.MinInt(test.inputs[1], test.inputs[0]) - if result != reverseResult { - t.Fatalf("%d: Expected result and reverseResult to be the same, instead: %d!=%d", i, result, reverseResult) - } - } -} - -func TestMinUint32(t *testing.T) { - tests := []struct { - inputs [2]uint32 - expected uint32 - }{ - {[2]uint32{math.MaxUint32, 0}, 0}, - {[2]uint32{1, 2}, 1}, - {[2]uint32{math.MaxUint32, math.MaxUint32}, math.MaxUint32}, - {[2]uint32{math.MaxUint32, math.MaxUint32 - 1}, math.MaxUint32 - 1}, - } - - for _, test := range tests { - result := utilMath.MinUint32(test.inputs[0], test.inputs[1]) - if result != test.expected { - t.Fatalf("Expected %d, instead found: %d", test.expected, result) - - } - reverseResult := utilMath.MinUint32(test.inputs[1], test.inputs[0]) - if result != reverseResult { - t.Fatalf("Expected result and reverseResult to be the same, instead: %d!=%d", result, reverseResult) - } - } -} diff --git a/core/types/pow/difficultymanager/difficultymanager.go b/core/types/pow/difficultymanager/difficultymanager.go index dea65a36..84f96149 100644 --- 
a/core/types/pow/difficultymanager/difficultymanager.go +++ b/core/types/pow/difficultymanager/difficultymanager.go @@ -4,7 +4,7 @@ import ( "math/big" "time" - "github.com/Qitmeer/qng/common/util/math" + "github.com/Qitmeer/qng/common/math" "github.com/Qitmeer/qng/core/types/pow" "github.com/Qitmeer/qng/params" ) @@ -66,7 +66,7 @@ func (dm *difficultyManager) requiredDifficultyFromTargetsWindow(targetsWindow B newTarget := targetsWindow.AverageTarget() newTarget. // We need to clamp the timestamp difference to 1 so that we'll never get a 0 target. - Mul(newTarget, div.SetInt64(math.MaxInt64(windowMaxTimeStamp-windowMinTimestamp, 1))). + Mul(newTarget, div.SetInt64(math.MaxInt64Val(windowMaxTimeStamp-windowMinTimestamp, 1))). Div(newTarget, div.SetInt64(dm.targetTimePerBlock.Milliseconds())). Div(newTarget, div.SetUint64(uint64(len(targetsWindow)))) if newTarget.Cmp(dm.powMax) > 0 { From 34c8148830a5992d592156c8ebf938ddb8f25903 Mon Sep 17 00:00:00 2001 From: james Date: Thu, 30 Nov 2023 14:02:44 +0800 Subject: [PATCH 07/15] sort parent map --- core/blockchain/difficulty.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/core/blockchain/difficulty.go b/core/blockchain/difficulty.go index 2ec555b7..25c243c6 100644 --- a/core/blockchain/difficulty.go +++ b/core/blockchain/difficulty.go @@ -321,8 +321,9 @@ func (b *BlockChain) getBlockWindows(oldBlock meerdag.IBlock, powType pow.PowTyp if oldBlock == nil || !oldBlock.HasParents() { break } - - for id := range oldBlock.GetParents().GetMap() { + ids := oldBlock.GetParents().SortList(false) + for i := 0; i < len(ids); i++ { + id := ids[i] if count >= windowSize { return windows } From 248ddd66ed7b5409eb20cc4a8092ed72b0b6bdbd Mon Sep 17 00:00:00 2001 From: frankcrypto Date: Sun, 10 Dec 2023 20:39:52 +0800 Subject: [PATCH 08/15] restruct difficulty modules --- consensus/consensus.go | 3 +- consensus/model/block.go | 1 + consensus/model/block_chain.go | 2 + consensus/model/meer_dag.go | 11 + core/blockchain/process.go | 6 +- core/blockchain/validate.go | 9 +- core/types/common.go | 5 + core/types/pow/config.go | 4 +- .../difficultymanager/difficultymanager.go | 77 ------- .../interface_difficultymanager.go | 39 ++++ .../interface_processes_difficultymanager.go | 60 ----- core/types/pow/difficultymanager/kaspad.go | 214 ++++++++++++++++++ .../pow/difficultymanager/meer.go} | 202 ++++++----------- node/api.go | 10 +- params/params_mixnet.go | 3 +- services/mining/newblocktemplate.go | 8 +- 16 files changed, 371 insertions(+), 283 deletions(-) create mode 100644 consensus/model/meer_dag.go delete mode 100644 core/types/pow/difficultymanager/difficultymanager.go create mode 100644 core/types/pow/difficultymanager/interface_difficultymanager.go delete mode 100644 core/types/pow/difficultymanager/interface_processes_difficultymanager.go create mode 100644 core/types/pow/difficultymanager/kaspad.go rename core/{blockchain/difficulty.go => types/pow/difficultymanager/meer.go} (65%) diff --git a/consensus/consensus.go b/consensus/consensus.go index dd0d4d33..0cb71ec6 100644 --- a/consensus/consensus.go +++ b/consensus/consensus.go @@ -1,6 +1,8 @@ package consensus import ( + "sync" + "github.com/Qitmeer/qng/common/hash" "github.com/Qitmeer/qng/config" "github.com/Qitmeer/qng/consensus/model" @@ -13,7 +15,6 @@ import ( "github.com/Qitmeer/qng/node/service" "github.com/Qitmeer/qng/params" "github.com/Qitmeer/qng/services/index" - "sync" ) type consensus struct { diff --git a/consensus/model/block.go b/consensus/model/block.go index 
395e049d..e0e1f8d3 100644 --- a/consensus/model/block.go +++ b/consensus/model/block.go @@ -9,4 +9,5 @@ type Block interface { GetOrder() uint HasParents() bool GetMainParent() uint + GetHeight() uint } diff --git a/consensus/model/block_chain.go b/consensus/model/block_chain.go index f46724ed..b5cdc197 100644 --- a/consensus/model/block_chain.go +++ b/consensus/model/block_chain.go @@ -26,4 +26,6 @@ type BlockChain interface { GetBlockOrderByHash(hash *hash.Hash) (uint, error) GetBlockHeader(block Block) *types.BlockHeader ForeachBlueBlocks(start Block, depth uint, powType pow.PowType, fn func(block Block, header *types.BlockHeader) error) error + ChainRLock() + ChainRUnlock() } diff --git a/consensus/model/meer_dag.go b/consensus/model/meer_dag.go new file mode 100644 index 00000000..a2bf9fa4 --- /dev/null +++ b/consensus/model/meer_dag.go @@ -0,0 +1,11 @@ +package model + +import ( + "github.com/Qitmeer/qng/common/hash" + "github.com/Qitmeer/qng/rpc/api" +) + +type MeerDag interface { + RegisterAPIs(apis []api.API) + GetBlockIDByTxHash(txhash *hash.Hash) uint64 +} diff --git a/core/blockchain/process.go b/core/blockchain/process.go index 4d8bdb94..84f4a1ff 100644 --- a/core/blockchain/process.go +++ b/core/blockchain/process.go @@ -8,16 +8,18 @@ package blockchain import ( "container/list" "fmt" + "time" + "github.com/Qitmeer/qng/common/hash" "github.com/Qitmeer/qng/consensus/model" "github.com/Qitmeer/qng/core/blockchain/utxo" "github.com/Qitmeer/qng/core/state" "github.com/Qitmeer/qng/core/types" "github.com/Qitmeer/qng/core/types/pow" + "github.com/Qitmeer/qng/core/types/pow/difficultymanager" "github.com/Qitmeer/qng/engine/txscript" l "github.com/Qitmeer/qng/log" "github.com/Qitmeer/qng/meerdag" - "time" ) // ProcessBlock is the main workhorse for handling insertion of new blocks into @@ -153,7 +155,7 @@ func (b *BlockChain) preProcessBlock(block *types.SerializedBlock, flags Behavio // expected based on elapsed time since the last checkpoint and // maximum adjustment allowed by the retarget rules. duration := blockHeader.Timestamp.Sub(checkpointTime) - requiredTarget := pow.CompactToBig(b.calcEasiestDifficulty( + requiredTarget := pow.CompactToBig(difficultymanager.NewDiffManager(b.consensus.BlockChain(), b.params).CalcEasiestDifficulty( checkpointNode.Difficulty, duration, block.Block().Header.Pow)) currentTarget := pow.CompactToBig(blockHeader.Difficulty) if !block.Block().Header.Pow.CompareDiff(currentTarget, requiredTarget) { diff --git a/core/blockchain/validate.go b/core/blockchain/validate.go index b83c1dbf..b8588b20 100644 --- a/core/blockchain/validate.go +++ b/core/blockchain/validate.go @@ -10,6 +10,9 @@ import ( "encoding/binary" "encoding/hex" "fmt" + "math" + "time" + "github.com/Qitmeer/qng/common/hash" "github.com/Qitmeer/qng/consensus/forks" "github.com/Qitmeer/qng/consensus/model" @@ -22,11 +25,10 @@ import ( "github.com/Qitmeer/qng/core/state" "github.com/Qitmeer/qng/core/types" "github.com/Qitmeer/qng/core/types/pow" + "github.com/Qitmeer/qng/core/types/pow/difficultymanager" "github.com/Qitmeer/qng/engine/txscript" "github.com/Qitmeer/qng/meerdag" "github.com/Qitmeer/qng/params" - "math" - "time" ) const ( @@ -758,7 +760,8 @@ func (b *BlockChain) checkBlockHeaderContext(block *types.SerializedBlock, prevN // Ensure the difficulty specified in the block header matches // the calculated difficulty based on the previous block and // difficulty retarget rules. 
- expDiff, err := b.calcNextRequiredDifficulty(prevNode, + + expDiff, err := difficultymanager.NewDiffManager(b.consensus.BlockChain(), b.params).RequiredDifficulty(prevNode, header.Timestamp, instance) if err != nil { return err diff --git a/core/types/common.go b/core/types/common.go index 19a0e31c..0ec8c9f3 100644 --- a/core/types/common.go +++ b/core/types/common.go @@ -18,4 +18,9 @@ const ( // MaxAmount is the maximum transaction amount allowed in atoms. // TODO, relocate the coin related item to chain's params MaxAmount = 21e6 * AtomsPerCoin + + // MEER difficulty adjustment + DIFFICULTY_MODE_MEER = 0 + // KASPAD difficulty adjustment + DIFFICULTY_MODE_KASPAD ) diff --git a/core/types/pow/config.go b/core/types/pow/config.go index 52e7b0a4..34a1a1a3 100644 --- a/core/types/pow/config.go +++ b/core/types/pow/config.go @@ -48,9 +48,11 @@ type PowConfig struct { //is init init bool + + DifficultyMode int } -//global cache +// global cache func GetPowConfig() *PowConfig { if PowConfigInstance != nil { return PowConfigInstance diff --git a/core/types/pow/difficultymanager/difficultymanager.go b/core/types/pow/difficultymanager/difficultymanager.go deleted file mode 100644 index 84f96149..00000000 --- a/core/types/pow/difficultymanager/difficultymanager.go +++ /dev/null @@ -1,77 +0,0 @@ -package difficultymanager - -import ( - "math/big" - "time" - - "github.com/Qitmeer/qng/common/math" - "github.com/Qitmeer/qng/core/types/pow" - "github.com/Qitmeer/qng/params" -) - -// DifficultyManager provides a method to resolve the -// difficulty value of a block -type difficultyManager struct { - powMax *big.Int - difficultyAdjustmentWindowSize int - disableDifficultyAdjustment bool - targetTimePerBlock time.Duration - genesisBits uint32 -} - -// New instantiates a new DifficultyManager -func New(cfg *params.Params) DifficultyManager { - return &difficultyManager{ - powMax: cfg.PowConfig.MeerXKeccakV1PowLimit, - difficultyAdjustmentWindowSize: int(cfg.WorkDiffWindowSize), - disableDifficultyAdjustment: false, - targetTimePerBlock: cfg.TargetTimePerBlock, - genesisBits: cfg.PowConfig.MeerXKeccakV1PowLimitBits, - } -} - -// RequiredDifficulty returns the difficulty required for some block -func (dm *difficultyManager) RequiredDifficulty(targetsWindow BlockWindow, powInstance pow.IPow) (uint32, error) { - if powInstance.GetPowType() != pow.MEERXKECCAKV1 || len(targetsWindow) < 1 { - return dm.genesisBits, nil - } - return dm.requiredDifficultyFromTargetsWindow(targetsWindow) -} - -func (dm *difficultyManager) requiredDifficultyFromTargetsWindow(targetsWindow BlockWindow) (uint32, error) { - if dm.disableDifficultyAdjustment { - return dm.genesisBits, nil - } - - // in the past this was < 2 as the comment explains, we changed it to under the window size to - // make the hashrate(which is ~1.5GH/s) constant in the first 2641 blocks so that we won't have a lot of tips - - // We need at least 2 blocks to get a timestamp interval - // We could instead clamp the timestamp difference to `targetTimePerBlock`, - // but then everything will cancel out and we'll get the target from the last block, which will be the same as genesis. 
- // We add 64 as a safety margin - if len(targetsWindow) < 2 || len(targetsWindow) < dm.difficultyAdjustmentWindowSize { - return dm.genesisBits, nil - } - - windowMinTimestamp, windowMaxTimeStamp, windowMinIndex := targetsWindow.MinMaxTimestamps() - // Remove the last block from the window so to calculate the average target of dag.difficultyAdjustmentWindowSize blocks - targetsWindow.Remove(windowMinIndex) - - // Calculate new target difficulty as: - // averageWindowTarget * (windowMinTimestamp / (targetTimePerBlock * windowSize)) - // The result uses integer division which means it will be slightly - // rounded down. - div := new(big.Int) - newTarget := targetsWindow.AverageTarget() - newTarget. - // We need to clamp the timestamp difference to 1 so that we'll never get a 0 target. - Mul(newTarget, div.SetInt64(math.MaxInt64Val(windowMaxTimeStamp-windowMinTimestamp, 1))). - Div(newTarget, div.SetInt64(dm.targetTimePerBlock.Milliseconds())). - Div(newTarget, div.SetUint64(uint64(len(targetsWindow)))) - if newTarget.Cmp(dm.powMax) > 0 { - return pow.BigToCompact(dm.powMax), nil - } - newTargetBits := pow.BigToCompact(newTarget) - return newTargetBits, nil -} diff --git a/core/types/pow/difficultymanager/interface_difficultymanager.go b/core/types/pow/difficultymanager/interface_difficultymanager.go new file mode 100644 index 00000000..05889613 --- /dev/null +++ b/core/types/pow/difficultymanager/interface_difficultymanager.go @@ -0,0 +1,39 @@ +package difficultymanager + +import ( + "math/big" + "time" + + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/core/types" + "github.com/Qitmeer/qng/core/types/pow" + "github.com/Qitmeer/qng/params" +) + +// DifficultyManager provides a method to resolve the +// difficulty value of a block +type DifficultyManager interface { + CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error) + RequiredDifficulty(block model.Block, newBlockTime time.Time, powInstance pow.IPow) (uint32, error) + CalcEasiestDifficulty(bits uint32, duration time.Duration, powInstance pow.IPow) uint32 + GetCurrentPowDiff(ib model.Block, powType pow.PowType) *big.Int +} + +func NewDiffManager(b model.BlockChain, cfg *params.Params) DifficultyManager { + switch cfg.PowConfig.DifficultyMode { + case types.DIFFICULTY_MODE_KASPAD: + return &kaspadDiff{ + b: b, + powMax: cfg.PowConfig.MeerXKeccakV1PowLimit, + difficultyAdjustmentWindowSize: int(cfg.WorkDiffWindowSize), + disableDifficultyAdjustment: false, + targetTimePerBlock: cfg.TargetTimePerBlock, + genesisBits: cfg.PowConfig.MeerXKeccakV1PowLimitBits, + cfg: cfg, + } + } + return &meerDiff{ + b: b, + cfg: cfg, + } +} diff --git a/core/types/pow/difficultymanager/interface_processes_difficultymanager.go b/core/types/pow/difficultymanager/interface_processes_difficultymanager.go deleted file mode 100644 index 887e43e0..00000000 --- a/core/types/pow/difficultymanager/interface_processes_difficultymanager.go +++ /dev/null @@ -1,60 +0,0 @@ -package difficultymanager - -import ( - "math" - "math/big" - - "github.com/Qitmeer/qng/common/hash" - "github.com/Qitmeer/qng/core/types/pow" -) - -type DifficultyBlock struct { - TimeInMilliseconds int64 - Bits uint32 - Hash hash.Hash - BlueWork bool -} -type BlockWindow []DifficultyBlock - -func ghostdagLess(blockA *DifficultyBlock, blockB *DifficultyBlock) bool { - return blockA.BlueWork == blockB.BlueWork -} - -func (window BlockWindow) MinMaxTimestamps() (min, max int64, minIndex int) { - min = math.MaxInt64 - minIndex = 0 - max = 0 - for i, 
block := range window { - // If timestamps are equal we ghostdag compare in order to reach consensus on `minIndex` - if block.TimeInMilliseconds < min || - (block.TimeInMilliseconds == min && ghostdagLess(&block, &window[minIndex])) { - min = block.TimeInMilliseconds - minIndex = i - } - if block.TimeInMilliseconds > max { - max = block.TimeInMilliseconds - } - } - return -} - -func (window *BlockWindow) Remove(n int) { - (*window)[n] = (*window)[len(*window)-1] - *window = (*window)[:len(*window)-1] -} - -func (window BlockWindow) AverageTarget() *big.Int { - averageTarget := new(big.Int) - targetTmp := new(big.Int) - for _, block := range window { - pow.CompactToBigWithDestination(block.Bits, targetTmp) - averageTarget.Add(averageTarget, targetTmp) - } - return averageTarget.Div(averageTarget, big.NewInt(int64(len(window)))) -} - -// DifficultyManager provides a method to resolve the -// difficulty value of a block -type DifficultyManager interface { - RequiredDifficulty(blocks BlockWindow, powInstance pow.IPow) (uint32, error) -} diff --git a/core/types/pow/difficultymanager/kaspad.go b/core/types/pow/difficultymanager/kaspad.go new file mode 100644 index 00000000..7ff733e7 --- /dev/null +++ b/core/types/pow/difficultymanager/kaspad.go @@ -0,0 +1,214 @@ +package difficultymanager + +import ( + "math/big" + "time" + + "github.com/Qitmeer/qng/common/hash" + "github.com/Qitmeer/qng/common/math" + "github.com/Qitmeer/qng/consensus/model" + "github.com/Qitmeer/qng/core/types" + "github.com/Qitmeer/qng/core/types/pow" + "github.com/Qitmeer/qng/params" +) + +type DifficultyBlock struct { + TimeInMilliseconds int64 + Bits uint32 + Hash hash.Hash + BlueWork bool +} +type blockWindow []DifficultyBlock + +func ghostdagLess(blockA *DifficultyBlock, blockB *DifficultyBlock) bool { + return blockA.BlueWork == blockB.BlueWork +} + +func (window blockWindow) MinMaxTimestamps() (min, max int64, minIndex int) { + min = math.MaxInt64 + minIndex = 0 + max = 0 + for i, block := range window { + // If timestamps are equal we ghostdag compare in order to reach consensus on `minIndex` + if block.TimeInMilliseconds < min || + (block.TimeInMilliseconds == min && ghostdagLess(&block, &window[minIndex])) { + min = block.TimeInMilliseconds + minIndex = i + } + if block.TimeInMilliseconds > max { + max = block.TimeInMilliseconds + } + } + return +} + +func (window *blockWindow) Remove(n int) { + (*window)[n] = (*window)[len(*window)-1] + *window = (*window)[:len(*window)-1] +} + +func (window blockWindow) AverageTarget() *big.Int { + averageTarget := new(big.Int) + targetTmp := new(big.Int) + for _, block := range window { + pow.CompactToBigWithDestination(block.Bits, targetTmp) + averageTarget.Add(averageTarget, targetTmp) + } + return averageTarget.Div(averageTarget, big.NewInt(int64(len(window)))) +} + +// DifficultyManager provides a method to resolve the +// difficulty value of a block +type kaspadDiff struct { + powMax *big.Int + difficultyAdjustmentWindowSize int + disableDifficultyAdjustment bool + targetTimePerBlock time.Duration + genesisBits uint32 + b model.BlockChain + cfg *params.Params +} + +// CalcEasiestDifficulty calculates the easiest possible difficulty that a block +// can have given starting difficulty bits and a duration. It is mainly used to +// verify that claimed proof of work by a block is sane as compared to a +// known good checkpoint. 
+func (m *kaspadDiff) CalcEasiestDifficulty(bits uint32, duration time.Duration, powInstance pow.IPow) uint32 { + // Convert types used in the calculations below. + durationVal := int64(duration) + adjustmentFactor := big.NewInt(m.cfg.RetargetAdjustmentFactor) + maxRetargetTimespan := int64(m.cfg.TargetTimespan) * + m.cfg.RetargetAdjustmentFactor + target := powInstance.GetSafeDiff(0) + // The test network rules allow minimum difficulty blocks once too much + // time has elapsed without mining a block. + if m.cfg.ReduceMinDifficulty { + if durationVal > int64(m.cfg.MinDiffReductionTime) { + return pow.BigToCompact(target) + } + } + + // Since easier difficulty equates to higher numbers, the easiest + // difficulty for a given duration is the largest value possible given + // the number of retargets for the duration and starting difficulty + // multiplied by the max adjustment factor. + newTarget := pow.CompactToBig(bits) + + for durationVal > 0 && powInstance.CompareDiff(newTarget, target) { + newTarget.Mul(newTarget, adjustmentFactor) + newTarget = powInstance.GetNextDiffBig(adjustmentFactor, newTarget, big.NewInt(0)) + durationVal -= maxRetargetTimespan + } + + // Limit new value to the proof of work limit. + if !powInstance.CompareDiff(newTarget, target) { + newTarget.Set(target) + } + + return pow.BigToCompact(newTarget) +} + +func (m *kaspadDiff) CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error) { + m.b.ChainRLock() + block := m.b.GetMainChainTip() + instance := pow.GetInstance(powType, 0, []byte{}) + instance.SetParams(m.cfg.PowConfig) + instance.SetMainHeight(pow.MainHeight(block.GetHeight() + 1)) + difficulty, err := m.RequiredDifficultyByWindows(m.getblockWindows(block, instance.GetPowType(), int(m.cfg.WorkDiffWindowSize))) + m.b.ChainRUnlock() + return difficulty, err +} + +func (m *kaspadDiff) RequiredDifficulty(block model.Block, newBlockTime time.Time, powInstance pow.IPow) (uint32, error) { + return m.RequiredDifficultyByWindows(m.getblockWindows(block, powInstance.GetPowType(), int(m.cfg.WorkDiffWindowSize))) +} + +// RequiredDifficultyByWindows returns the difficulty required for some block +func (dm *kaspadDiff) RequiredDifficultyByWindows(targetsWindow blockWindow) (uint32, error) { + if len(targetsWindow) < 1 { + return dm.genesisBits, nil + } + return dm.requiredDifficultyFromTargetsWindow(targetsWindow) +} + +func (dm *kaspadDiff) requiredDifficultyFromTargetsWindow(targetsWindow blockWindow) (uint32, error) { + if dm.disableDifficultyAdjustment { + return dm.genesisBits, nil + } + + // in the past this was < 2 as the comment explains, we changed it to under the window size to + // make the hashrate(which is ~1.5GH/s) constant in the first 2641 blocks so that we won't have a lot of tips + + // We need at least 2 blocks to get a timestamp interval + // We could instead clamp the timestamp difference to `targetTimePerBlock`, + // but then everything will cancel out and we'll get the target from the last block, which will be the same as genesis. 
+ // We add 64 as a safety margin + if len(targetsWindow) < 2 || len(targetsWindow) < dm.difficultyAdjustmentWindowSize { + return dm.genesisBits, nil + } + + windowMinTimestamp, windowMaxTimeStamp, windowMinIndex := targetsWindow.MinMaxTimestamps() + // Remove the last block from the window so to calculate the average target of dag.difficultyAdjustmentWindowSize blocks + targetsWindow.Remove(windowMinIndex) + + // Calculate new target difficulty as: + // averageWindowTarget * (windowMinTimestamp / (targetTimePerBlock * windowSize)) + // The result uses integer division which means it will be slightly + // rounded down. + div := new(big.Int) + newTarget := targetsWindow.AverageTarget() + newTarget. + // We need to clamp the timestamp difference to 1 so that we'll never get a 0 target. + Mul(newTarget, div.SetInt64(math.MaxInt64Val(windowMaxTimeStamp-windowMinTimestamp, 1))). + Div(newTarget, div.SetInt64(dm.targetTimePerBlock.Milliseconds())). + Div(newTarget, div.SetUint64(uint64(len(targetsWindow)))) + if newTarget.Cmp(dm.powMax) > 0 { + return pow.BigToCompact(dm.powMax), nil + } + newTargetBits := pow.BigToCompact(newTarget) + return newTargetBits, nil +} + +// blockWindow returns a blockWindow of the given size that contains the +// blocks in the past of startingNode, the sorting is unspecified. +// If the number of blocks in the past of startingNode is less then windowSize, +// the window will be padded by genesis blocks to achieve a size of windowSize. +func (dm *kaspadDiff) getblockWindows(oldBlock model.Block, powType pow.PowType, windowSize int) blockWindow { + windows := make(blockWindow, 0, windowSize) + dm.b.ForeachBlueBlocks(oldBlock, uint(windowSize), powType, func(block model.Block, header *types.BlockHeader) error { + windows = append(windows, DifficultyBlock{ + TimeInMilliseconds: header.Timestamp.UnixMilli(), + Bits: header.Difficulty, + Hash: header.BlockHash(), + BlueWork: true, + }) + return nil + }) + + return windows +} + +// find block node by pow type +func (m *kaspadDiff) GetCurrentPowDiff(ib model.Block, powType pow.PowType) *big.Int { + instance := pow.GetInstance(powType, 0, []byte{}) + instance.SetParams(m.cfg.PowConfig) + safeBigDiff := instance.GetSafeDiff(0) + for { + curNode := m.b.GetBlockHeader(ib) + if curNode == nil { + return safeBigDiff + } + if curNode.Pow.GetPowType() == powType { + return pow.CompactToBig(curNode.Difficulty) + } + + if !ib.HasParents() { + return safeBigDiff + } + + ib = m.b.GetBlockById(ib.GetMainParent()) + if ib == nil { + return safeBigDiff + } + } +} diff --git a/core/blockchain/difficulty.go b/core/types/pow/difficultymanager/meer.go similarity index 65% rename from core/blockchain/difficulty.go rename to core/types/pow/difficultymanager/meer.go index 25c243c6..28dfbcde 100644 --- a/core/blockchain/difficulty.go +++ b/core/types/pow/difficultymanager/meer.go @@ -4,19 +4,18 @@ // Use of this source code is governed by an ISC // license that can be found in the LICENSE file. -package blockchain +package difficultymanager import ( "fmt" "math/big" "time" - "github.com/Qitmeer/qng/common/hash" + "github.com/Qitmeer/qng/consensus/model" "github.com/Qitmeer/qng/core/types" "github.com/Qitmeer/qng/core/types/pow" - - "github.com/Qitmeer/qng/core/types/pow/difficultymanager" - "github.com/Qitmeer/qng/meerdag" + "github.com/Qitmeer/qng/log" + "github.com/Qitmeer/qng/params" ) // bigZero is 0 represented as a big.Int. It is defined here to avoid @@ -27,21 +26,26 @@ var bigZero = big.NewInt(0) // testnet difficulty). 
const maxShift = uint(256) -// calcEasiestDifficulty calculates the easiest possible difficulty that a block +type meerDiff struct { + b model.BlockChain + cfg *params.Params +} + +// CalcEasiestDifficulty calculates the easiest possible difficulty that a block // can have given starting difficulty bits and a duration. It is mainly used to // verify that claimed proof of work by a block is sane as compared to a // known good checkpoint. -func (b *BlockChain) calcEasiestDifficulty(bits uint32, duration time.Duration, powInstance pow.IPow) uint32 { +func (m *meerDiff) CalcEasiestDifficulty(bits uint32, duration time.Duration, powInstance pow.IPow) uint32 { // Convert types used in the calculations below. durationVal := int64(duration) - adjustmentFactor := big.NewInt(b.params.RetargetAdjustmentFactor) - maxRetargetTimespan := int64(b.params.TargetTimespan) * - b.params.RetargetAdjustmentFactor + adjustmentFactor := big.NewInt(m.cfg.RetargetAdjustmentFactor) + maxRetargetTimespan := int64(m.cfg.TargetTimespan) * + m.cfg.RetargetAdjustmentFactor target := powInstance.GetSafeDiff(0) // The test network rules allow minimum difficulty blocks once too much // time has elapsed without mining a block. - if b.params.ReduceMinDifficulty { - if durationVal > int64(b.params.MinDiffReductionTime) { + if m.cfg.ReduceMinDifficulty { + if durationVal > int64(m.cfg.MinDiffReductionTime) { return pow.BigToCompact(target) } } @@ -70,34 +74,31 @@ func (b *BlockChain) calcEasiestDifficulty(bits uint32, duration time.Duration, // did not have the special testnet minimum difficulty rule applied. // // This function MUST be called with the chain state lock held (for writes). -func (b *BlockChain) findPrevTestNetDifficulty(startBlock meerdag.IBlock, powInstance pow.IPow) uint32 { +func (m *meerDiff) findPrevTestNetDifficulty(startBlock model.Block, powInstance pow.IPow) uint32 { // Search backwards through the chain for the last block without // the special rule applied. target := powInstance.GetSafeDiff(0) lastBits := pow.BigToCompact(target) - blocksPerRetarget := uint64(b.params.WorkDiffWindowSize * b.params.WorkDiffWindows) + blocksPerRetarget := uint64(m.cfg.WorkDiffWindowSize * m.cfg.WorkDiffWindows) iterBlock := startBlock if iterBlock == nil || uint64(iterBlock.GetHeight())%blocksPerRetarget == 0 { return lastBits } var iterNode *types.BlockHeader - iterNode = b.GetBlockHeader(iterBlock) + iterNode = m.b.GetBlockHeader(iterBlock) if iterNode.Difficulty != pow.BigToCompact(target) { return lastBits } return iterNode.Difficulty } -// calcNextRequiredDifficulty calculates the required difficulty for the block +// RequiredDifficulty calculates the required difficulty for the block // after the passed previous block node based on the difficulty retarget rules. -// This function differs from the exported CalcNextRequiredDifficulty in that +// This function differs from the exported RequiredDifficulty in that // the exported version uses the current best chain as the previous block node // while this function accepts any block node. 
-func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTime time.Time, powInstance pow.IPow) (uint32, error) { - if powInstance.GetPowType() == pow.MEERXKECCAKV1 { - return difficultymanager.New(b.params).RequiredDifficulty(b.getBlockWindows(block, powInstance.GetPowType(), int(b.params.WorkDiffWindowSize)), powInstance) - } +func (m *meerDiff) RequiredDifficulty(block model.Block, newBlockTime time.Time, powInstance pow.IPow) (uint32, error) { baseTarget := powInstance.GetSafeDiff(0) originCurrentBlock := block // Genesis block. @@ -105,11 +106,11 @@ func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTi return pow.BigToCompact(baseTarget), nil } - block = b.getPowTypeNode(block, powInstance.GetPowType()) + block = m.getPowTypeNode(block, powInstance.GetPowType()) if block == nil { return pow.BigToCompact(baseTarget), nil } - curNode := b.GetBlockHeader(block) + curNode := m.b.GetBlockHeader(block) if curNode == nil { return pow.BigToCompact(baseTarget), nil } @@ -117,21 +118,21 @@ func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTi // just return this. oldDiff := curNode.Difficulty oldDiffBig := pow.CompactToBig(curNode.Difficulty) - windowsSizeBig := big.NewInt(b.params.WorkDiffWindowSize) + windowsSizeBig := big.NewInt(m.cfg.WorkDiffWindowSize) // percent is *100 * 2^32 windowsSizeBig.Mul(windowsSizeBig, powInstance.PowPercent()) windowsSizeBig.Div(windowsSizeBig, big.NewInt(100)) windowsSizeBig.Rsh(windowsSizeBig, 32) needAjustCount := int64(windowsSizeBig.Uint64()) // We're not at a retarget point, return the oldDiff. - if !b.needAjustPowDifficulty(block, powInstance.GetPowType(), needAjustCount) { + if !m.needAjustPowDifficulty(block, powInstance.GetPowType(), needAjustCount) { // For networks that support it, allow special reduction of the // required difficulty once too much time has elapsed without // mining a block. - if b.params.ReduceMinDifficulty { + if m.cfg.ReduceMinDifficulty { // Return minimum difficulty when more than the desired // amount of time has elapsed without mining a block. - reductionTime := int64(b.params.MinDiffReductionTime / + reductionTime := int64(m.cfg.MinDiffReductionTime / time.Second) allowMinTime := curNode.Timestamp.Unix() + reductionTime @@ -140,7 +141,7 @@ func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTi if newBlockTime.Unix() > allowMinTime { timePassed := newBlockTime.Unix() - curNode.Timestamp.Unix() timePassed -= reductionTime - shifts := uint((timePassed / int64(b.params.TargetTimePerBlock/ + shifts := uint((timePassed / int64(m.cfg.TargetTimePerBlock/ time.Second)) + 1) // Scale the difficulty with time passed. @@ -163,29 +164,29 @@ func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTi // The block was mined within the desired timeframe, so // return the difficulty for the last block which did // not have the special minimum difficulty rule applied. - return b.findPrevTestNetDifficulty(block, powInstance), nil + return m.findPrevTestNetDifficulty(block, powInstance), nil } return oldDiff, nil } // Declare some useful variables. 
- RAFBig := big.NewInt(b.params.RetargetAdjustmentFactor) + RAFBig := big.NewInt(m.cfg.RetargetAdjustmentFactor) nextDiffBigMin := pow.CompactToBig(curNode.Difficulty) nextDiffBigMin.Div(nextDiffBigMin, RAFBig) nextDiffBigMax := pow.CompactToBig(curNode.Difficulty) nextDiffBigMax.Mul(nextDiffBigMax, RAFBig) - alpha := b.params.WorkDiffAlpha + alpha := m.cfg.WorkDiffAlpha // Number of nodes to traverse while calculating difficulty. - nodesToTraverse := needAjustCount * b.params.WorkDiffWindows - percentStatsRecentCount := b.params.WorkDiffWindowSize * b.params.WorkDiffWindows + nodesToTraverse := needAjustCount * m.cfg.WorkDiffWindows + percentStatsRecentCount := m.cfg.WorkDiffWindowSize * m.cfg.WorkDiffWindows //calc pow block count in last nodesToTraverse blocks - currentPowBlockCount := b.calcCurrentPowCount(originCurrentBlock, percentStatsRecentCount, powInstance.GetPowType()) + currentPowBlockCount := m.calcCurrentPowCount(originCurrentBlock, percentStatsRecentCount, powInstance.GetPowType()) // Initialize bigInt slice for the percentage changes for each window period // above or below the target. - windowChanges := make([]*big.Int, b.params.WorkDiffWindows) + windowChanges := make([]*big.Int, m.cfg.WorkDiffWindows) // Regress through all of the previous blocks and store the percent changes // per window period; use bigInts to emulate 64.32 bit fixed point. @@ -205,22 +206,22 @@ func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTi // Just assume we're at the target (no change) if we've // gone all the way back to the genesis block. if oldBlockOrder == 0 { - timeDifference = int64(b.params.TargetTimespan / + timeDifference = int64(m.cfg.TargetTimespan / time.Second) } timeDifBig := big.NewInt(timeDifference) timeDifBig.Lsh(timeDifBig, 32) // Add padding - targetTemp := big.NewInt(int64(b.params.TargetTimespan / + targetTemp := big.NewInt(int64(m.cfg.TargetTimespan / time.Second)) windowAdjusted := targetTemp.Div(timeDifBig, targetTemp) // Weight it exponentially. Be aware that this could at some point // overflow if alpha or the number of blocks used is really large. windowAdjusted = windowAdjusted.Lsh(windowAdjusted, - uint((b.params.WorkDiffWindows-windowPeriod)*alpha)) + uint((m.cfg.WorkDiffWindows-windowPeriod)*alpha)) // Sum up all the different weights incrementally. - weights += 1 << uint64((b.params.WorkDiffWindows-windowPeriod)* + weights += 1 << uint64((m.cfg.WorkDiffWindows-windowPeriod)* alpha) // Store it in the slice. @@ -237,17 +238,17 @@ func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTi // Get the previous node while staying at the genesis block as // needed. if oldBlock != nil && oldBlock.HasParents() { - oldBlock = b.bd.GetBlockById(oldBlock.GetMainParent()) + oldBlock = m.b.GetBlockById(oldBlock.GetMainParent()) if oldBlock == nil { continue } - oldBlock = b.getPowTypeNode(oldBlock, powInstance.GetPowType()) + oldBlock = m.getPowTypeNode(oldBlock, powInstance.GetPowType()) if oldBlock == nil { oldNodeTimestamp = 0 oldBlockOrder = 0 continue } - on := b.GetBlockHeader(oldBlock) + on := m.b.GetBlockHeader(oldBlock) if on == nil { continue } @@ -257,7 +258,7 @@ func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTi } // Sum up the weighted window periods. 
weightedSum := big.NewInt(0) - for i := int64(0); i < b.params.WorkDiffWindows; i++ { + for i := int64(0); i < m.cfg.WorkDiffWindows; i++ { weightedSum.Add(weightedSum, windowChanges[i]) } @@ -308,53 +309,8 @@ func (b *BlockChain) calcNextRequiredDifficulty(block meerdag.IBlock, newBlockTi return nextDiffBits, nil } -// blockWindow returns a blockWindow of the given size that contains the -// blocks in the past of startingNode, the sorting is unspecified. -// If the number of blocks in the past of startingNode is less then windowSize, -// the window will be padded by genesis blocks to achieve a size of windowSize. -func (b *BlockChain) getBlockWindows(oldBlock meerdag.IBlock, powType pow.PowType, windowSize int) difficultymanager.BlockWindow { - windows := make(difficultymanager.BlockWindow, 0, windowSize) - count := 0 - for i := uint64(0); ; i++ { - // Get the previous node while staying at the genesis block as - // needed. - if oldBlock == nil || !oldBlock.HasParents() { - break - } - ids := oldBlock.GetParents().SortList(false) - for i := 0; i < len(ids); i++ { - id := ids[i] - if count >= windowSize { - return windows - } - oldBlock = b.bd.GetBlockById(id) - if oldBlock == nil { - continue - } - oldBlock = b.getPowTypeNode(oldBlock, powType) - if oldBlock == nil { - continue - } - - on := b.GetBlockHeader(oldBlock) - if on == nil { - continue - } - windows = append(windows, difficultymanager.DifficultyBlock{ - TimeInMilliseconds: on.Timestamp.UnixMilli(), - Bits: on.Difficulty, - Hash: on.BlockHash(), - BlueWork: b.BlockDAG().IsBlue(oldBlock.GetID()), - }) - count++ - } - - } - return windows -} - // stats current pow count in nodesToTraverse -func (b *BlockChain) calcCurrentPowCount(block meerdag.IBlock, nodesToTraverse int64, powType pow.PowType) int64 { +func (m *meerDiff) calcCurrentPowCount(block model.Block, nodesToTraverse int64, powType pow.PowType) int64 { // Genesis block. if block == nil { return 0 @@ -370,9 +326,9 @@ func (b *BlockChain) calcCurrentPowCount(block meerdag.IBlock, nodesToTraverse i currentPowBlockCount-- } if oldBlock.HasParents() { - ob := b.bd.GetBlockById(oldBlock.GetMainParent()) + ob := m.b.GetBlockById(oldBlock.GetMainParent()) if ob != nil { - oldNode := b.GetBlockHeader(ob) + oldNode := m.b.GetBlockHeader(ob) if oldNode == nil { continue } @@ -388,21 +344,21 @@ func (b *BlockChain) calcCurrentPowCount(block meerdag.IBlock, nodesToTraverse i } // whether need ajust Pow Difficulty -// recent b.params.WorkDiffWindowSize blocks +// recent m.cfg.WorkDiffWindowSize blocks // if current count arrived target block count . 
need ajustment difficulty -func (b *BlockChain) needAjustPowDifficulty(block meerdag.IBlock, powType pow.PowType, needAjustCount int64) bool { - countFromLastAdjustment := b.getDistanceFromLastAdjustment(block, powType, needAjustCount) - // countFromLastAdjustment stats b.params.WorkDiffWindows Multiple count - countFromLastAdjustment /= b.params.WorkDiffWindows +func (m *meerDiff) needAjustPowDifficulty(block model.Block, powType pow.PowType, needAjustCount int64) bool { + countFromLastAdjustment := m.getDistanceFromLastAdjustment(block, powType, needAjustCount) + // countFromLastAdjustment stats m.b.params.WorkDiffWindows Multiple count + countFromLastAdjustment /= m.cfg.WorkDiffWindows return countFromLastAdjustment > 0 && countFromLastAdjustment%needAjustCount == 0 } // Distance block count from last adjustment -func (b *BlockChain) getDistanceFromLastAdjustment(block meerdag.IBlock, powType pow.PowType, needAjustCount int64) int64 { +func (m *meerDiff) getDistanceFromLastAdjustment(block model.Block, powType pow.PowType, needAjustCount int64) int64 { if block == nil { return 0 } - curNode := b.GetBlockHeader(block) + curNode := m.b.GetBlockHeader(block) if curNode == nil { return 0 } @@ -423,59 +379,43 @@ func (b *BlockChain) getDistanceFromLastAdjustment(block meerdag.IBlock, powType } // if TargetTimespan have only one pow block need ajustment difficulty // or count >= needAjustCount - if (count > 1 && currentTime-curNode.Timestamp.Unix() > (count-1)*int64(b.params.TargetTimespan/time.Second)) || + if (count > 1 && currentTime-curNode.Timestamp.Unix() > (count-1)*int64(m.cfg.TargetTimespan/time.Second)) || count >= needAjustCount { - return needAjustCount * b.params.WorkDiffWindows + return needAjustCount * m.cfg.WorkDiffWindows } if !block.HasParents() { return count } - block = b.bd.GetBlockById(block.GetMainParent()) + block = m.b.GetBlockById(block.GetMainParent()) if block != nil { - curNode = b.GetBlockHeader(block) + curNode = m.b.GetBlockHeader(block) } else { return count } } } -// CalcNextRequiredDiffFromNode calculates the required difficulty for the block -// given with the passed hash along with the given timestamp. -// -// This function is NOT safe for concurrent access. -func (b *BlockChain) CalcNextRequiredDiffFromNode(hash *hash.Hash, timestamp time.Time, powType pow.PowType) (uint32, error) { - ib := b.bd.GetBlock(hash) - if ib == nil { - return 0, fmt.Errorf("block %s is not known", hash) - } - - instance := pow.GetInstance(powType, 0, []byte{}) - instance.SetParams(b.params.PowConfig) - instance.SetMainHeight(pow.MainHeight(ib.GetHeight() + 1)) - return b.calcNextRequiredDifficulty(ib, timestamp, instance) -} - // CalcNextRequiredDifficulty calculates the required difficulty for the block // after the end of the current best chain based on the difficulty retarget // rules. // // This function is safe for concurrent access. 
-func (b *BlockChain) CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error) { - b.ChainRLock() - block := b.bd.GetMainChainTip() +func (m *meerDiff) CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error) { + m.b.ChainRLock() + block := m.b.GetMainChainTip() instance := pow.GetInstance(powType, 0, []byte{}) - instance.SetParams(b.params.PowConfig) + instance.SetParams(m.cfg.PowConfig) instance.SetMainHeight(pow.MainHeight(block.GetHeight() + 1)) - difficulty, err := b.calcNextRequiredDifficulty(block, timestamp, instance) - b.ChainRUnlock() + difficulty, err := m.RequiredDifficulty(block, timestamp, instance) + m.b.ChainRUnlock() return difficulty, err } // find block node by pow type -func (b *BlockChain) getPowTypeNode(block meerdag.IBlock, powType pow.PowType) meerdag.IBlock { +func (m *meerDiff) getPowTypeNode(block model.Block, powType pow.PowType) model.Block { for { - curNode := b.GetBlockHeader(block) + curNode := m.b.GetBlockHeader(block) if curNode == nil { return nil } @@ -486,7 +426,7 @@ func (b *BlockChain) getPowTypeNode(block meerdag.IBlock, powType pow.PowType) m if !block.HasParents() { return nil } - block = b.bd.GetBlockById(block.GetMainParent()) + block = m.b.GetBlockById(block.GetMainParent()) if block == nil { return nil } @@ -494,12 +434,12 @@ func (b *BlockChain) getPowTypeNode(block meerdag.IBlock, powType pow.PowType) m } // find block node by pow type -func (b *BlockChain) GetCurrentPowDiff(ib meerdag.IBlock, powType pow.PowType) *big.Int { +func (m *meerDiff) GetCurrentPowDiff(ib model.Block, powType pow.PowType) *big.Int { instance := pow.GetInstance(powType, 0, []byte{}) - instance.SetParams(b.params.PowConfig) + instance.SetParams(m.cfg.PowConfig) safeBigDiff := instance.GetSafeDiff(0) for { - curNode := b.GetBlockHeader(ib) + curNode := m.b.GetBlockHeader(ib) if curNode == nil { return safeBigDiff } @@ -511,7 +451,7 @@ func (b *BlockChain) GetCurrentPowDiff(ib meerdag.IBlock, powType pow.PowType) * return safeBigDiff } - ib = b.bd.GetBlockById(ib.GetMainParent()) + ib = m.b.GetBlockById(ib.GetMainParent()) if ib == nil { return safeBigDiff } diff --git a/node/api.go b/node/api.go index 018f5006..aa889af8 100644 --- a/node/api.go +++ b/node/api.go @@ -8,12 +8,17 @@ package node import ( js "encoding/json" "fmt" + "math/big" + "strconv" + "time" + "github.com/Qitmeer/qng/common/marshal" "github.com/Qitmeer/qng/common/roughtime" "github.com/Qitmeer/qng/consensus/forks" "github.com/Qitmeer/qng/core/json" "github.com/Qitmeer/qng/core/protocol" "github.com/Qitmeer/qng/core/types/pow" + "github.com/Qitmeer/qng/core/types/pow/difficultymanager" "github.com/Qitmeer/qng/meerdag" "github.com/Qitmeer/qng/meerevm/eth" "github.com/Qitmeer/qng/params" @@ -21,9 +26,6 @@ import ( "github.com/Qitmeer/qng/rpc/client/cmds" "github.com/Qitmeer/qng/services/common" "github.com/Qitmeer/qng/version" - "math/big" - "strconv" - "time" ) func (nf *QitmeerFull) apis() []api.API { @@ -58,7 +60,7 @@ func NewPublicBlockChainAPI(node *QitmeerFull) *PublicBlockChainAPI { func (api *PublicBlockChainAPI) GetNodeInfo() (interface{}, error) { best := api.node.GetBlockChain().BestSnapshot() node := api.node.GetBlockChain().BlockDAG().GetBlock(&best.Hash) - powNodes := api.node.GetBlockChain().GetCurrentPowDiff(node, pow.MEERXKECCAKV1) + powNodes := difficultymanager.NewDiffManager(api.node.GetBlockChain().Consensus().BlockChain(), api.node.GetBlockChain().ChainParams()).GetCurrentPowDiff(node, pow.MEERXKECCAKV1) ret := 
&json.InfoNodeResult{ ID: api.node.GetPeerServer().PeerID().String(), Version: int32(1000000*version.Major + 10000*version.Minor + 100*version.Patch), diff --git a/params/params_mixnet.go b/params/params_mixnet.go index c8c78df9..fd3cc58e 100644 --- a/params/params_mixnet.go +++ b/params/params_mixnet.go @@ -86,6 +86,7 @@ var MixNetParams = Params{ }, // after this height the big graph will be the main pow graph AdjustmentStartMainHeight: 1440 * 15 / mixTargetTimePerBlock, + DifficultyMode: types.DIFFICULTY_MODE_KASPAD, }, WorkDiffAlpha: 1, @@ -127,7 +128,7 @@ var MixNetParams = Params{ SLIP0044CoinType: 813, LegacyCoinType: 223, - CoinbaseMaturity: 720, + CoinbaseMaturity: 16, OrganizationPkScript: hexMustDecode("76a91429209320e66d96839785dd07e643a7f1592edc5a88ac"), TokenAdminPkScript: hexMustDecode("00000000c96d6d76a914b8834294977b26a44094fe2216f8a7d59af1130888ac"), } diff --git a/services/mining/newblocktemplate.go b/services/mining/newblocktemplate.go index 0eb9356e..ca7d94f8 100644 --- a/services/mining/newblocktemplate.go +++ b/services/mining/newblocktemplate.go @@ -2,6 +2,8 @@ package mining import ( "fmt" + "time" + "github.com/Qitmeer/qng/common/hash" "github.com/Qitmeer/qng/consensus/model" "github.com/Qitmeer/qng/core/address" @@ -11,13 +13,13 @@ import ( s "github.com/Qitmeer/qng/core/serialization" "github.com/Qitmeer/qng/core/types" "github.com/Qitmeer/qng/core/types/pow" + "github.com/Qitmeer/qng/core/types/pow/difficultymanager" "github.com/Qitmeer/qng/engine/txscript" "github.com/Qitmeer/qng/log" "github.com/Qitmeer/qng/meerdag" "github.com/Qitmeer/qng/params" "github.com/Qitmeer/qng/services/mempool" "golang.org/x/net/context" - "time" ) // NewBlockTemplate returns a new block template that is ready to be solved @@ -504,7 +506,7 @@ mempool: ts := MedianAdjustedTime(bc, timeSource) // - reqCompactDifficulty, err := bc.CalcNextRequiredDifficulty(ts, powType) + reqCompactDifficulty, err := difficultymanager.NewDiffManager(bc.Consensus().BlockChain(), bc.ChainParams()).CalcNextRequiredDifficulty(ts, powType) if err != nil { return nil, miningRuleError(ErrGettingDifficulty, err.Error()) } @@ -597,7 +599,7 @@ func UpdateBlockTime(msgBlock *types.Block, chain *blockchain.BlockChain, timeSo // If running on a network that requires recalculating the difficulty, // do so now. 
From 149a9a776c625b4e72b16f47b982c2ef8e9149a1 Mon Sep 17 00:00:00 2001
From: frankcrypto
Date: Sun, 10 Dec 2023 20:49:14 +0800
Subject: [PATCH 09/15] remove unused files

---
 consensus/model/meer_dag.go | 11 -----------
 1 file changed, 11 deletions(-)
 delete mode 100644 consensus/model/meer_dag.go

diff --git a/consensus/model/meer_dag.go b/consensus/model/meer_dag.go
deleted file mode 100644
index a2bf9fa4..00000000
--- a/consensus/model/meer_dag.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package model
-
-import (
-	"github.com/Qitmeer/qng/common/hash"
-	"github.com/Qitmeer/qng/rpc/api"
-)
-
-type MeerDag interface {
-	RegisterAPIs(apis []api.API)
-	GetBlockIDByTxHash(txhash *hash.Hash) uint64
-}

From e209def74c50b09b6b4f77696a29b806b5106835 Mon Sep 17 00:00:00 2001
From: frankcrypto
Date: Mon, 11 Dec 2023 11:14:03 +0800
Subject: [PATCH 10/15] restructure

---
 consensus/model/difficulty_manager.go         | 17 +++++++
 core/blockchain/blockchain.go                 |  5 +-
 core/blockchain/difficulty.go                 | 48 +++++++++++++++++++
 core/blockchain/process.go                    |  3 +-
 core/blockchain/validate.go                   |  3 +-
 ...ficultymanager.go => difficultymanager.go} | 15 +-----
 node/api.go                                   |  3 +-
 services/mining/newblocktemplate.go           |  5 +-
 8 files changed, 75 insertions(+), 24 deletions(-)
 create mode 100644 consensus/model/difficulty_manager.go
 create mode 100644 core/blockchain/difficulty.go
 rename core/types/pow/difficultymanager/{interface_difficultymanager.go => difficultymanager.go} (53%)

diff --git a/consensus/model/difficulty_manager.go b/consensus/model/difficulty_manager.go
new file mode 100644
index 00000000..98218a1d
--- /dev/null
+++ b/consensus/model/difficulty_manager.go
@@ -0,0 +1,17 @@
+package model
+
+import (
+	"math/big"
+	"time"
+
+	"github.com/Qitmeer/qng/core/types/pow"
+)
+
+// DifficultyManager provides a method to resolve the
+// difficulty value of a block
+type DifficultyManager interface {
+	CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error)
+	RequiredDifficulty(block Block, newBlockTime time.Time, powInstance pow.IPow) (uint32, error)
+	CalcEasiestDifficulty(bits uint32, duration time.Duration, powInstance pow.IPow) uint32
+	GetCurrentPowDiff(ib Block, powType pow.PowType) *big.Int
+}
diff --git a/core/blockchain/blockchain.go b/core/blockchain/blockchain.go
index ae5d7b22..65a53598 100644
--- a/core/blockchain/blockchain.go
+++ b/core/blockchain/blockchain.go
@@ -22,6 +22,7 @@ import (
 	"github.com/Qitmeer/qng/core/state"
 	"github.com/Qitmeer/qng/core/types"
 	"github.com/Qitmeer/qng/core/types/pow"
+	"github.com/Qitmeer/qng/core/types/pow/difficultymanager"
 	"github.com/Qitmeer/qng/database/common"
 	"github.com/Qitmeer/qng/engine/txscript"
 	l "github.com/Qitmeer/qng/log"
@@ -136,7 +137,8 @@ type BlockChain struct {
 	wg   sync.WaitGroup
 	quit chan struct{}
 
-	meerChain *meer.MeerChain
+	meerChain         *meer.MeerChain
+	difficultyManager model.DifficultyManager
 }
 
 func (b *BlockChain) Init() error {
@@ -1070,6 +1072,7 @@ func New(consensus model.Consensus) (*BlockChain, error) {
 	}
 	b.meerChain = mchain
 	b.Services().RegisterService(b.meerChain)
+	b.difficultyManager = difficultymanager.NewDiffManager(b.Consensus().BlockChain(), par)
 	return &b, nil
 }
 
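
To make the intent of the new model.DifficultyManager interface concrete, here is a minimal Go sketch of code written purely against the interface rather than against a concrete manager. The helper name easiestTarget and its arguments are illustrative only, but the CalcEasiestDifficulty/CompactToBig pairing mirrors the checkpoint sanity check that this patch rewires below; big, time, model and pow imports are assumed.

	// easiestTarget depends only on the model.DifficultyManager interface, so it
	// works with whichever difficulty algorithm NewDiffManager selects.
	func easiestTarget(dm model.DifficultyManager, bits uint32, elapsed time.Duration, powInstance pow.IPow) *big.Int {
		// Convert the easiest allowed compact bits for the elapsed duration into a big.Int target.
		return pow.CompactToBig(dm.CalcEasiestDifficulty(bits, elapsed, powInstance))
	}
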
diff --git a/core/blockchain/difficulty.go b/core/blockchain/difficulty.go
new file mode 100644
index 00000000..d89ffd2d
--- /dev/null
+++ b/core/blockchain/difficulty.go
@@ -0,0 +1,48 @@
+// Copyright (c) 2017-2018 The qitmeer developers
+// Copyright (c) 2013-2016 The btcsuite developers
+// Copyright (c) 2015-2018 The Decred developers
+// Use of this source code is governed by an ISC
+// license that can be found in the LICENSE file.
+
+package blockchain
+
+import (
+	"math/big"
+	"time"
+
+	"github.com/Qitmeer/qng/consensus/model"
+	"github.com/Qitmeer/qng/core/types/pow"
+)
+
+// calcEasiestDifficulty calculates the easiest possible difficulty that a block
+// can have given starting difficulty bits and a duration. It is mainly used to
+// verify that claimed proof of work by a block is sane as compared to a
+// known good checkpoint.
+func (m *BlockChain) calcEasiestDifficulty(bits uint32, duration time.Duration, powInstance pow.IPow) uint32 {
+	return m.difficultyManager.CalcEasiestDifficulty(bits, duration, powInstance)
+}
+
+func (m *BlockChain) calcNextRequiredDifficulty(block model.Block, newBlockTime time.Time, powInstance pow.IPow) (uint32, error) {
+	return m.difficultyManager.RequiredDifficulty(block, newBlockTime, powInstance)
+}
+
+// CalcNextRequiredDifficulty calculates the required difficulty for the block
+// after the end of the current best chain based on the difficulty retarget
+// rules.
+//
+// This function is safe for concurrent access.
+func (b *BlockChain) CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error) {
+	b.ChainRLock()
+	block := b.GetMainChainTip()
+	instance := pow.GetInstance(powType, 0, []byte{})
+	instance.SetParams(b.params.PowConfig)
+	instance.SetMainHeight(pow.MainHeight(block.GetHeight() + 1))
+	difficulty, err := b.difficultyManager.RequiredDifficulty(block, timestamp, instance)
+	b.ChainRUnlock()
+	return difficulty, err
+}
+
+// GetCurrentPowDiff returns the current difficulty target of the given pow type.
+func (b *BlockChain) GetCurrentPowDiff(ib model.Block, powType pow.PowType) *big.Int {
+	return b.difficultyManager.GetCurrentPowDiff(ib, powType)
+}
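
With these wrappers in place, callers can stay on the BlockChain facade and let it delegate to the embedded difficultyManager. A small Go sketch of the pattern, mirroring the GetNodeInfo change later in this patch; the helper name is hypothetical, and the blockchain, model, pow, log and fmt imports are assumed.

	// logCurrentPowDiff asks the chain facade for the current MeerXKeccakV1 target;
	// the facade forwards to difficultyManager.GetCurrentPowDiff internally.
	func logCurrentPowDiff(bc *blockchain.BlockChain, tip model.Block) {
		target := bc.GetCurrentPowDiff(tip, pow.MEERXKECCAKV1)
		log.Info(fmt.Sprintf("current MeerXKeccakV1 target: %064x", target))
	}
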
diff --git a/core/blockchain/process.go b/core/blockchain/process.go
index 84f4a1ff..efce5d44 100644
--- a/core/blockchain/process.go
+++ b/core/blockchain/process.go
@@ -16,7 +16,6 @@ import (
 	"github.com/Qitmeer/qng/core/state"
 	"github.com/Qitmeer/qng/core/types"
 	"github.com/Qitmeer/qng/core/types/pow"
-	"github.com/Qitmeer/qng/core/types/pow/difficultymanager"
 	"github.com/Qitmeer/qng/engine/txscript"
 	l "github.com/Qitmeer/qng/log"
 	"github.com/Qitmeer/qng/meerdag"
@@ -155,7 +154,7 @@ func (b *BlockChain) preProcessBlock(block *types.SerializedBlock, flags Behavio
 		// expected based on elapsed time since the last checkpoint and
 		// maximum adjustment allowed by the retarget rules.
 		duration := blockHeader.Timestamp.Sub(checkpointTime)
-		requiredTarget := pow.CompactToBig(difficultymanager.NewDiffManager(b.consensus.BlockChain(), b.params).CalcEasiestDifficulty(
+		requiredTarget := pow.CompactToBig(b.calcEasiestDifficulty(
 			checkpointNode.Difficulty, duration, block.Block().Header.Pow))
 		currentTarget := pow.CompactToBig(blockHeader.Difficulty)
 		if !block.Block().Header.Pow.CompareDiff(currentTarget, requiredTarget) {
diff --git a/core/blockchain/validate.go b/core/blockchain/validate.go
index b8588b20..50c6eb64 100644
--- a/core/blockchain/validate.go
+++ b/core/blockchain/validate.go
@@ -25,7 +25,6 @@ import (
 	"github.com/Qitmeer/qng/core/state"
 	"github.com/Qitmeer/qng/core/types"
 	"github.com/Qitmeer/qng/core/types/pow"
-	"github.com/Qitmeer/qng/core/types/pow/difficultymanager"
 	"github.com/Qitmeer/qng/engine/txscript"
 	"github.com/Qitmeer/qng/meerdag"
 	"github.com/Qitmeer/qng/params"
@@ -761,7 +760,7 @@ func (b *BlockChain) checkBlockHeaderContext(block *types.SerializedBlock, prevN
 		// the calculated difficulty based on the previous block and
 		// difficulty retarget rules.
-		expDiff, err := difficultymanager.NewDiffManager(b.consensus.BlockChain(), b.params).RequiredDifficulty(prevNode,
+		expDiff, err := b.calcNextRequiredDifficulty(prevNode,
 			header.Timestamp, instance)
 		if err != nil {
 			return err
diff --git a/core/types/pow/difficultymanager/interface_difficultymanager.go b/core/types/pow/difficultymanager/difficultymanager.go
similarity index 53%
rename from core/types/pow/difficultymanager/interface_difficultymanager.go
rename to core/types/pow/difficultymanager/difficultymanager.go
index 05889613..0280b8f2 100644
--- a/core/types/pow/difficultymanager/interface_difficultymanager.go
+++ b/core/types/pow/difficultymanager/difficultymanager.go
@@ -1,25 +1,12 @@
 package difficultymanager
 
 import (
-	"math/big"
-	"time"
-
 	"github.com/Qitmeer/qng/consensus/model"
 	"github.com/Qitmeer/qng/core/types"
-	"github.com/Qitmeer/qng/core/types/pow"
 	"github.com/Qitmeer/qng/params"
 )
 
-// DifficultyManager provides a method to resolve the
-// difficulty value of a block
-type DifficultyManager interface {
-	CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error)
-	RequiredDifficulty(block model.Block, newBlockTime time.Time, powInstance pow.IPow) (uint32, error)
-	CalcEasiestDifficulty(bits uint32, duration time.Duration, powInstance pow.IPow) uint32
-	GetCurrentPowDiff(ib model.Block, powType pow.PowType) *big.Int
-}
-
-func NewDiffManager(b model.BlockChain, cfg *params.Params) DifficultyManager {
+func NewDiffManager(b model.BlockChain, cfg *params.Params) model.DifficultyManager {
 	switch cfg.PowConfig.DifficultyMode {
 	case types.DIFFICULTY_MODE_KASPAD:
 		return &kaspadDiff{
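
NewDiffManager now acts as a small factory keyed on PowConfig.DifficultyMode, returning whichever model.DifficultyManager implementation the network asks for. A Go sketch of how a caller or test might select the kaspad-style manager; the helper name is hypothetical, and MixNetParams is used here only because this series enables the mode on mixnet.

	// diffManagerForMixnet returns the manager selected by the mixnet parameters,
	// i.e. the kaspad-style window manager, since mixnet sets DIFFICULTY_MODE_KASPAD.
	func diffManagerForMixnet(b model.BlockChain) model.DifficultyManager {
		return difficultymanager.NewDiffManager(b, &params.MixNetParams)
	}
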
diff --git a/node/api.go b/node/api.go
index aa889af8..9a25e58d 100644
--- a/node/api.go
+++ b/node/api.go
@@ -18,7 +18,6 @@ import (
 	"github.com/Qitmeer/qng/core/json"
 	"github.com/Qitmeer/qng/core/protocol"
 	"github.com/Qitmeer/qng/core/types/pow"
-	"github.com/Qitmeer/qng/core/types/pow/difficultymanager"
 	"github.com/Qitmeer/qng/meerdag"
 	"github.com/Qitmeer/qng/meerevm/eth"
 	"github.com/Qitmeer/qng/params"
@@ -60,7 +59,7 @@ func NewPublicBlockChainAPI(node *QitmeerFull) *PublicBlockChainAPI {
 func (api *PublicBlockChainAPI) GetNodeInfo() (interface{}, error) {
 	best := api.node.GetBlockChain().BestSnapshot()
 	node := api.node.GetBlockChain().BlockDAG().GetBlock(&best.Hash)
-	powNodes := difficultymanager.NewDiffManager(api.node.GetBlockChain().Consensus().BlockChain(), api.node.GetBlockChain().ChainParams()).GetCurrentPowDiff(node, pow.MEERXKECCAKV1)
+	powNodes := api.node.GetBlockChain().GetCurrentPowDiff(node, pow.MEERXKECCAKV1)
 	ret := &json.InfoNodeResult{
 		ID:      api.node.GetPeerServer().PeerID().String(),
 		Version: int32(1000000*version.Major + 10000*version.Minor + 100*version.Patch),
diff --git a/services/mining/newblocktemplate.go b/services/mining/newblocktemplate.go
index ca7d94f8..a83804b0 100644
--- a/services/mining/newblocktemplate.go
+++ b/services/mining/newblocktemplate.go
@@ -13,7 +13,6 @@ import (
 	s "github.com/Qitmeer/qng/core/serialization"
 	"github.com/Qitmeer/qng/core/types"
 	"github.com/Qitmeer/qng/core/types/pow"
-	"github.com/Qitmeer/qng/core/types/pow/difficultymanager"
 	"github.com/Qitmeer/qng/engine/txscript"
 	"github.com/Qitmeer/qng/log"
 	"github.com/Qitmeer/qng/meerdag"
@@ -506,7 +505,7 @@ mempool:
 	ts := MedianAdjustedTime(bc, timeSource)
 
 	//
-	reqCompactDifficulty, err := difficultymanager.NewDiffManager(bc.Consensus().BlockChain(), bc.ChainParams()).CalcNextRequiredDifficulty(ts, powType)
+	reqCompactDifficulty, err := bc.CalcNextRequiredDifficulty(ts, powType)
 	if err != nil {
 		return nil, miningRuleError(ErrGettingDifficulty, err.Error())
 	}
@@ -599,7 +598,7 @@ func UpdateBlockTime(msgBlock *types.Block, chain *blockchain.BlockChain, timeSo
 	// If running on a network that requires recalculating the difficulty,
 	// do so now.
 	if activeNetParams.ReduceMinDifficulty {
-		difficulty, err := difficultymanager.NewDiffManager(chain.Consensus().BlockChain(), chain.ChainParams()).CalcNextRequiredDifficulty(
+		difficulty, err := chain.CalcNextRequiredDifficulty(
 			newTimestamp, msgBlock.Header.Pow.GetPowType())
 		if err != nil {
 			return miningRuleError(ErrGettingDifficulty, err.Error())

From a8c673f9b69b93188ae08e69328c463f3c5feed7 Mon Sep 17 00:00:00 2001
From: frankcrypto
Date: Mon, 11 Dec 2023 11:17:55 +0800
Subject: [PATCH 11/15] fix

---
 core/blockchain/blockchain.go | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/core/blockchain/blockchain.go b/core/blockchain/blockchain.go
index 65a53598..809b2187 100644
--- a/core/blockchain/blockchain.go
+++ b/core/blockchain/blockchain.go
@@ -172,6 +172,8 @@ func (b *BlockChain) Init() error {
 	for _, v := range tips {
 		log.Info(fmt.Sprintf("hash=%s,order=%s,height=%d", v.GetHash(), meerdag.GetOrderLogStr(v.GetOrder()), v.GetHeight()))
 	}
+
+	b.difficultyManager = difficultymanager.NewDiffManager(b.Consensus().BlockChain(), b.params)
 	return nil
 }
 
@@ -1072,7 +1074,6 @@ func New(consensus model.Consensus) (*BlockChain, error) {
 	}
 	b.meerChain = mchain
 	b.Services().RegisterService(b.meerChain)
-	b.difficultyManager = difficultymanager.NewDiffManager(b.Consensus().BlockChain(), par)
 	return &b, nil
 }
 
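
The fix above moves the manager construction from New into Init, i.e. the manager is now created when the chain finishes initializing rather than when the struct is first assembled. A Go sketch of the assumed startup order; the helper and the surrounding service plumbing are hypothetical, and blockchain, model, pow and time imports are assumed.

	// startChainAndQueryDifficulty is a hypothetical startup helper: after this patch
	// the difficultyManager is wired inside Init, so difficulty queries are only valid
	// once Init has returned.
	func startChainAndQueryDifficulty(cons model.Consensus) (uint32, error) {
		bc, err := blockchain.New(cons) // the manager is not wired yet
		if err != nil {
			return 0, err
		}
		if err := bc.Init(); err != nil { // difficultyManager is created here
			return 0, err
		}
		return bc.CalcNextRequiredDifficulty(time.Now(), pow.MEERXKECCAKV1)
	}
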
From d94a47a8b994aa4bafcfb3fa76fba318caedea07 Mon Sep 17 00:00:00 2001
From: frankcrypto
Date: Mon, 11 Dec 2023 11:26:47 +0800
Subject: [PATCH 12/15] remove unused code

---
 common/math/maxmin.go                       | 25 -------------------
 core/types/common.go                        |  5 ----
 core/types/pow/diff.go                      |  7 ++++++
 .../difficultymanager/difficultymanager.go  |  4 +--
 core/types/pow/difficultymanager/kaspad.go  |  4 +--
 params/params_mixnet.go                     |  2 +-
 6 files changed, 12 insertions(+), 35 deletions(-)
 delete mode 100644 common/math/maxmin.go

diff --git a/common/math/maxmin.go b/common/math/maxmin.go
deleted file mode 100644
index 7d13dbfd..00000000
--- a/common/math/maxmin.go
+++ /dev/null
@@ -1,25 +0,0 @@
-package math
-
-// MinInt returns the smaller of x or y.
-func MinInt(x, y int) int {
-	if x < y {
-		return x
-	}
-	return y
-}
-
-// MaxInt64 returns the bigger of x or y.
-func MaxInt64Val(x, y int64) int64 {
-	if x > y {
-		return x
-	}
-	return y
-}
-
-// MinUint32 returns the smaller of x or y.
-func MinUint32(x, y uint32) uint32 {
-	if x < y {
-		return x
-	}
-	return y
-}
diff --git a/core/types/common.go b/core/types/common.go
index 0ec8c9f3..19a0e31c 100644
--- a/core/types/common.go
+++ b/core/types/common.go
@@ -18,9 +18,4 @@ const (
 	// MaxAmount is the maximum transaction amount allowed in atoms.
 	// TODO, relocate the coin related item to chain's params
 	MaxAmount = 21e6 * AtomsPerCoin
-
-	// MEER difficulty adjustment
-	DIFFICULTY_MODE_MEER = 0
-	// KASPAD difficulty adjustment
-	DIFFICULTY_MODE_KASPAD
 )
diff --git a/core/types/pow/diff.go b/core/types/pow/diff.go
index 456a3239..7c102267 100644
--- a/core/types/pow/diff.go
+++ b/core/types/pow/diff.go
@@ -20,6 +20,13 @@ var (
 	OneLsh256 = new(big.Int).Lsh(bigOne, 256)
 )
 
+const (
+	// MEER difficulty adjustment
+	DIFFICULTY_MODE_MEER = 0
+	// KASPAD difficulty adjustment
+	DIFFICULTY_MODE_KASPAD
+)
+
 // HashToBig converts a hash.Hash into a big.Int that can be used to
 // perform math comparisons.
 func HashToBig(hash *hash.Hash) *big.Int {
diff --git a/core/types/pow/difficultymanager/difficultymanager.go b/core/types/pow/difficultymanager/difficultymanager.go
index 0280b8f2..4bb7135b 100644
--- a/core/types/pow/difficultymanager/difficultymanager.go
+++ b/core/types/pow/difficultymanager/difficultymanager.go
@@ -2,13 +2,13 @@ package difficultymanager
 
 import (
 	"github.com/Qitmeer/qng/consensus/model"
-	"github.com/Qitmeer/qng/core/types"
+	"github.com/Qitmeer/qng/core/types/pow"
 	"github.com/Qitmeer/qng/params"
 )
 
 func NewDiffManager(b model.BlockChain, cfg *params.Params) model.DifficultyManager {
 	switch cfg.PowConfig.DifficultyMode {
-	case types.DIFFICULTY_MODE_KASPAD:
+	case pow.DIFFICULTY_MODE_KASPAD:
 		return &kaspadDiff{
 			b:      b,
 			powMax: cfg.PowConfig.MeerXKeccakV1PowLimit,
diff --git a/core/types/pow/difficultymanager/kaspad.go b/core/types/pow/difficultymanager/kaspad.go
index 7ff733e7..75dd35f8 100644
--- a/core/types/pow/difficultymanager/kaspad.go
+++ b/core/types/pow/difficultymanager/kaspad.go
@@ -1,11 +1,11 @@
 package difficultymanager
 
 import (
+	"math"
 	"math/big"
 	"time"
 
 	"github.com/Qitmeer/qng/common/hash"
-	"github.com/Qitmeer/qng/common/math"
 	"github.com/Qitmeer/qng/consensus/model"
 	"github.com/Qitmeer/qng/core/types"
 	"github.com/Qitmeer/qng/core/types/pow"
@@ -159,7 +159,7 @@ func (dm *kaspadDiff) requiredDifficultyFromTargetsWindow(targetsWindow blockWin
 	newTarget := targetsWindow.AverageTarget()
 	newTarget.
 		// We need to clamp the timestamp difference to 1 so that we'll never get a 0 target.
-		Mul(newTarget, div.SetInt64(math.MaxInt64Val(windowMaxTimeStamp-windowMinTimestamp, 1))).
+		Mul(newTarget, div.SetInt64(int64(math.Max(float64(windowMaxTimeStamp-windowMinTimestamp), 1)))).
 		Div(newTarget, div.SetInt64(dm.targetTimePerBlock.Milliseconds())).
 		Div(newTarget, div.SetUint64(uint64(len(targetsWindow))))
 	if newTarget.Cmp(dm.powMax) > 0 {
diff --git a/params/params_mixnet.go b/params/params_mixnet.go
index fd3cc58e..2a0a252d 100644
--- a/params/params_mixnet.go
+++ b/params/params_mixnet.go
@@ -86,7 +86,7 @@ var MixNetParams = Params{
 		},
 		// after this height the big graph will be the main pow graph
 		AdjustmentStartMainHeight: 1440 * 15 / mixTargetTimePerBlock,
-		DifficultyMode:            types.DIFFICULTY_MODE_KASPAD,
+		DifficultyMode:            pow.DIFFICULTY_MODE_KASPAD,
 	},
 
 	WorkDiffAlpha: 1,
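
For clarity, the retarget that the kaspad.go hunk above touches computes, entirely in big.Int integer arithmetic, newTarget = averageTarget * max(windowMaxTimestamp - windowMinTimestamp, 1) / targetTimePerBlockMilliseconds / windowSize, and the real code then caps the result at powMax. A self-contained Go sketch of that calculation with illustrative names:

	// retargetSketch mirrors the Mul/Div chain above: scale the window's average
	// target by the observed window duration relative to the expected duration.
	func retargetSketch(averageTarget *big.Int, windowMinTs, windowMaxTs, targetTimePerBlockMs int64, windowLen int) *big.Int {
		elapsed := windowMaxTs - windowMinTs
		if elapsed < 1 {
			elapsed = 1 // clamp so the target can never collapse to zero
		}
		newTarget := new(big.Int).Set(averageTarget)
		newTarget.Mul(newTarget, big.NewInt(elapsed))
		newTarget.Div(newTarget, big.NewInt(targetTimePerBlockMs))
		newTarget.Div(newTarget, big.NewInt(int64(windowLen)))
		return newTarget // the real code additionally caps this at powMax
	}

Swapping the removed common/math.MaxInt64Val for the standard library's float64-based math.Max keeps the same result for realistic window spans, since millisecond timestamp differences stay far below the range where float64 loses integer precision.
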
From 04946b40dc0c51617bb370bf9fd56162d778aa71 Mon Sep 17 00:00:00 2001
From: frankcrypto
Date: Mon, 11 Dec 2023 11:29:55 +0800
Subject: [PATCH 13/15] remove unused code

---
 consensus/model/block.go       | 1 -
 consensus/model/block_chain.go | 2 --
 2 files changed, 3 deletions(-)

diff --git a/consensus/model/block.go b/consensus/model/block.go
index e0e1f8d3..395e049d 100644
--- a/consensus/model/block.go
+++ b/consensus/model/block.go
@@ -9,5 +9,4 @@
 	GetOrder() uint
 	HasParents() bool
 	GetMainParent() uint
-	GetHeight() uint
 }
diff --git a/consensus/model/block_chain.go b/consensus/model/block_chain.go
index b5cdc197..f46724ed 100644
--- a/consensus/model/block_chain.go
+++ b/consensus/model/block_chain.go
@@ -26,6 +26,4 @@
 	GetBlockOrderByHash(hash *hash.Hash) (uint, error)
 	GetBlockHeader(block Block) *types.BlockHeader
 	ForeachBlueBlocks(start Block, depth uint, powType pow.PowType, fn func(block Block, header *types.BlockHeader) error) error
-	ChainRLock()
-	ChainRUnlock()
 }

From ce9c3333a743e4bdf15422ba21b6ec090a05f666 Mon Sep 17 00:00:00 2001
From: frankcrypto
Date: Mon, 11 Dec 2023 11:34:03 +0800
Subject: [PATCH 14/15] remove unused code

---
 consensus/model/block.go                   |  1 +
 consensus/model/difficulty_manager.go      |  1 -
 core/blockchain/difficulty.go              |  2 +-
 core/types/pow/difficultymanager/kaspad.go | 11 -----------
 core/types/pow/difficultymanager/meer.go   | 16 ----------------
 5 files changed, 2 insertions(+), 29 deletions(-)

diff --git a/consensus/model/block.go b/consensus/model/block.go
index 395e049d..e0e1f8d3 100644
--- a/consensus/model/block.go
+++ b/consensus/model/block.go
@@ -9,4 +9,5 @@
 	GetOrder() uint
 	HasParents() bool
 	GetMainParent() uint
+	GetHeight() uint
 }
diff --git a/consensus/model/difficulty_manager.go b/consensus/model/difficulty_manager.go
index 98218a1d..0dace629 100644
--- a/consensus/model/difficulty_manager.go
+++ b/consensus/model/difficulty_manager.go
@@ -10,7 +10,6 @@
 // DifficultyManager provides a method to resolve the
 // difficulty value of a block
 type DifficultyManager interface {
-	CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error)
 	RequiredDifficulty(block Block, newBlockTime time.Time, powInstance pow.IPow) (uint32, error)
 	CalcEasiestDifficulty(bits uint32, duration time.Duration, powInstance pow.IPow) uint32
 	GetCurrentPowDiff(ib Block, powType pow.PowType) *big.Int
 }
diff --git a/core/blockchain/difficulty.go b/core/blockchain/difficulty.go
index d89ffd2d..e0cdebdd 100644
--- a/core/blockchain/difficulty.go
+++ b/core/blockchain/difficulty.go
@@ -33,7 +33,7 @@ func (m *BlockChain) calcNextRequiredDifficulty(block model.Block, newBlockTime
 // This function is safe for concurrent access.
 func (b *BlockChain) CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error) {
 	b.ChainRLock()
-	block := b.GetMainChainTip()
+	block := b.bd.GetMainChainTip()
 	instance := pow.GetInstance(powType, 0, []byte{})
 	instance.SetParams(b.params.PowConfig)
 	instance.SetMainHeight(pow.MainHeight(block.GetHeight() + 1))
diff --git a/core/types/pow/difficultymanager/kaspad.go b/core/types/pow/difficultymanager/kaspad.go
index 75dd35f8..7727ad4a 100644
--- a/core/types/pow/difficultymanager/kaspad.go
+++ b/core/types/pow/difficultymanager/kaspad.go
@@ -108,17 +108,6 @@ func (m *kaspadDiff) CalcEasiestDifficulty(bits uint32, duration time.Duration,
 	return pow.BigToCompact(newTarget)
 }
 
-func (m *kaspadDiff) CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error) {
-	m.b.ChainRLock()
-	block := m.b.GetMainChainTip()
-	instance := pow.GetInstance(powType, 0, []byte{})
-	instance.SetParams(m.cfg.PowConfig)
-	instance.SetMainHeight(pow.MainHeight(block.GetHeight() + 1))
-	difficulty, err := m.RequiredDifficultyByWindows(m.getblockWindows(block, instance.GetPowType(), int(m.cfg.WorkDiffWindowSize)))
-	m.b.ChainRUnlock()
-	return difficulty, err
-}
-
 func (m *kaspadDiff) RequiredDifficulty(block model.Block, newBlockTime time.Time, powInstance pow.IPow) (uint32, error) {
 	return m.RequiredDifficultyByWindows(m.getblockWindows(block, powInstance.GetPowType(), int(m.cfg.WorkDiffWindowSize)))
 }
diff --git a/core/types/pow/difficultymanager/meer.go b/core/types/pow/difficultymanager/meer.go
index 28dfbcde..ecff9a0e 100644
--- a/core/types/pow/difficultymanager/meer.go
+++ b/core/types/pow/difficultymanager/meer.go
@@ -396,22 +396,6 @@ func (m *meerDiff) getDistanceFromLastAdjustment(block model.Block, powType pow.
 	}
 }
 
-// CalcNextRequiredDifficulty calculates the required difficulty for the block
-// after the end of the current best chain based on the difficulty retarget
-// rules.
-//
-// This function is safe for concurrent access.
-func (m *meerDiff) CalcNextRequiredDifficulty(timestamp time.Time, powType pow.PowType) (uint32, error) {
-	m.b.ChainRLock()
-	block := m.b.GetMainChainTip()
-	instance := pow.GetInstance(powType, 0, []byte{})
-	instance.SetParams(m.cfg.PowConfig)
-	instance.SetMainHeight(pow.MainHeight(block.GetHeight() + 1))
-	difficulty, err := m.RequiredDifficulty(block, timestamp, instance)
-	m.b.ChainRUnlock()
-	return difficulty, err
-}
-
 // find block node by pow type
 func (m *meerDiff) getPowTypeNode(block model.Block, powType pow.PowType) model.Block {
 	for {
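
After this patch the per-algorithm managers no longer expose CalcNextRequiredDifficulty and no longer take the chain lock themselves; the single entry point is the BlockChain wrapper introduced in patch 10, which locks, picks the main-chain tip and forwards to RequiredDifficulty. A minimal Go sketch of the resulting call flow; the caller is hypothetical.

	// minerNextBits shows the only remaining public path to the next difficulty:
	// BlockChain.CalcNextRequiredDifficulty handles locking and tip selection, and
	// the per-mode manager is reduced to the pure RequiredDifficulty computation.
	func minerNextBits(bc *blockchain.BlockChain, powType pow.PowType) (uint32, error) {
		return bc.CalcNextRequiredDifficulty(time.Now(), powType)
	}
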
From 06167896f966974ea6dc7363fbc72e32a9518d4a Mon Sep 17 00:00:00 2001
From: frankcrypto
Date: Mon, 11 Dec 2023 11:49:51 +0800
Subject: [PATCH 15/15] fix

---
 core/types/pow/diff.go                   | 2 +-
 core/types/pow/difficultymanager/meer.go | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/core/types/pow/diff.go b/core/types/pow/diff.go
index 7c102267..86892646 100644
--- a/core/types/pow/diff.go
+++ b/core/types/pow/diff.go
@@ -24,7 +24,7 @@ const (
 	// MEER difficulty adjustment
 	DIFFICULTY_MODE_MEER = 0
 	// KASPAD difficulty adjustment
-	DIFFICULTY_MODE_KASPAD
+	DIFFICULTY_MODE_KASPAD = 1
 )
 
 // HashToBig converts a hash.Hash into a big.Int that can be used to
diff --git a/core/types/pow/difficultymanager/meer.go b/core/types/pow/difficultymanager/meer.go
index ecff9a0e..482b08d6 100644
--- a/core/types/pow/difficultymanager/meer.go
+++ b/core/types/pow/difficultymanager/meer.go
@@ -305,7 +305,6 @@ func (m *meerDiff) RequiredDifficulty(block model.Block, newBlockTime time.Time,
 		"diff", fmt.Sprintf("(%064x)", oldDiffBig))
 	log.Debug("New target", "bits", fmt.Sprintf("%08x", nextDiffBits),
 		"diff", fmt.Sprintf("(%064x)", nextDiffBig))
-
 	return nextDiffBits, nil
 }
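
The diff.go change above is a real fix, not a style tweak: in a Go const block an identifier without an expression repeats the previous line's expression (values do not auto-increment unless iota is used), so before this patch DIFFICULTY_MODE_KASPAD evaluated to 0, the same value as DIFFICULTY_MODE_MEER, and the two modes could not be told apart. A minimal standalone illustration:

	package main

	import "fmt"

	const (
		DIFFICULTY_MODE_MEER   = 0
		DIFFICULTY_MODE_KASPAD // without "= 1" this repeats the previous expression, so it is also 0
	)

	func main() {
		fmt.Println(DIFFICULTY_MODE_MEER == DIFFICULTY_MODE_KASPAD) // prints true
	}
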