From d0141b1dabdaeb57b2a9301712082de09b94dff7 Mon Sep 17 00:00:00 2001
From: Bushstar
Date: Fri, 8 Nov 2024 05:20:56 +0000
Subject: [PATCH] Fix error in no undo hash

---
 src/dfi/masternodes.cpp                       |  2 +-
 src/dfi/rpc_accounts.cpp                      |  3 +++
 .../functional/feature_consolidate_rewards.py | 20 +++++++++----------
 3 files changed, 14 insertions(+), 11 deletions(-)

diff --git a/src/dfi/masternodes.cpp b/src/dfi/masternodes.cpp
index 6aef72faf9..3d93bd4ca4 100644
--- a/src/dfi/masternodes.cpp
+++ b/src/dfi/masternodes.cpp
@@ -1475,7 +1475,7 @@ std::pair<std::string, std::string> GetDVMDBHashes(CCustomCSView &view) {
     unsigned char hash[CSHA256::OUTPUT_SIZE];
     unsigned char hashNoUndo[CSHA256::OUTPUT_SIZE];
     hasher.Finalize(hash);
-    hasher.Finalize(hashNoUndo);
+    hasherNoUndo.Finalize(hashNoUndo);
 
     // Convert hashes to hex string
     const auto hashHex = HexStr(hash, hash + CSHA256::OUTPUT_SIZE);
diff --git a/src/dfi/rpc_accounts.cpp b/src/dfi/rpc_accounts.cpp
index b29e73f3e8..e5630cec40 100644
--- a/src/dfi/rpc_accounts.cpp
+++ b/src/dfi/rpc_accounts.cpp
@@ -3668,6 +3668,9 @@ UniValue logdvmstate(const JSONRPCRequest &request) {
         pcursor->Next();
     }
 
+    // Delete iterator
+    delete pcursor;
+
     if (outFile.is_open()) {
         outFile.close();
     }
diff --git a/test/functional/feature_consolidate_rewards.py b/test/functional/feature_consolidate_rewards.py
index 359e854f33..d012f2ba23 100755
--- a/test/functional/feature_consolidate_rewards.py
+++ b/test/functional/feature_consolidate_rewards.py
@@ -155,8 +155,8 @@ def setup(self):
 
     def pre_fork24_consolidate(self):
         # Compare hash before consolidation
-        hash_0 = self.nodes[0].logdbhashes()["dvmhash"]
-        hash_1 = self.nodes[1].logdbhashes()["dvmhash"]
+        hash_0 = self.nodes[0].logdbhashes()["dvmhash_no_undo"]
+        hash_1 = self.nodes[1].logdbhashes()["dvmhash_no_undo"]
         assert_equal(hash_0, hash_1)
 
         # Generate rewards
@@ -167,7 +167,7 @@ def pre_fork24_consolidate(self):
         self.stop_node(1)
 
         # Start node with consolidation
-        self.args.append(f"-consolidaterewards={self.symbolGOOGL}")
+        self.args.append(f"-consolidaterewards={self.symbolGD}")
         self.start_node(1, self.args)
         connect_nodes_bi(self.nodes, 0, 1)
 
@@ -186,8 +186,8 @@ def pre_fork24_consolidate(self):
         self.idGOOGL = list(self.nodes[0].gettoken(self.symbolGOOGL).keys())[0]
 
         # Compare hash before consolidation
-        hash_0 = self.nodes[0].logdbhashes()["dvmhash"]
-        hash_1 = self.nodes[1].logdbhashes()["dvmhash"]
+        hash_0 = self.nodes[0].logdbhashes()["dvmhash_no_undo"]
+        hash_1 = self.nodes[1].logdbhashes()["dvmhash_no_undo"]
         assert_equal(hash_0, hash_1)
 
     def post_fork24_consolidate(self):
@@ -200,15 +200,15 @@ def post_fork24_consolidate(self):
         self.sync_blocks()
 
         # Compare hash before consolidation
-        hash_0 = self.nodes[0].logdbhashes()["dvmhash"]
-        hash_1 = self.nodes[1].logdbhashes()["dvmhash"]
+        hash_0 = self.nodes[0].logdbhashes()["dvmhash_no_undo"]
+        hash_1 = self.nodes[1].logdbhashes()["dvmhash_no_undo"]
         assert_equal(hash_0, hash_1)
 
         # Stop node
         self.stop_node(1)
 
         # Start node with consolidation
-        self.args.append(f"-consolidaterewards={self.symbolGOOGL}")
+        self.args.append(f"-consolidaterewards={self.symbolGD}")
         self.start_node(1, self.args)
         connect_nodes_bi(self.nodes, 0, 1)
 
@@ -224,8 +224,8 @@ def post_fork24_consolidate(self):
 
         self.sync_blocks()
 
         # Compare hash before consolidation
-        hash_0 = self.nodes[0].logdbhashes()["dvmhash"]
-        hash_1 = self.nodes[1].logdbhashes()["dvmhash"]
+        hash_0 = self.nodes[0].logdbhashes()["dvmhash_no_undo"]
+        hash_1 = self.nodes[1].logdbhashes()["dvmhash_no_undo"]
         assert_equal(hash_0, hash_1)
 