diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py index 7807a1b833e9..07e63fdb0f90 100644 --- a/chia/consensus/block_body_validation.py +++ b/chia/consensus/block_body_validation.py @@ -13,7 +13,6 @@ from chia.consensus.coinbase import create_farmer_coin, create_pool_coin from chia.consensus.constants import ConsensusConstants from chia.consensus.cost_calculator import NPCResult -from chia.consensus.find_fork_point import find_fork_point_in_chain from chia.full_node.block_store import BlockStore from chia.full_node.coin_store import CoinStore from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions, mempool_check_time_locks @@ -39,12 +38,11 @@ async def validate_block_body( blocks: BlockchainInterface, block_store: BlockStore, coin_store: CoinStore, - peak: Optional[BlockRecord], block: Union[FullBlock, UnfinishedBlock], height: uint32, npc_result: Optional[NPCResult], - fork_point_with_peak: Optional[uint32], - get_block_generator: Callable[[BlockInfo], Awaitable[Optional[BlockGenerator]]], + fork_point_with_peak: int, + get_block_generator: Callable[[BlockInfo, int, Dict[uint32, FullBlock]], Awaitable[Optional[BlockGenerator]]], *, validate_signature: bool = True, ) -> Tuple[Optional[Err], Optional[NPCResult]]: @@ -279,12 +277,6 @@ async def validate_block_body( # 15. Check if removals exist and were not previously spent. 
(unspent_db + diff_store + this_block) # The fork point is the last block in common between the peak chain and the chain of `block` - if peak is None or height == 0: - fork_h: int = -1 - elif fork_point_with_peak is not None: - fork_h = fork_point_with_peak - else: - fork_h = find_fork_point_in_chain(blocks, peak, blocks.block_record(block.prev_header_hash)) # Get additions and removals since (after) fork_h but not including this block # The values include: the coin that was added, the height of the block in which it was confirmed, and the @@ -299,22 +291,24 @@ async def validate_block_body( reorg_blocks: Dict[uint32, FullBlock] = {} curr: Optional[FullBlock] = prev_block assert curr is not None - while curr.height > fork_h: + while curr.height > fork_point_with_peak: if curr.height == 0: break curr = await block_store.get_full_block(curr.prev_header_hash) assert curr is not None reorg_blocks[curr.height] = curr - if fork_h != -1: - assert len(reorg_blocks) == height - fork_h - 1 + if fork_point_with_peak != -1: + assert len(reorg_blocks) == height - fork_point_with_peak - 1 curr = prev_block assert curr is not None - while curr.height > fork_h: + while curr.height > fork_point_with_peak: # Coin store doesn't contain coins from fork, we have to run generator for each block in fork if curr.transactions_generator is not None: # These blocks are in the past and therefore assumed to be valid, so get_block_generator won't raise - curr_block_generator: Optional[BlockGenerator] = await get_block_generator(curr) + curr_block_generator: Optional[BlockGenerator] = await get_block_generator( + curr, 0 if fork_point_with_peak is None else fork_point_with_peak, reorg_blocks + ) assert curr_block_generator is not None and curr.transactions_info is not None curr_npc_result = get_name_puzzle_conditions( curr_block_generator, @@ -381,10 +375,10 @@ async def validate_block_body( # can't find in the DB, but also coins that were spent after the fork point look_in_fork: List[bytes32] = 
[] for unspent in unspent_records: - if unspent.confirmed_block_index <= fork_h: + if unspent.confirmed_block_index <= fork_point_with_peak: # Spending something in the current chain, confirmed before fork # (We ignore all coins confirmed after fork) - if unspent.spent == 1 and unspent.spent_block_index <= fork_h: + if unspent.spent == 1 and unspent.spent_block_index <= fork_point_with_peak: # Check for coins spent in an ancestor block return Err.DOUBLE_SPEND, None removal_coin_records[unspent.name] = unspent diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index c9217d535c66..51057637d792 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -252,11 +252,10 @@ async def add_block( self, self.block_store, self.coin_store, - self.get_peak(), block, block.height, npc_result, - fork_point_with_peak, + -1 if fork_point_with_peak is None else fork_point_with_peak, self.get_block_generator, # If we did not already validate the signature, validate it now validate_signature=not pre_validation_result.validated_signature, @@ -404,10 +403,12 @@ async def _reconsider_peak( # We need to recompute the additions and removals, since they are not stored on DB (only generator is). 
if fetched_block_record.header_hash == block_record.header_hash: tx_removals, tx_additions, npc_res = await self.get_tx_removals_and_additions( - fetched_full_block, npc_result + fetched_full_block, fork_height, npc_result ) else: - tx_removals, tx_additions, npc_res = await self.get_tx_removals_and_additions(fetched_full_block, None) + tx_removals, tx_additions, npc_res = await self.get_tx_removals_and_additions( + fetched_full_block, fork_height + ) # Collect the NPC results for later post-processing if npc_res is not None: @@ -438,7 +439,7 @@ async def _reconsider_peak( ) async def get_tx_removals_and_additions( - self, block: FullBlock, npc_result: Optional[NPCResult] = None + self, block: FullBlock, fork_height: int, npc_result: Optional[NPCResult] = None ) -> Tuple[List[bytes32], List[Coin], Optional[NPCResult]]: if not block.is_transaction_block(): return [], [], None @@ -447,7 +448,7 @@ async def get_tx_removals_and_additions( return [], [], None if npc_result is None: - block_generator: Optional[BlockGenerator] = await self.get_block_generator(block) + block_generator: Optional[BlockGenerator] = await self.get_block_generator(block, fork_height) assert block_generator is not None npc_result = get_name_puzzle_conditions( block_generator, @@ -625,11 +626,10 @@ async def validate_unfinished_block( self, self.block_store, self.coin_store, - self.get_peak(), block, uint32(prev_height + 1), npc_result, - None, + prev_height, self.get_block_generator, validate_signature=False, # Signature was already validated before calling this method, no need to validate ) @@ -647,6 +647,7 @@ async def pre_validate_blocks_multiprocessing( wp_summaries: Optional[List[SubEpochSummary]] = None, *, validate_signatures: bool, + fork_height: Optional[uint32] = None, ) -> List[PreValidationResult]: return await pre_validate_blocks_multiprocessing( self.constants, @@ -657,6 +658,7 @@ async def pre_validate_blocks_multiprocessing( npc_results, self.get_block_generator, batch_size, + 
fork_height, wp_summaries, validate_signatures=validate_signatures, ) @@ -882,7 +884,10 @@ def seen_compact_proofs(self, vdf_info: VDFInfo, height: uint32) -> bool: return False async def get_block_generator( - self, block: BlockInfo, additional_blocks: Optional[Dict[bytes32, FullBlock]] = None + self, + block: BlockInfo, + fork: Optional[int] = None, + additional_blocks: Optional[Dict[uint32, FullBlock]] = None, ) -> Optional[BlockGenerator]: if additional_blocks is None: additional_blocks = {} @@ -905,56 +910,32 @@ async def get_block_generator( # (as long as we're in the main chain) result = await self.block_store.get_generators_at(block.transactions_generator_ref_list) else: - # First tries to find the blocks in additional_blocks - reorg_chain: Dict[uint32, FullBlock] = {} - curr = block - additional_height_dict = {} - while curr.prev_header_hash in additional_blocks: - prev: FullBlock = additional_blocks[curr.prev_header_hash] - additional_height_dict[prev.height] = prev - if isinstance(curr, FullBlock): - assert curr.height == prev.height + 1 - reorg_chain[prev.height] = prev - curr = prev - - peak: Optional[BlockRecord] = self.get_peak() - if self.contains_block(curr.prev_header_hash) and peak is not None: - # Then we look up blocks up to fork point one at a time, backtracking - previous_block_hash = curr.prev_header_hash - prev_block_record = await self.block_store.get_block_record(previous_block_hash) - prev_block = await self.block_store.get_full_block(previous_block_hash) - assert prev_block is not None - assert prev_block_record is not None - fork = find_fork_point_in_chain(self, peak, prev_block_record) - curr_2: Optional[FullBlock] = prev_block - assert curr_2 is not None and isinstance(curr_2, FullBlock) - reorg_chain[curr_2.height] = curr_2 - while curr_2.height > fork and curr_2.height > 0: - curr_2 = await self.block_store.get_full_block(curr_2.prev_header_hash) - assert curr_2 is not None - reorg_chain[curr_2.height] = curr_2 - + assert fork is 
not None + prev_full_block = await self.block_store.get_full_block(block.prev_header_hash) + assert prev_full_block is not None + reorg_chain_height_to_hash = {} + block_recs = await self.block_store.get_block_records_in_range(fork, prev_full_block.height) + curr = block_recs[prev_full_block.header_hash] + while curr.height > fork and curr.height > 0: + reorg_chain_height_to_hash[curr.height] = curr.header_hash + + # todo aggregate heights and hashes to one query for ref_height in block.transactions_generator_ref_list: - if ref_height in reorg_chain: - ref_block = reorg_chain[ref_height] + if ref_height in additional_blocks: + ref_block = additional_blocks[ref_height] assert ref_block is not None if ref_block.transactions_generator is None: raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR) result.append(ref_block.transactions_generator) + if ref_height > fork: + gen = await self.block_store.get_generator(reorg_chain_height_to_hash[ref_height]) + if gen is None: + raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR) + result.append(gen) else: - if ref_height in additional_height_dict: - ref_block = additional_height_dict[ref_height] - assert ref_block is not None - if ref_block.transactions_generator is None: - raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR) - result.append(ref_block.transactions_generator) - else: - header_hash = self.height_to_hash(ref_height) - if header_hash is None: - raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR) - gen = await self.block_store.get_generator(header_hash) - if gen is None: - raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR) - result.append(gen) + [gen] = await self.block_store.get_generators_at([ref_height]) + if gen is None: + raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR) + result.append(gen) assert len(result) == len(ref_list) return BlockGenerator(block.transactions_generator, result, []) diff --git a/chia/consensus/find_fork_point.py b/chia/consensus/find_fork_point.py index 1dc233c15ad2..c99fe96b5536 
100644 --- a/chia/consensus/find_fork_point.py +++ b/chia/consensus/find_fork_point.py @@ -14,7 +14,7 @@ def find_fork_point_in_chain( ) -> int: """Tries to find height where new chain (block_2) diverged from block_1 (assuming prev blocks are all included in chain) - Returns -1 if chains have no common ancestor + Returns -1 if chains have no common ancestor * assumes the fork point is loaded in blocks """ while block_2.height > 0 or block_1.height > 0: @@ -30,6 +30,5 @@ if block_2 != block_1: # All blocks are different return -1 - # First block is the same return 0 diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py index 81bdff107e0d..e609c8326a78 100644 --- a/chia/consensus/multiprocess_validation.py +++ b/chia/consensus/multiprocess_validation.py @@ -170,8 +170,11 @@ async def pre_validate_blocks_multiprocessing( pool: Executor, check_filter: bool, npc_results: Dict[uint32, NPCResult], - get_block_generator: Callable[[BlockInfo, Dict[bytes32, FullBlock]], Awaitable[Optional[BlockGenerator]]], + get_block_generator: Callable[ + [BlockInfo, Optional[uint32], Dict[uint32, FullBlock]], Awaitable[Optional[BlockGenerator]] + ], batch_size: int, + fork_height: Optional[uint32], wp_summaries: Optional[List[SubEpochSummary]] = None, *, validate_signatures: bool = True, @@ -300,12 +303,12 @@ async def pre_validate_blocks_multiprocessing( # We ONLY add blocks which are in the past, based on header hashes (which are validated later) to the # prev blocks dict.
This is important since these blocks are assumed to be valid and are used as previous # generator references - prev_blocks_dict: Dict[bytes32, FullBlock] = {} + prev_blocks_dict: Dict[uint32, FullBlock] = {} curr_b: FullBlock = block while curr_b.prev_header_hash in block_dict: curr_b = block_dict[curr_b.prev_header_hash] - prev_blocks_dict[curr_b.header_hash] = curr_b + prev_blocks_dict[curr_b.height] = curr_b if isinstance(block, FullBlock): assert get_block_generator is not None @@ -313,7 +316,9 @@ async def pre_validate_blocks_multiprocessing( b_pickled = [] b_pickled.append(bytes(block)) try: - block_generator: Optional[BlockGenerator] = await get_block_generator(block, prev_blocks_dict) + block_generator: Optional[BlockGenerator] = await get_block_generator( + block, fork_height, prev_blocks_dict + ) except ValueError: return [ PreValidationResult( diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 9bcaf7c0f50e..7012856fe5c2 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -567,7 +567,7 @@ async def short_sync_batch(self, peer: WSChiaConnection, start_height: uint32, t raise ValueError(f"Error short batch syncing, invalid/no response for {height}-{end_height}") async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high): state_change_summary: Optional[StateChangeSummary] - success, state_change_summary, _ = await self.add_block_batch(response.blocks, peer, None) + success, state_change_summary, _ = await self.add_block_batch(response.blocks, peer, start_height) if not success: raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}") if state_change_summary is not None: @@ -1202,7 +1202,11 @@ async def add_block_batch( # for these blocks (unlike during normal operation where we validate one at a time) pre_validate_start = time.monotonic() pre_validation_results: List[PreValidationResult] = await 
self.blockchain.pre_validate_blocks_multiprocessing( - blocks_to_validate, {}, wp_summaries=wp_summaries, validate_signatures=True + blocks_to_validate, + {}, + wp_summaries=wp_summaries, + validate_signatures=True, + fork_height=fork_point, ) pre_validate_end = time.monotonic() pre_validate_time = pre_validate_end - pre_validate_start diff --git a/tests/blockchain/blockchain_test_utils.py b/tests/blockchain/blockchain_test_utils.py index a3be079d74db..4cb99f5ffc11 100644 --- a/tests/blockchain/blockchain_test_utils.py +++ b/tests/blockchain/blockchain_test_utils.py @@ -59,7 +59,10 @@ async def _validate_and_add_block( else: # Do not change this, validate_signatures must be False pre_validation_results: List[PreValidationResult] = await blockchain.pre_validate_blocks_multiprocessing( - [block], {}, validate_signatures=False + [block], + {}, + validate_signatures=False, + fork_height=fork_point_with_peak, ) assert pre_validation_results is not None results = pre_validation_results[0] diff --git a/tests/blockchain/test_blockchain.py b/tests/blockchain/test_blockchain.py index cb42cd034bd0..7ccaebc5dbfd 100644 --- a/tests/blockchain/test_blockchain.py +++ b/tests/blockchain/test_blockchain.py @@ -3196,7 +3196,7 @@ async def test_reorg_from_genesis(self, empty_blockchain, bt): assert b.get_peak().height == 14 # Reorg to alternate chain that is 1 height longer - blocks_reorg_chain = bt.get_consecutive_blocks(16, [], seed=b"2") + blocks_reorg_chain = bt.get_consecutive_blocks(15, [blocks[0]], seed=b"2") for reorg_block in blocks_reorg_chain: if reorg_block.height < 15: await _validate_and_add_block_multi_result( @@ -3380,6 +3380,7 @@ async def test_reorg_new_ref(empty_blockchain, bt): else: expected = AddBlockResult.NEW_PEAK fork_point_with_peak = uint32(1) + log.info(f"check block {block.height}") await _validate_and_add_block(b, block, expected_result=expected, fork_point_with_peak=fork_point_with_peak) assert b.get_peak().height == 20 @@ -3430,7 +3431,7 @@ async def 
test_reorg_stale_fork_height(empty_blockchain, bt): # fake the fork_height to make every new block look like a reorg for block in blocks[5:]: - await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK, fork_point_with_peak=2) + await _validate_and_add_block(b, block, expected_result=AddBlockResult.NEW_PEAK, fork_point_with_peak=uint32(2)) assert b.get_peak().height == 13 @@ -3585,12 +3586,14 @@ async def test_reorg_flip_flop(empty_blockchain, bt): fork_height = 2 if counter > 3 else None preval: List[PreValidationResult] = await b.pre_validate_blocks_multiprocessing( - [block1], {}, validate_signatures=False + [block1], {}, validate_signatures=False, fork_height=fork_height ) result, err, _ = await b.add_block(block1, preval[0], fork_point_with_peak=fork_height) assert not err preval: List[PreValidationResult] = await b.pre_validate_blocks_multiprocessing( - [block2], {}, validate_signatures=False + [block2], + {}, + validate_signatures=False, ) result, err, _ = await b.add_block(block2, preval[0], fork_point_with_peak=fork_height) assert not err diff --git a/tests/core/full_node/test_full_node.py b/tests/core/full_node/test_full_node.py index 144b146c3878..f6c0d1b46137 100644 --- a/tests/core/full_node/test_full_node.py +++ b/tests/core/full_node/test_full_node.py @@ -406,7 +406,7 @@ async def check_transaction_confirmed(transaction) -> bool: else: assert template is not None if test_reorgs: - reog_blocks = bt.get_consecutive_blocks(14) + reog_blocks: List[FullBlock] = bt.get_consecutive_blocks(14) for r in range(0, len(reog_blocks), 3): for reorg_block in reog_blocks[:r]: await _validate_and_add_block_no_error(blockchain, reorg_block) @@ -495,10 +495,10 @@ async def test_basic_chain(self, wallet_nodes, self_hostname): assert full_node_1.full_node.blockchain.get_peak().height == 0 - for block in bt.get_consecutive_blocks(30): + for block in bt.get_consecutive_blocks(30, block_list_input=blocks): await full_node_1.full_node.add_block(block, 
peer) - assert full_node_1.full_node.blockchain.get_peak().height == 29 + assert full_node_1.full_node.blockchain.get_peak().height == 30 @pytest.mark.asyncio async def test_respond_end_of_sub_slot(self, wallet_nodes, self_hostname): diff --git a/tests/wallet/sync/test_wallet_sync.py b/tests/wallet/sync/test_wallet_sync.py index 2342f2793d91..fed106232a33 100644 --- a/tests/wallet/sync/test_wallet_sync.py +++ b/tests/wallet/sync/test_wallet_sync.py @@ -291,7 +291,7 @@ async def test_short_batch_sync_wallet(self, two_wallet_nodes, default_400_block @pytest.mark.limit_consensus_modes(reason="save time") @pytest.mark.asyncio - async def test_long_sync_wallet(self, two_wallet_nodes, default_1000_blocks, default_400_blocks, self_hostname): + async def test_long_sync_wallet(self, two_wallet_nodes, default_400_blocks, bt, self_hostname): full_nodes, wallets, bt = two_wallet_nodes full_node_api = full_nodes[0] full_node_server = full_node_api.full_node.server @@ -311,8 +311,9 @@ async def test_long_sync_wallet(self, two_wallet_nodes, default_1000_blocks, def for wallet_node, wallet_server in wallets: await time_out_assert(600, wallet_height_at_least, True, wallet_node, len(default_400_blocks) - 1) + reorg_blocks = bt.get_consecutive_blocks(num_blocks=400, block_list_input=default_400_blocks[:10]) # Tests a long reorg - for block in default_1000_blocks: + for block in reorg_blocks: await full_node_api.full_node.add_block(block) for wallet_node, wallet_server in wallets: @@ -321,20 +322,20 @@ async def test_long_sync_wallet(self, two_wallet_nodes, default_1000_blocks, def log.info( f"wallet node height is {await wallet_node.wallet_state_manager.blockchain.get_finished_sync_up_to()}" ) - await time_out_assert(600, wallet_height_at_least, True, wallet_node, len(default_1000_blocks) - 1) + await time_out_assert(600, wallet_height_at_least, True, wallet_node, len(reorg_blocks) - 1) await disconnect_all_and_reconnect(wallet_server, full_node_server, self_hostname) # Tests a short 
reorg num_blocks = 30 - blocks_reorg = bt.get_consecutive_blocks(num_blocks, block_list_input=default_1000_blocks[:-5]) + blocks_reorg = bt.get_consecutive_blocks(num_blocks, block_list_input=reorg_blocks[:-5]) for i in range(len(blocks_reorg) - num_blocks - 10, len(blocks_reorg)): await full_node_api.full_node.add_block(blocks_reorg[i]) for wallet_node, wallet_server in wallets: await time_out_assert( - 600, wallet_height_at_least, True, wallet_node, len(default_1000_blocks) + num_blocks - 5 - 1 + 600, wallet_height_at_least, True, wallet_node, len(reorg_blocks) + num_blocks - 5 - 1 ) @pytest.mark.limit_consensus_modes(reason="save time")