simplify the interface to mempool_manager.new_peak() (#16787)

simplify the interface of mempool_manager.new_peak() to take just the spent coin IDs rather than a full NPCResult object
Arvid Norberg 2023-11-07 19:40:16 +01:00 committed by GitHub
parent eabeac82b4
commit 64d6fb56a6
5 changed files with 28 additions and 143 deletions
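
In short: instead of constructing or forwarding an NPCResult, callers of new_peak() now pass the list of coin IDs spent by the new block. A minimal before/after sketch (peak and npc_result stand in for values the caller already has):

    # before: new_peak() took the last block's NPCResult
    await mempool_manager.new_peak(peak, npc_result)

    # after: new_peak() takes just the spent coin IDs (or None when unknown)
    spent_coins = None
    if npc_result.conds is not None:
        spent_coins = [bytes32(s.coin_id) for s in npc_result.conds.spends]
    await mempool_manager.new_peak(peak, spent_coins)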

View File

@@ -8,7 +8,6 @@ from typing import Dict, Optional
 from chia_rs import G2Element
 from clvm.casts import int_to_bytes

-from chia.consensus.cost_calculator import NPCResult
 from chia.consensus.default_constants import DEFAULT_CONSTANTS
 from chia.full_node.mempool_manager import MempoolManager
 from chia.types.blockchain_format.coin import Coin
@@ -18,7 +17,6 @@ from chia.types.coin_record import CoinRecord
 from chia.types.coin_spend import CoinSpend
 from chia.types.condition_opcodes import ConditionOpcode
 from chia.types.spend_bundle import SpendBundle
-from chia.types.spend_bundle_conditions import Spend, SpendBundleConditions
 from chia.util.ints import uint32, uint64

 # this is one week worth of blocks
@@ -99,45 +97,7 @@ async def run_mempool_benchmark() -> None:
         rec = fake_block_record(uint32(height), timestamp)
         # the new block spends one coin, the most recently added one:
         # most_recent_coin_id
-        npc_result = NPCResult(
-            None,
-            SpendBundleConditions(
-                [
-                    Spend(
-                        most_recent_coin_id,
-                        bytes32(b" " * 32),
-                        bytes32(b" " * 32),
-                        123,
-                        None,
-                        0,
-                        None,
-                        None,
-                        None,
-                        None,
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        0,
-                    )
-                ],
-                0,
-                0,
-                0,
-                None,
-                None,
-                [],
-                0,
-                0,
-                0,
-            ),
-            uint64(1000000000),
-        )
-        await mempool.new_peak(rec, npc_result)
+        await mempool.new_peak(rec, [most_recent_coin_id])

         # add 10 transactions to the mempool
         for i in range(10):

View File

@@ -9,7 +9,6 @@ from time import monotonic
 from typing import Dict, Iterator, List, Optional, Tuple

 from chia.consensus.coinbase import create_farmer_coin, create_pool_coin
-from chia.consensus.cost_calculator import NPCResult
 from chia.consensus.default_constants import DEFAULT_CONSTANTS
 from chia.full_node.mempool_manager import MempoolManager
 from chia.simulator.wallet_tools import WalletTool
@@ -18,7 +17,6 @@ from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.types.coin_record import CoinRecord
 from chia.types.mempool_inclusion_status import MempoolInclusionStatus
 from chia.types.spend_bundle import SpendBundle
-from chia.types.spend_bundle_conditions import Spend, SpendBundleConditions
 from chia.util.ints import uint32, uint64
 from chia.util.misc import to_batches
@@ -230,55 +228,17 @@ async def run_mempool_benchmark() -> None:
         print(f" per call: {(stop - start) / 500 * 1000:0.2f}ms")

     print("\nProfiling new_peak() (optimized)")
-    blocks: List[Tuple[BenchBlockRecord, NPCResult]] = []
+    blocks: List[Tuple[BenchBlockRecord, List[bytes32]]] = []
     for coin_id in all_coins.keys():
         height = uint32(height + 1)
         timestamp = uint64(timestamp + 19)
         rec = fake_block_record(height, timestamp)
-        npc_result = NPCResult(
-            None,
-            SpendBundleConditions(
-                [
-                    Spend(
-                        coin_id,
-                        bytes32(b" " * 32),
-                        bytes32(b" " * 32),
-                        123,
-                        None,
-                        None,
-                        None,
-                        None,
-                        None,
-                        None,
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        0,
-                    )
-                ],
-                0,
-                0,
-                0,
-                None,
-                None,
-                [],
-                0,
-                0,
-                0,
-            ),
-            uint64(1000000000),
-        )
-        blocks.append((rec, npc_result))
+        blocks.append((rec, [coin_id]))

     with enable_profiler(True, f"new-peak-{suffix}"):
         start = monotonic()
-        for rec, npc_result in blocks:
-            await mempool.new_peak(rec, npc_result)
+        for rec, spends in blocks:
+            await mempool.new_peak(rec, spends)
         stop = monotonic()
         print(f" time: {stop - start:0.4f}s")
         print(f" per call: {(stop - start) / len(blocks) * 1000:0.2f}ms")
@@ -289,50 +249,12 @@ async def run_mempool_benchmark() -> None:
         height = uint32(height + 2)
         timestamp = uint64(timestamp + 28)
         rec = fake_block_record(height, timestamp)
-        npc_result = NPCResult(
-            None,
-            SpendBundleConditions(
-                [
-                    Spend(
-                        coin_id,
-                        bytes32(b" " * 32),
-                        bytes32(b" " * 32),
-                        123,
-                        None,
-                        None,
-                        None,
-                        None,
-                        None,
-                        None,
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        [],
-                        0,
-                    )
-                ],
-                0,
-                0,
-                0,
-                None,
-                None,
-                [],
-                0,
-                0,
-                0,
-            ),
-            uint64(1000000000),
-        )
-        blocks.append((rec, npc_result))
+        blocks.append((rec, [coin_id]))

     with enable_profiler(True, f"new-peak-reorg-{suffix}"):
         start = monotonic()
-        for rec, npc_result in blocks:
-            await mempool.new_peak(rec, npc_result)
+        for rec, spends in blocks:
+            await mempool.new_peak(rec, spends)
         stop = monotonic()
         print(f" time: {stop - start:0.4f}s")
         print(f" per call: {(stop - start) / len(blocks) * 1000:0.2f}ms")

View File

@@ -1466,9 +1466,12 @@ class FullNode:
         )

         # Update the mempool (returns successful pending transactions added to the mempool)
+        spent_coins: Optional[List[bytes32]] = None
         new_npc_results: List[NPCResult] = state_change_summary.new_npc_results
+        if len(new_npc_results) > 0 and new_npc_results[-1].conds is not None:
+            spent_coins = [bytes32(s.coin_id) for s in new_npc_results[-1].conds.spends]
         mempool_new_peak_result: List[Tuple[SpendBundle, NPCResult, bytes32]] = await self.mempool_manager.new_peak(
-            self.blockchain.get_peak(), new_npc_results[-1] if len(new_npc_results) > 0 else None
+            self.blockchain.get_peak(), spent_coins
         )

         # Check if we detected a spent transaction, to load up our generator cache
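
Passing None is meaningful here: as the mempool_manager diff below shows, new_peak() only takes the in-place eviction fast path when the new peak extends the previous one and a concrete spent-coins list is supplied; otherwise it rebuilds the mempool from scratch. A hedged sketch of the two call shapes (peak and spent_coin_ids are placeholder names):

    # spent coin IDs known: new_peak() can simply evict the included items
    await mempool_manager.new_peak(peak, spent_coin_ids)

    # spent coin IDs unknown: None forces a full mempool rebuild
    await mempool_manager.new_peak(peak, None)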

View File

@@ -578,7 +578,7 @@ class MempoolManager:
         return item

     async def new_peak(
-        self, new_peak: Optional[BlockRecordProtocol], last_npc_result: Optional[NPCResult]
+        self, new_peak: Optional[BlockRecordProtocol], spent_coins: Optional[List[bytes32]]
     ) -> List[Tuple[SpendBundle, NPCResult, bytes32]]:
         """
         Called when a new peak is available, we try to recreate a mempool for the new tip.
@@ -599,21 +599,20 @@
         use_optimization: bool = self.peak is not None and new_peak.prev_transaction_block_hash == self.peak.header_hash
         self.peak = new_peak

-        if use_optimization and last_npc_result is not None:
+        if use_optimization and spent_coins is not None:
             # We don't reinitialize a mempool, just kick removed items
-            if last_npc_result.conds is not None:
-                # transactions in the mempool may be spending multiple coins,
-                # when looking up transactions by all coin IDs, we're likely to
-                # find the same transaction multiple times. We put them in a set
-                # to deduplicate
-                spendbundle_ids_to_remove: Set[bytes32] = set()
-                for spend in last_npc_result.conds.spends:
-                    items: List[MempoolItem] = self.mempool.get_items_by_coin_id(bytes32(spend.coin_id))
-                    for item in items:
-                        included_items.append(MempoolItemInfo(item.cost, item.fee, item.height_added_to_mempool))
-                        self.remove_seen(item.name)
-                        spendbundle_ids_to_remove.add(item.name)
-                self.mempool.remove_from_pool(list(spendbundle_ids_to_remove), MempoolRemoveReason.BLOCK_INCLUSION)
+            # transactions in the mempool may be spending multiple coins,
+            # when looking up transactions by all coin IDs, we're likely to
+            # find the same transaction multiple times. We put them in a set
+            # to deduplicate
+            spendbundle_ids_to_remove: Set[bytes32] = set()
+            for spend in spent_coins:
+                items: List[MempoolItem] = self.mempool.get_items_by_coin_id(spend)
+                for item in items:
+                    included_items.append(MempoolItemInfo(item.cost, item.fee, item.height_added_to_mempool))
+                    self.remove_seen(item.name)
+                    spendbundle_ids_to_remove.add(item.name)
+            self.mempool.remove_from_pool(list(spendbundle_ids_to_remove), MempoolRemoveReason.BLOCK_INCLUSION)
         else:
             old_pool = self.mempool
             self.mempool = Mempool(old_pool.mempool_info, old_pool.fee_estimator)
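
The set-based deduplication above exists because a single mempool item may spend several coins. A small illustration, assuming hypothetical coins coin_a and coin_b that one spend bundle spends and the new block includes:

    # get_items_by_coin_id() returns the same item for coin_a and coin_b,
    # so collecting item.name in a set ensures remove_from_pool() sees
    # each spend-bundle ID only once
    spendbundle_ids_to_remove: Set[bytes32] = set()
    for coin_id in [coin_a, coin_b]:  # both spent by the new block
        for item in mempool.get_items_by_coin_id(coin_id):
            spendbundle_ids_to_remove.add(item.name)  # added twice, stored once
    assert len(spendbundle_ids_to_remove) == 1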

View File

@@ -1368,7 +1368,8 @@ async def test_coin_spending_different_ways_then_finding_it_spent_in_new_peak(ne
     test_coin_records = {coin_id: CoinRecord(coin, uint32(0), TEST_HEIGHT, False, uint64(0))}
     block_record = create_test_block_record(height=new_height)
     npc_result = NPCResult(None, make_test_conds(spend_ids=[coin_id]), uint64(0))
-    await mempool_manager.new_peak(block_record, npc_result)
+    assert npc_result.conds is not None
+    await mempool_manager.new_peak(block_record, [bytes32(s.coin_id) for s in npc_result.conds.spends])
     # As the coin was spent in all the mempool items we had, nothing should be left now
     assert len(mempool_manager.mempool.get_items_by_coin_id(coin_id)) == 0
     assert mempool_manager.mempool.size() == 0