Ms.mempool improvements (#1823)

* Remove overflow from list, and remove useless call to handle_eos

* Unindent

* Changes to mempool

* tests

* progress on tests

* Add tests for new mempool

* Fix lint and revert streamable changes

* Improve logging

* Test level warning

* Fix test

* Increase mempool size to 150x
This commit is contained in:
Mariano Sorgente 2021-04-14 13:19:12 +09:00 committed by GitHub
parent 9ea6399280
commit d2466ee822
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 224 additions and 873 deletions

View File

@ -37,22 +37,13 @@ class ConsensusConstants:
GENESIS_PRE_FARM_POOL_PUZZLE_HASH: bytes32 # The block at height must pay out to this pool puzzle hash
GENESIS_PRE_FARM_FARMER_PUZZLE_HASH: bytes32 # The block at height must pay out to this farmer puzzle hash
MAX_VDF_WITNESS_SIZE: int # The maximum number of classgroup elements within an n-wesolowski proof
# Target tx count per sec
TX_PER_SEC: int
# Size of mempool = 10x the size of block
MEMPOOL_BLOCK_BUFFER: int
# Max coin amount uint(1 << 64). This allows coin amounts to fit in 64 bits. This is around 18M chia.
MAX_COIN_AMOUNT: int
# Raw size per block target = 1,000,000 bytes
# Raw TX (single in, single out) = 219 bytes (not compressed)
# TX = 457 vBytes
# floor(1,000,000 / 219) * 457 = 2086662 (size in vBytes)
# Max block cost in virtual bytes
MAX_BLOCK_COST: int
# MAX block cost in clvm cost units = MAX_BLOCK_COST * CLVM_COST_RATIO_CONSTANT
# 1 vByte = 108 clvm cost units
CLVM_COST_RATIO_CONSTANT: int
# Max block cost in clvm cost units (MAX_BLOCK_COST * CLVM_COST_RATIO_CONSTANT)
# Max block cost in clvm cost units
MAX_BLOCK_COST_CLVM: int
WEIGHT_PROOF_THRESHOLD: uint8

View File

@ -36,24 +36,13 @@ testnet_kwargs = {
"3d8765d3a597ec1d99663f6c9816d915b9f68613ac94009884c4addaefcce6af"
),
"MAX_VDF_WITNESS_SIZE": 64,
# Target tx count per sec
"TX_PER_SEC": 20,
# Size of mempool = 10x the size of block
"MEMPOOL_BLOCK_BUFFER": 10,
# Size of mempool = 150x the size of block
"MEMPOOL_BLOCK_BUFFER": 150,
# Max coin amount, fits into 64 bits
"MAX_COIN_AMOUNT": uint64((1 << 64) - 1),
# Targeting twice bitcoin's block size of 1.3MB per block
# Raw size per block target = 1,300,000 * 600 / 47 = approx 100 KB
# Raw TX (single in, single out) = 219 bytes (not compressed)
# TX = 457 vBytes
# floor(100 * 1024 / 219) * 457 = 213684 (size in vBytes)
# Max block cost in virtual bytes
"MAX_BLOCK_COST": 213684,
# MAX block cost in clvm cost units = MAX_BLOCK_COST * CLVM_COST_RATIO_CONSTANT
# 1 vByte = 108 clvm cost units
"CLVM_COST_RATIO_CONSTANT": 108,
# Max block cost in clvm cost units (MAX_BLOCK_COST * CLVM_COST_RATIO_CONSTANT)
# "MAX_BLOCK_COST_CLVM": 23077872,
# Max block cost in clvm cost units
"MAX_BLOCK_COST_CLVM": 40000000, # Based on arvid analysis
"WEIGHT_PROOF_THRESHOLD": 2,
"BLOCKS_CACHE_SIZE": 4608 + (128 * 4),

View File

@ -44,6 +44,7 @@ from chia.types.header_block import HeaderBlock
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
from chia.types.spend_bundle import SpendBundle
from chia.types.unfinished_block import UnfinishedBlock
from chia.util.bech32m import encode_puzzle_hash
from chia.util.db_wrapper import DBWrapper
from chia.util.errors import ConsensusError, Err
from chia.util.ints import uint8, uint32, uint64, uint128
@ -88,6 +89,7 @@ class FullNode:
self.state_changed_callback: Optional[Callable] = None
self.full_node_peers = None
self.sync_store = None
self.signage_point_times = [time.time() for _ in range(self.constants.NUM_SPS_SUB_SLOT)]
if name:
self.log = logging.getLogger(name)
@ -1095,9 +1097,16 @@ class FullNode:
self.full_node_store.add_unfinished_block(height, block, validate_result)
if farmed_block is True:
self.log.info(f"🍀 Farmed unfinished_block {block_hash}")
self.log.info(
f"🍀 Farmed unfinished_block {block_hash}, SP: {block.reward_chain_block.signage_point_index}"
)
else:
self.log.info(f"Added unfinished_block {block_hash}, not farmed")
self.log.info(
f"Added unfinished_block {block_hash}, not farmed by us,"
f" SP: {block.reward_chain_block.signage_point_index} time: "
f"{time.time() - self.signage_point_times[block.reward_chain_block.signage_point_index]}"
f"Pool pk {encode_puzzle_hash(block.foliage.foliage_block_data.pool_target.puzzle_hash, 'xch')}"
)
sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
self.constants,

View File

@ -494,6 +494,7 @@ class FullNodeAPI:
f"{self.full_node.constants.NUM_SPS_SUB_SLOT}: "
f"{request.challenge_chain_vdf.output.get_hash()} "
)
self.full_node.signage_point_times[request.index_from_challenge] = time.time()
sub_slot_tuple = self.full_node.full_node_store.get_sub_slot(request.challenge_chain_vdf.challenge)
if sub_slot_tuple is not None:
prev_challenge = sub_slot_tuple[0].challenge_chain.challenge_chain_end_of_slot_vdf.challenge

View File

@ -8,31 +8,39 @@ from chia.types.mempool_item import MempoolItem
class Mempool:
spends: Dict[bytes32, MempoolItem]
sorted_spends: SortedDict # Dict[float, Dict[bytes32, MempoolItem]]
additions: Dict[bytes32, MempoolItem]
removals: Dict[bytes32, MempoolItem]
size: int
def __init__(self, max_size_in_cost: int):
self.spends: Dict[bytes32, MempoolItem] = {}
self.sorted_spends: SortedDict = SortedDict()
self.additions: Dict[bytes32, MempoolItem] = {}
self.removals: Dict[bytes32, MempoolItem] = {}
self.max_size_in_cost: int = max_size_in_cost
self.total_mempool_cost: int = 0
# if new min fee is added
@staticmethod
def create(size: int):
self = Mempool()
self.spends = {}
self.additions = {}
self.removals = {}
self.sorted_spends = SortedDict()
self.size = size
return self
def get_min_fee_rate(self, cost: int) -> float:
"""
Gets the minimum fpc rate that a transaction with specified cost will need in order to get included.
"""
def get_min_fee_rate(self) -> float:
if self.at_full_capacity():
fee_per_cost, val = self.sorted_spends.peekitem(index=0)
return fee_per_cost
if self.at_full_capacity(cost):
current_cost = self.total_mempool_cost
# Iterates through all spends in increasing fee per cost
for fee_per_cost, spends_with_fpc in self.sorted_spends.items():
for spend_name, item in spends_with_fpc.items():
current_cost -= item.cost_result.cost
# Removing one at a time, until our transaction of size cost fits
if current_cost + cost <= self.max_size_in_cost:
return fee_per_cost
raise ValueError(
f"Transaction with cost {cost} does not fit in mempool of max cost {self.max_size_in_cost}"
)
else:
return 0
def remove_spend(self, item: MempoolItem):
def remove_from_pool(self, item: MempoolItem):
"""
Removes an item from the mempool.
"""
removals: List[Coin] = item.spend_bundle.removals()
additions: List[Coin] = item.spend_bundle.additions()
for rem in removals:
@ -44,6 +52,8 @@ class Mempool:
dic = self.sorted_spends[item.fee_per_cost]
if len(dic.values()) == 0:
del self.sorted_spends[item.fee_per_cost]
self.total_mempool_cost -= item.cost_result.cost
assert self.total_mempool_cost >= 0
def add_to_pool(
self,
@ -51,25 +61,33 @@ class Mempool:
additions: List[Coin],
removals_dic: Dict[bytes32, Coin],
):
if self.at_full_capacity():
"""
Adds an item to the mempool by kicking out transactions (if it doesn't fit), in order of increasing fee per cost
"""
while self.at_full_capacity(item.cost_result.cost):
# Val is Dict[hash, MempoolItem]
fee_per_cost, val = self.sorted_spends.peekitem(index=0)
to_remove = list(val.values())[0]
self.remove_spend(to_remove)
self.remove_from_pool(to_remove)
self.spends[item.name] = item
# sorted_spends is Dict[float, Dict[bytes32, MempoolItem]]
if item.fee_per_cost in self.sorted_spends:
self.sorted_spends[item.fee_per_cost][item.name] = item
else:
if item.fee_per_cost not in self.sorted_spends:
self.sorted_spends[item.fee_per_cost] = {}
self.sorted_spends[item.fee_per_cost][item.name] = item
self.sorted_spends[item.fee_per_cost][item.name] = item
for add in additions:
self.additions[add.name()] = item
for key in removals_dic.keys():
self.removals[key] = item
self.total_mempool_cost += item.cost_result.cost
def at_full_capacity(self) -> bool:
return len(self.spends.keys()) >= self.size
def at_full_capacity(self, cost: int) -> bool:
"""
Checks whether the mempool is at full capacity and cannot accept a transaction with size cost.
"""
return self.total_mempool_cost + cost > self.max_size_in_cost

View File

@ -57,19 +57,17 @@ class MempoolManager:
self.coin_store = coin_store
tx_per_sec = self.constants.TX_PER_SEC
sec_per_block = self.constants.SUB_SLOT_TIME_TARGET // self.constants.SLOT_BLOCKS_TARGET
block_buffer_count = self.constants.MEMPOOL_BLOCK_BUFFER
# MEMPOOL_SIZE = 60000
self.mempool_size = int(tx_per_sec * sec_per_block * block_buffer_count)
self.potential_cache_size = 300
self.mempool_max_total_cost = int(self.constants.MAX_BLOCK_COST_CLVM * self.constants.MEMPOOL_BLOCK_BUFFER)
self.potential_cache_max_total_cost = int(
self.constants.MAX_BLOCK_COST_CLVM * self.constants.MEMPOOL_BLOCK_BUFFER
)
self.potential_cache_cost: int = 0
self.seen_cache_size = 10000
self.pool = ProcessPoolExecutor(max_workers=1)
# The mempool will correspond to a certain peak
self.peak: Optional[BlockRecord] = None
self.mempool: Mempool = Mempool.create(self.mempool_size)
self.mempool: Mempool = Mempool(self.mempool_max_total_cost)
def shut_down(self):
self.pool.shutdown(wait=True)
@ -93,8 +91,13 @@ class MempoolManager:
spend_bundles: List[SpendBundle] = []
removals = []
additions = []
broke_from_inner_loop = False
log.info(f"Starting to make block, max cost: {self.constants.MAX_BLOCK_COST_CLVM}")
for dic in self.mempool.sorted_spends.values():
if broke_from_inner_loop:
break
for item in dic.values():
log.info(f"Cumulative cost: {cost_sum}")
if (
item.cost_result.cost + cost_sum <= self.constants.MAX_BLOCK_COST_CLVM
and item.fee + fee_sum <= self.constants.MAX_COIN_AMOUNT
@ -105,6 +108,7 @@ class MempoolManager:
removals.extend(item.removals)
additions.extend(item.additions)
else:
broke_from_inner_loop = True
break
if len(spend_bundles) > 0:
return SpendBundle.aggregate(spend_bundles), additions, removals
@ -130,7 +134,7 @@ class MempoolManager:
if cost == 0:
return False
fees_per_cost = fees / cost
if not self.mempool.at_full_capacity() or fees_per_cost >= self.mempool.get_min_fee_rate():
if not self.mempool.at_full_capacity(cost) or fees_per_cost > self.mempool.get_min_fee_rate(cost):
return True
return False
@ -272,10 +276,10 @@ class MempoolManager:
fees_per_cost: float = fees / cost
# If pool is at capacity check the fee, if not then accept even without the fee
if self.mempool.at_full_capacity():
if self.mempool.at_full_capacity(cost):
if fees == 0:
return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE
if fees_per_cost < self.mempool.get_min_fee_rate():
if fees_per_cost <= self.mempool.get_min_fee_rate(cost):
return None, MempoolInclusionStatus.FAILED, Err.INVALID_FEE_LOW_FEE
# Check removals against UnspentDB + DiffStore + Mempool + SpendBundle
# Use this information later when constructing a block
@ -344,7 +348,7 @@ class MempoolManager:
if fail_reason:
mempool_item: MempoolItem
for mempool_item in conflicting_pool_items.values():
self.mempool.remove_spend(mempool_item)
self.mempool.remove_from_pool(mempool_item)
removals: List[Coin] = [coin for coin in removal_coin_dict.values()]
new_item = MempoolItem(new_spend, uint64(fees), cost_result, spend_name, additions, removals)
@ -381,10 +385,15 @@ class MempoolManager:
Adds SpendBundles that have failed to be added to the pool in potential tx set.
This is later used to retry to add them.
"""
self.potential_txs[spend_name] = spend, cost_result, spend_name
if spend_name in self.potential_txs:
return
while len(self.potential_txs) > self.potential_cache_size:
self.potential_txs[spend_name] = spend, cost_result, spend_name
self.potential_cache_cost += cost_result.cost
while self.potential_cache_cost > self.potential_cache_max_total_cost:
first_in = list(self.potential_txs.keys())[0]
self.potential_cache_max_total_cost -= self.potential_txs[first_in][1].cost
self.potential_txs.pop(first_in)
def get_spendbundle(self, bundle_hash: bytes32) -> Optional[SpendBundle]:
@ -413,7 +422,7 @@ class MempoolManager:
self.peak = new_peak
old_pool = self.mempool
self.mempool = Mempool.create(self.mempool_size)
self.mempool = Mempool(self.mempool_max_total_cost)
for item in old_pool.spends.values():
await self.add_spendbundle(item.spend_bundle, item.cost_result, item.spend_bundle_name, False)
@ -426,7 +435,8 @@ class MempoolManager:
if status == MempoolInclusionStatus.SUCCESS:
txs_added.append((tx, cached_result, cached_name))
log.debug(
f"Size of mempool: {len(self.mempool.spends)}, minimum fee to get in: {self.mempool.get_min_fee_rate()}"
f"Size of mempool: {len(self.mempool.spends)} spends, cost: {self.mempool.total_mempool_cost} "
f"minimum fee to get in: {self.mempool.get_min_fee_rate(100000)}"
)
return txs_added

View File

@ -62,6 +62,7 @@ class TimelordAPI:
last_ip_iters = self.timelord.last_state.get_last_ip()
if sp_iters > ip_iters:
self.timelord.overflow_blocks.append(new_unfinished_block)
log.warning(f"Overflow unfinished block, total {self.timelord.total_unfinished}")
elif ip_iters > last_ip_iters:
new_block_iters: Optional[uint64] = self.timelord._can_infuse_unfinished_block(new_unfinished_block)
if new_block_iters:
@ -72,6 +73,7 @@ class TimelordAPI:
self.timelord.iters_to_submit[Chain.INFUSED_CHALLENGE_CHAIN].append(new_block_iters)
self.timelord.iteration_to_proof_type[new_block_iters] = IterationType.INFUSION_POINT
self.timelord.total_unfinished += 1
log.warning(f"Non-overflow unfinished block, total {self.timelord.total_unfinished}")
@api_request
async def request_compact_proof_of_time(self, vdf_info: timelord_protocol.RequestCompactProofOfTime):

View File

@ -75,7 +75,6 @@ class LastState:
self.passed_ses_height_but_not_yet_included = True
else:
self.passed_ses_height_but_not_yet_included = state.passes_ses_height_but_not_yet_included
log.warning(f"Signage point index: {self.peak.reward_chain_block.signage_point_index}")
elif isinstance(state, EndOfSubSlotBundle):
self.state_type = StateType.END_OF_SUB_SLOT
if self.peak is not None:

View File

@ -88,7 +88,6 @@ test_constants = DEFAULT_CONSTANTS.replace(
* 24
* 10, # Allows creating blockchains with timestamps up to 10 days in the future, for testing
"MEMPOOL_BLOCK_BUFFER": 6,
"TX_PER_SEC": 1,
"CLVM_COST_RATIO_CONSTANT": 108,
"INITIAL_FREEZE_PERIOD": 0,
"NETWORK_TYPE": 1,

View File

@ -3,7 +3,7 @@
python3 -m venv venv
# shellcheck disable=SC1091
. ./activate
pip3 install .
pip3 install ".[dev]"
py.test ./tests/blockchain -s -v
py.test ./tests/core -s -v

View File

@ -15,6 +15,7 @@ from chia.protocols import full_node_protocol as fnp
from chia.protocols import timelord_protocol
from chia.protocols.protocol_message_types import ProtocolMessageTypes
from chia.server.address_manager import AddressManager
from chia.simulator.simulator_protocol import FarmNewBlockProtocol
from chia.types.blockchain_format.classgroup import ClassgroupElement
from chia.types.blockchain_format.program import SerializedProgram
from chia.types.blockchain_format.vdf import CompressibleVDFField, VDFProof
@ -59,7 +60,7 @@ def event_loop():
@pytest.fixture(scope="module")
async def wallet_nodes():
async_gen = setup_simulators_and_wallets(2, 1, {})
async_gen = setup_simulators_and_wallets(2, 1, {"MEMPOOL_BLOCK_BUFFER": 2, "MAX_BLOCK_COST_CLVM": 4000000})
nodes, wallets = await async_gen.__anext__()
full_node_1 = nodes[0]
full_node_2 = nodes[1]
@ -425,13 +426,13 @@ class TestFullNodeProtocol:
wallet_ph = wallet_a.get_new_puzzlehash()
blocks = bt.get_consecutive_blocks(
3,
10,
block_list_input=blocks,
guarantee_transaction_block=True,
farmer_reward_puzzle_hash=wallet_ph,
pool_reward_puzzle_hash=wallet_ph,
)
for block in blocks[-3:]:
for block in blocks:
await full_node_1.full_node.respond_block(fnp.RespondBlock(block))
start_height = (
@ -439,61 +440,72 @@ class TestFullNodeProtocol:
if full_node_1.full_node.blockchain.get_peak() is not None
else -1
)
conditions_dict: Dict = {ConditionOpcode.CREATE_COIN: []}
peer = await connect_and_get_peer(server_1, server_2)
# Mempool has capacity of 100, make 110 unspents that we can use
puzzle_hashes = []
tx_per_sec = bt.constants.TX_PER_SEC
sec_per_block = bt.constants.SUB_SLOT_TIME_TARGET // bt.constants.SLOT_BLOCKS_TARGET
block_buffer_count = bt.constants.MEMPOOL_BLOCK_BUFFER
mempool_size = int(tx_per_sec * sec_per_block * block_buffer_count)
block_buffer_count = full_node_1.full_node.constants.MEMPOOL_BLOCK_BUFFER
for _ in range(mempool_size + 1):
receiver_puzzlehash = wallet_receiver.get_new_puzzlehash()
puzzle_hashes.append(receiver_puzzlehash)
output = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [receiver_puzzlehash, int_to_bytes(1000)])
conditions_dict[ConditionOpcode.CREATE_COIN].append(output)
# Makes a bunch of coins
for i in range(5):
conditions_dict: Dict = {ConditionOpcode.CREATE_COIN: []}
# This should fit in one transaction
for _ in range(100):
receiver_puzzlehash = wallet_receiver.get_new_puzzlehash()
puzzle_hashes.append(receiver_puzzlehash)
output = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [receiver_puzzlehash, int_to_bytes(10000000)])
spend_bundle = wallet_a.generate_signed_transaction(
100,
puzzle_hashes[0],
get_future_reward_coins(blocks[1])[0],
condition_dic=conditions_dict,
)
assert spend_bundle is not None
conditions_dict[ConditionOpcode.CREATE_COIN].append(output)
new_transaction = fnp.NewTransaction(spend_bundle.get_hash(), uint64(100), uint64(100))
spend_bundle = wallet_a.generate_signed_transaction(
100,
puzzle_hashes[0],
get_future_reward_coins(blocks[1 + i])[0],
condition_dic=conditions_dict,
)
assert spend_bundle is not None
cost_result = await full_node_1.full_node.mempool_manager.pre_validate_spendbundle(spend_bundle)
log.info(f"Cost result: {cost_result.cost}")
msg = await full_node_1.new_transaction(new_transaction)
assert msg.data == bytes(fnp.RequestTransaction(spend_bundle.get_hash()))
new_transaction = fnp.NewTransaction(spend_bundle.get_hash(), uint64(100), uint64(100))
respond_transaction_2 = fnp.RespondTransaction(spend_bundle)
await full_node_1.respond_transaction(respond_transaction_2, peer)
msg = await full_node_1.new_transaction(new_transaction)
assert msg.data == bytes(fnp.RequestTransaction(spend_bundle.get_hash()))
blocks_new = bt.get_consecutive_blocks(
2,
block_list_input=blocks,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
)
respond_transaction_2 = fnp.RespondTransaction(spend_bundle)
await full_node_1.respond_transaction(respond_transaction_2, peer)
# Already seen
msg = await full_node_1.new_transaction(new_transaction)
assert msg is None
# Farm one block
for block in blocks_new[-2:]:
await full_node_1.full_node.respond_block(fnp.RespondBlock(block), peer)
blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
guarantee_transaction_block=True,
transaction_data=spend_bundle,
)
await full_node_1.full_node.respond_block(fnp.RespondBlock(blocks[-1]), peer)
await time_out_assert(10, node_height_at_least, True, full_node_1, start_height + 2)
# Already seen
msg = await full_node_1.new_transaction(new_transaction)
assert msg is None
await time_out_assert(10, node_height_at_least, True, full_node_1, start_height + 5)
spend_bundles = []
included_tx = 0
not_included_tx = 0
seen_bigger_transaction_has_high_fee = False
# Fill mempool
for puzzle_hash in puzzle_hashes[1:]:
coin_record = (await full_node_1.full_node.coin_store.get_coin_records_by_puzzle_hash(True, puzzle_hash))[0]
receiver_puzzlehash = wallet_receiver.get_new_puzzlehash()
fee = random.randint(2, 499)
if puzzle_hash == puzzle_hashes[-1]:
force_high_fee = True
fee = 10000000 # 10 million
else:
force_high_fee = False
fee = random.randint(1, 10000000)
spend_bundle = wallet_receiver.generate_signed_transaction(
uint64(500), receiver_puzzlehash, coin_record.coin, fee=fee
)
@ -502,23 +514,39 @@ class TestFullNodeProtocol:
request = fnp.RequestTransaction(spend_bundle.get_hash())
req = await full_node_1.request_transaction(request)
if req.data == bytes(fnp.RespondTransaction(spend_bundle)):
fee_rate_for_small = full_node_1.full_node.mempool_manager.mempool.get_min_fee_rate(10)
fee_rate_for_med = full_node_1.full_node.mempool_manager.mempool.get_min_fee_rate(50000)
fee_rate_for_large = full_node_1.full_node.mempool_manager.mempool.get_min_fee_rate(500000)
log.info(f"Min fee rate (10): {fee_rate_for_small}")
log.info(f"Min fee rate (50000): {fee_rate_for_med}")
log.info(f"Min fee rate (500000): {fee_rate_for_large}")
if fee_rate_for_large > fee_rate_for_med:
seen_bigger_transaction_has_high_fee = True
if req is not None and req.data == bytes(fnp.RespondTransaction(spend_bundle)):
included_tx += 1
spend_bundles.append(spend_bundle)
assert not full_node_1.full_node.mempool_manager.mempool.at_full_capacity(0)
assert full_node_1.full_node.mempool_manager.mempool.get_min_fee_rate(0) == 0
else:
assert full_node_1.full_node.mempool_manager.mempool.at_full_capacity(133000)
assert full_node_1.full_node.mempool_manager.mempool.get_min_fee_rate(133000) > 0
assert not force_high_fee
not_included_tx += 1
log.info(f"Included: {included_tx}, not included: {not_included_tx}")
assert included_tx > 0
assert not_included_tx > 0
assert seen_bigger_transaction_has_high_fee
# Mempool is full
new_transaction = fnp.NewTransaction(token_bytes(32), uint64(1000000), uint64(1))
msg = await full_node_1.new_transaction(new_transaction)
assert msg is None
agg_bundle: SpendBundle = SpendBundle.aggregate(spend_bundles)
blocks_new = bt.get_consecutive_blocks(
1,
block_list_input=blocks_new,
transaction_data=agg_bundle,
guarantee_transaction_block=True,
)
# Farm one block to clear mempool
await full_node_1.full_node.respond_block(fnp.RespondBlock(blocks_new[-1]), peer)
await full_node_1.farm_new_transaction_block(FarmNewBlockProtocol(receiver_puzzlehash))
# No longer full
new_transaction = fnp.NewTransaction(token_bytes(32), uint64(1000000), uint64(1))
@ -1267,729 +1295,3 @@ class TestFullNodeProtocol:
if block.challenge_chain_sp_proof is not None:
assert not block.challenge_chain_sp_proof.normalized_to_identity
assert not block.challenge_chain_ip_proof.normalized_to_identity
#
# async def test_new_unfinished(self, two_nodes, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# wallet_a, wallet_receiver, blocks = wallet_blocks
#
# blocks_list = await get_block_path(full_node_1.full_node)
#
# blocks_new = bt.get_consecutive_blocks(
# 1,
# block_list_input=blocks_list,
# seed=b"another seed 2",
# )
# block = blocks_new[-1].
# assert blocks_new[-1].proof_of_time is not None
# assert blocks_new[-2].proof_of_time is not None
# already_have = fnp.NewUnfinishedBlock(
# blocks_new[-2].prev_header_hash,
# blocks_new[-2].proof_of_time.number_of_iterations,
# blocks_new[-2].header_hash,
# )
# res = await full_node_1.new_unfinished_block(already_have)
# assert res is None
#
# bad_prev = fnp.NewUnfinishedBlock(
# blocks_new[-1].header_hash,
# blocks_new[-1].proof_of_time.number_of_iterations,
# blocks_new[-1].header_hash,
# )
#
# res = await full_node_1.new_unfinished_block(bad_prev)
# assert res is None
# good = fnp.NewUnfinishedBlock(
# blocks_new[-1].prev_header_hash,
# blocks_new[-1].proof_of_time.number_of_iterations,
# blocks_new[-1].header_hash,
# )
# res = full_node_1.new_unfinished_block(good)
# assert res is not None
#
# unf_block = FullBlock(
# blocks_new[-1].proof_of_space,
# None,
# blocks_new[-1].header,
# blocks_new[-1].transactions_generator,
# blocks_new[-1].transactions_filter,
# )
# unf_block_req = fnp.RespondUnfinishedBlock(unf_block)
# await full_node_1.respond_unfinished_block(unf_block_req)
#
# res = await full_node_1.new_unfinished_block(good)
# assert res is None
#
# @pytest.mark.asyncio
# async def test_request_unfinished(self, two_nodes, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# wallet_a, wallet_receiver, blocks = wallet_blocks
#
# blocks_list = await get_block_path(full_node_1.full_node)
#
# blocks_new = bt.get_consecutive_blocks(
# 2,
# blocks_list,
# 10,
# seed=b"another seed 3",
# )
# # Add one block
# await full_node_1.full_node.respond_block(fnp.RespondBlock(blocks_new[-2]))
#
# unf_block = FullBlock(
# blocks_new[-1].proof_of_space,
# None,
# blocks_new[-1].header,
# blocks_new[-1].transactions_generator,
# blocks_new[-1].transactions_filter,
# )
# unf_block_req = fnp.RespondUnfinishedBlock(unf_block)
#
# # Don't have
# req = fnp.RequestUnfinishedBlock(unf_block.header_hash)
# res = await full_node_1.request_unfinished_block(req)
# assert res is not None
# assert res.data == fnp.RejectUnfinishedBlockRequest(unf_block.header_hash)
# # Have unfinished block
# await full_node_1.respond_unfinished_block(unf_block_req)
# res = await full_node_1.request_unfinished_block(req)
# assert res is not None
# assert res.data == fnp.RespondUnfinishedBlock(unf_block)
#
# # Have full block (genesis in this case)
# req = fnp.RequestUnfinishedBlock(blocks_new[0].header_hash)
# res = await full_node_1.request_unfinished_block(req)
# assert res is not None
# assert res.data.block.header_hash == blocks_new[0].header_hash
#
# @pytest.mark.asyncio
# async def test_respond_unfinished(self, two_nodes, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# wallet_a, wallet_receiver, blocks = wallet_blocks
#
# blocks_list = await get_block_path(full_node_1.full_node)
#
# blocks_new = bt.get_consecutive_blocks(
# 1,
# blocks_list[:],
# 4,
# seed=b"Another seed 4",
# )
# for block in blocks_new:
# await full_node_1.full_node.respond_block(fnp.RespondBlock(block))
#
# candidates = []
# for i in range(50):
# blocks_new_2 = bt.get_consecutive_blocks(
# 1,
# blocks_new[:],
# 4,
# seed=i.to_bytes(4, "big") + b"Another seed",
# )
# candidates.append(blocks_new_2[-1])
#
# unf_block_not_child = FullBlock(
# blocks_new[-7].proof_of_space,
# None,
# blocks_new[-7].header,
# blocks_new[-7].transactions_generator,
# blocks_new[-7].transactions_filter,
# )
#
# unf_block_req_bad = fnp.RespondUnfinishedBlock(unf_block_not_child)
# res = await full_node_1.respond_unfinished_block(unf_block_req_bad)
# assert res is None
#
# candidates = sorted(candidates, key=lambda c: c.proof_of_time.number_of_iterations) # type: ignore
#
# def get_cand(index: int):
# unf_block = FullBlock(
# candidates[index].proof_of_space,
# None,
# candidates[index].header,
# candidates[index].transactions_generator,
# candidates[index].transactions_filter,
# )
# return fnp.RespondUnfinishedBlock(unf_block)
#
# # Highest height should propagate
# # Slow block should delay prop
# start = time.time()
# await full_node_1.respond_unfinished_block(get_cand(20))
#
# # Already seen
# res = await full_node_1.respond_unfinished_block(get_cand(20))
# assert res is None
#
# # Slow equal height should not propagate
# res = await full_node_1.respond_unfinished_block(get_cand(49))
# assert res is None
#
# # Fastest equal height should propagate
# start = time.time()
# await full_node_1.respond_unfinished_block(get_cand(0))
# assert time.time() - start < 3
#
# # Equal height (fast) should propagate
# for i in range(1, 5):
# # Checks a few blocks in case they have the same PoS
# if candidates[i].proof_of_space.get_hash() != candidates[0].proof_of_space.get_hash():
# start = time.time()
# await full_node_1.respond_unfinished_block(get_cand(i))
# assert time.time() - start < 3
# break
#
# await full_node_1.respond_unfinished_block(get_cand(40))
#
# # Don't propagate at old height
# await full_node_1.full_node.respond_block(fnp.RespondBlock(candidates[0]))
# blocks_new_3 = bt.get_consecutive_blocks(
# 1,
# blocks_new[:] + [candidates[0]],
# 10,
# )
# unf_block_new = FullBlock(
# blocks_new_3[-1].proof_of_space,
# None,
# blocks_new_3[-1].header,
# blocks_new_3[-1].transactions_generator,
# blocks_new_3[-1].transactions_filter,
# )
#
# unf_block_new_req = fnp.RespondUnfinishedBlock(unf_block_new)
# await full_node_1.respond_unfinished_block(unf_block_new_req)
# await full_node_1.respond_unfinished_block(get_cand(10))
#
# @pytest.mark.asyncio
# async def test_request_all_header_hashes(self, two_nodes, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# wallet_a, wallet_receiver, blocks = wallet_blocks
# tips = full_node_1.full_node.blockchain.get_current_tips()
# request = fnp.RequestAllHeaderHashes(tips[0].header_hash)
# res = await full_node_1.request_all_header_hashes(request)
# assert res is not None
# assert len(res.data.header_hashes) > 0
#
# @pytest.mark.asyncio
# async def test_request_block(self, two_nodes, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# wallet_a, wallet_receiver, blocks = wallet_blocks
#
# res = await full_node_1.request_header_block(fnp.RequestHeaderBlock(uint32(1), blocks[1].header_hash))
# assert res is not None
# assert res.data.header_block.header_hash == blocks[1].header_hash
#
# res = await full_node_1.request_header_block(fnp.RequestHeaderBlock(uint32(1), blocks[2].header_hash))
# assert res is not None
# assert res.data == fnp.RejectHeaderBlockRequest(uint32(1), blocks[2].header_hash)
#
# res = await full_node_1.request_header_block(fnp.RequestHeaderBlock(uint32(1), bytes([0] * 32)))
# assert res is not None
# assert res.data == fnp.RejectHeaderBlockRequest(uint32(1), bytes([0] * 32))
#
# # Full blocks
# res = await full_node_1.request_block(fnp.RequestBlock(uint32(1), blocks[1].header_hash))
# assert res is not None
# assert res.data.block.header_hash == blocks[1].header_hash
#
# res = await full_node_1.request_block(fnp.RequestHeaderBlock(uint32(1), bytes([0] * 32)))
# assert res is not None
# assert res.data == fnp.RejectBlockRequest(uint32(1), bytes([0] * 32))
#
# @pytest.mark.asyncio
# async def testrespond_block(self, two_nodes, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# wallet_a, wallet_receiver, blocks = wallet_blocks
#
# # Already seen
# res = await full_node_1.full_node.respond_block(fnp.RespondBlock(blocks[0]))
# assert res is None
#
# tip_hashes = set([t.header_hash for t in full_node_1.full_node.blockchain.get_current_tips()])
# blocks_list = await get_block_path(full_node_1.full_node)
#
# blocks_new = bt.get_consecutive_blocks(
# 5,
# blocks_list[:],
# 10,
# seed=b"Another seed 5",
# )
#
# # In sync mode
# full_node_1.full_node.sync_store.set_sync_mode(True)
# res = await full_node_1.full_node.respond_block(fnp.RespondBlock(blocks_new[-5]))
# assert res is None
# full_node_1.full_node.sync_store.set_sync_mode(False)
#
# # If invalid, do nothing
# block_invalid = FullBlock(
# ProofOfSpace(
# blocks_new[-5].proof_of_space.challenge,
# blocks_new[-5].proof_of_space.pool_public_key,
# blocks_new[-5].proof_of_space.plot_public_key,
# uint8(blocks_new[-5].proof_of_space.size + 1),
# blocks_new[-5].proof_of_space.proof,
# ),
# blocks_new[-5].proof_of_time,
# blocks_new[-5].header,
# blocks_new[-5].transactions_generator,
# blocks_new[-5].transactions_filter,
# )
# threw = False
# try:
# res = await full_node_1.full_node.respond_block(fnp.RespondBlock(block_invalid))
# except ConsensusError:
# threw = True
# assert threw
#
# # If a few blocks behind, request short sync
# res = await full_node_1.full_node.respond_block(fnp.RespondBlock(blocks_new[-3]))
#
# # Updates full nodes, farmers, and timelords
# tip_hashes_again = set([t.header_hash for t in full_node_1.full_node.blockchain.get_current_tips()])
# assert tip_hashes_again == tip_hashes
# await full_node_1.full_node.respond_block(fnp.RespondBlock(blocks_new[-5]))
# # TODO test propagation
# """
# msgs = [
# _ async for _ in full_node_1.full_node.respond_block(fnp.RespondBlock(blocks_new[-5]))
# ]
# assert len(msgs) == 5 or len(msgs) == 6
# """
# # Updates blockchain tips
# tip_hashes_again = set([t.header_hash for t in full_node_1.full_node.blockchain.get_current_tips()])
# assert tip_hashes_again != tip_hashes
#
# # If orphan, don't send anything
# blocks_orphan = bt.get_consecutive_blocks(
# 1,
# blocks_list[:-5],
# 10,
# seed=b"Another seed 6",
# )
# res = full_node_1.full_node.respond_block(fnp.RespondBlock(blocks_orphan[-1]))
#
#
# class TestWalletProtocol:
# @pytest.mark.asyncio
# async def test_send_transaction(self, two_nodes, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# wallet_a, wallet_receiver, blocks = wallet_blocks
#
# await server_2.start_client(PeerInfo("localhost", uint16(server_1._port)), None)
# blocks_list = await get_block_path(full_node_1.full_node)
#
# blocks_new = bt.get_consecutive_blocks(
# 1,
# block_list_input=blocks_list,
# seed=b"test_request_additions",
# )
# await full_node_1.full_node.respond_block(fnp.RespondBlock(blocks_new[-1]))
#
# spend_bundle = wallet_a.generate_signed_transaction(
# 100,
# wallet_a.get_new_puzzlehash(),
# blocks_new[-1].get_coinbase(),
# )
# spend_bundle_bad = wallet_a.generate_signed_transaction(
# test_constants.MAX_COIN_AMOUNT,
# wallet_a.get_new_puzzlehash(),
# blocks_new[-1].get_coinbase(),
# )
#
# res = await full_node_1.send_transaction(wallet_protocol.SendTransaction(spend_bundle))
#
# assert res is not None
# assert res.data == wallet_protocol.TransactionAck(spend_bundle.name(), MempoolInclusionStatus.SUCCESS, None)
#
# res = await full_node_1.send_transaction(wallet_protocol.SendTransaction(spend_bundle))
#
# assert res is not None
# assert res.data == wallet_protocol.TransactionAck(spend_bundle.name(), MempoolInclusionStatus.SUCCESS, None)
#
# res = await full_node_1.send_transaction(wallet_protocol.SendTransaction(spend_bundle_bad))
# assert res is not None
# assert res.data == wallet_protocol.TransactionAck(
# spend_bundle_bad.name(),
# MempoolInclusionStatus.FAILED,
# Err.COIN_AMOUNT_EXCEEDS_MAXIMUM.name,
# )
#
# @pytest.mark.asyncio
# async def test_request_all_proof_hashes(self, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# blocks_list = await get_block_path(full_node_1.full_node)
#
# res = await full_node_1.request_all_proof_hashes(wallet_protocol.RequestAllProofHashes())
# hashes = res.data.hashes
# assert len(hashes) >= len(blocks_list) - 2
# for i in range(len(hashes)):
# if i % test_constants.DIFFICULTY_EPOCH == test_constants.DIFFICULTY_DELAY:
# assert hashes[i][1] is not None
# elif i > 0:
# assert hashes[i][1] is None
# if i % test_constants.DIFFICULTY_EPOCH == test_constants.DIFFICULTY_EPOCH - 1:
# assert hashes[i][2] is not None
# else:
# assert hashes[i][2] is None
# assert hashes[i][0] == std_hash(
# blocks_list[i].proof_of_space.get_hash() + blocks_list[i].proof_of_time.output.get_hash()
# )
#
# @pytest.mark.asyncio
# async def test_request_all_header_hashes_after(self, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# blocks_list = await get_block_path(full_node_1.full_node)
#
# res = await full_node_1.request_all_header_hashes_after(
# wallet_protocol.RequestAllHeaderHashesAfter(uint32(5), blocks_list[5].proof_of_space.challenge_hash)
# )
# assert isinstance(res.data, wallet_protocol.RespondAllHeaderHashesAfter)
# assert res.data.starting_height == 5
# assert res.data.previous_challenge_hash == blocks_list[5].proof_of_space.challenge_hash
# assert res.data.hashes[:3] == [b.header_hash for b in blocks_list[5:8]]
#
# # Wrong prev challenge
# res = await full_node_1.request_all_header_hashes_after(
# wallet_protocol.RequestAllHeaderHashesAfter(uint32(5), blocks_list[4].proof_of_space.challenge_hash)
# )
# assert isinstance(res.data, wallet_protocol.RejectAllHeaderHashesAfterRequest)
# assert res.data.starting_height == 5
# assert res.data.previous_challenge_hash == blocks_list[4].proof_of_space.challenge_hash
#
# @pytest.mark.asyncio
# async def test_request_header(self, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# num_blocks = 2
# blocks = bt.get_consecutive_blocks(test_constants, num_blocks, [], 10, seed=b"test_request_header")
# for block in blocks[:2]:
# await full_node_1.full_node.respond_block(fnp.RespondBlock(block))
#
# res = await full_node_1.request_header(wallet_protocol.RequestHeader(uint32(1), blocks[1].header_hash))
# assert isinstance(res.data, wallet_protocol.RespondHeader)
# assert res.data.header_block.header == blocks[1].header
# assert res.data.transactions_filter == blocks[1].transactions_filter
#
# # Don't have
# res = await full_node_1.request_header(wallet_protocol.RequestHeader(uint32(2), blocks[2].header_hash))
# assert isinstance(res.data, wallet_protocol.RejectHeaderRequest)
# assert res.data.height == 2
# assert res.data.header_hash == blocks[2].header_hash
#
# @pytest.mark.asyncio
# async def test_request_removals(self, two_nodes, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# wallet_a, wallet_receiver, blocks = wallet_blocks
#
# await server_2.start_client(PeerInfo("localhost", uint16(server_1._port)), None)
# blocks_list = await get_block_path(full_node_1.full_node)
# blocks_new = bt.get_consecutive_blocks(test_constants, 5, seed=b"test_request_removals")
#
# # Request removals for nonexistent block fails
# res = await full_node_1.request_removals(
# wallet_protocol.RequestRemovals(blocks_new[-1].height, blocks_new[-1].header_hash, None)
# )
# assert isinstance(res.data, wallet_protocol.RejectRemovalsRequest)
#
# # Request removals for orphaned block fails
# for block in blocks_new:
# await full_node_1.full_node.respond_block(fnp.RespondBlock(block))
#
# res = await full_node_1.request_removals(
# wallet_protocol.RequestRemovals(blocks_new[-1].height, blocks_new[-1].header_hash, None)
# )
# assert isinstance(res.data, wallet_protocol.RejectRemovalsRequest)
#
# # If there are no transactions, empty proof and coins
# blocks_new = bt.get_consecutive_blocks(
# test_constants,
# 10,
# block_list_input=blocks_list,
# )
# for block in blocks_new:
# await full_node_1.full_node.respond_block(fnp.RespondBlock(block))
#
# res = await full_node_1.request_removals(
# wallet_protocol.RequestRemovals(blocks_new[-4].height, blocks_new[-4].header_hash, None)
# )
#
# assert isinstance(res.data, wallet_protocol.RespondRemovals)
# assert len(res.data.coins) == 0
# assert res.data.proofs is None
#
# # Add a block with transactions
# spend_bundles = []
# for i in range(5):
# spend_bundles.append(
# wallet_a.generate_signed_transaction(
# 100,
# wallet_a.get_new_puzzlehash(),
# blocks_new[i - 8].get_coinbase(),
# )
# )
# height_with_transactions = len(blocks_new) + 1
# agg = SpendBundle.aggregate(spend_bundles)
# dic_h = {
# height_with_transactions: (
# best_solution_program(agg),
# agg.aggregated_signature,
# )
# }
# blocks_new = bt.get_consecutive_blocks(
# test_constants, 5, block_list_input=blocks_new, transaction_data_at_height=dic_h
# )
# for block in blocks_new:
# await full_node_1.full_node.respond_block(fnp.RespondBlock(block))
#
# # If no coins requested, respond all coins and NO proof
# res = await full_node_1.request_removals(
# wallet_protocol.RequestRemovals(
# blocks_new[height_with_transactions].height,
# blocks_new[height_with_transactions].header_hash,
# None,
# )
# )
# assert isinstance(res.data, wallet_protocol.RespondRemovals)
# assert len(res.data.coins) == 5
# assert res.data.proofs is None
#
# removals_merkle_set = MerkleSet()
# for sb in spend_bundles:
# for coin in sb.removals():
# if coin is not None:
# removals_merkle_set.add_already_hashed(coin.name())
#
# # Ask for one coin and check PoI
# coin_list = [spend_bundles[0].removals()[0].name()]
# res = await full_node_1.request_removals(
# wallet_protocol.RequestRemovals(
# blocks_new[height_with_transactions].height,
# blocks_new[height_with_transactions].header_hash,
# coin_list,
# )
# )
#
# assert isinstance(res.data, wallet_protocol.RespondRemovals)
# assert len(res.data.coins) == 1
# assert res.data.proofs is not None
# assert len(res.data.proofs) == 1
# assert confirm_included_already_hashed(
# blocks_new[height_with_transactions].header.data.removals_root,
# coin_list[0],
# res.data.proofs[0][1],
# )
#
# # Ask for one coin and check PoE
# coin_list = [token_bytes(32)]
#
# res = await full_node_1.request_removals(
# wallet_protocol.RequestRemovals(
# blocks_new[height_with_transactions].height,
# blocks_new[height_with_transactions].header_hash,
# coin_list,
# )
# )
# assert isinstance(res.data, wallet_protocol.RespondRemovals)
# assert len(res.data.coins) == 1
# assert res.data.coins[0][1] is None
# assert res.data.proofs is not None
# assert len(res.data.proofs) == 1
# assert confirm_not_included_already_hashed(
# blocks_new[height_with_transactions].header.data.removals_root,
# coin_list[0],
# res.data.proofs[0][1],
# )
#
# # Ask for two coins
# coin_list = [spend_bundles[0].removals()[0].name(), token_bytes(32)]
#
# res = await full_node_1.request_removals(
# wallet_protocol.RequestRemovals(
# blocks_new[height_with_transactions].height,
# blocks_new[height_with_transactions].header_hash,
# coin_list,
# )
# )
#
# assert isinstance(res.data, wallet_protocol.RespondRemovals)
# assert len(res.data.coins) == 2
# assert res.data.coins[0][1] is not None
# assert res.data.coins[1][1] is None
# assert res.data.proofs is not None
# assert len(res.data.proofs) == 2
# assert confirm_included_already_hashed(
# blocks_new[height_with_transactions].header.data.removals_root,
# coin_list[0],
# res.data.proofs[0][1],
# )
# assert confirm_not_included_already_hashed(
# blocks_new[height_with_transactions].header.data.removals_root,
# coin_list[1],
# res.data.proofs[1][1],
# )
#
# @pytest.mark.asyncio
# async def test_request_additions(self, two_nodes, wallet_nodes):
# full_node_1, full_node_2, server_1, server_2, wallet_a, wallet_receiver = wallet_nodes
# wallet_a, wallet_receiver, blocks = wallet_blocks
#
# await server_2.start_client(PeerInfo("localhost", uint16(server_1._port)), None)
# blocks_list = await get_block_path(full_node_1.full_node)
# blocks_new = bt.get_consecutive_blocks(test_constants, 5, seed=b"test_request_additions")
#
# # Request additions for nonexistent block fails
# res = await full_node_1.request_additions(
# wallet_protocol.RequestAdditions(blocks_new[-1].height, blocks_new[-1].header_hash, None)
# )
# assert isinstance(res.data, wallet_protocol.RejectAdditionsRequest)
#
# # Request additions for orphaned block fails
# for block in blocks_new:
# await full_node_1.full_node.respond_block(fnp.RespondBlock(block))
#
# res = await full_node_1.request_additions(
# wallet_protocol.RequestAdditions(blocks_new[-1].height, blocks_new[-1].header_hash, None)
# )
# assert isinstance(res.data, wallet_protocol.RejectAdditionsRequest)
#
# # If there are no transactions, only cb and fees additions
# blocks_new = bt.get_consecutive_blocks(
# test_constants,
# 10,
# block_list_input=blocks_list,
# )
# for block in blocks_new:
# await full_node_1.full_node.respond_block(fnp.RespondBlock(block))
#
# res = await full_node_1.request_additions(
# wallet_protocol.RequestAdditions(blocks_new[-4].height, blocks_new[-4].header_hash, None)
# )
# assert isinstance(res.data, wallet_protocol.RespondAdditions)
# assert len(res.data.coins) == 2
# assert res.data.proofs is None
#
# # Add a block with transactions
# spend_bundles = []
# puzzle_hashes = [wallet_a.get_new_puzzlehash(), wallet_a.get_new_puzzlehash()]
# for i in range(5):
# spend_bundles.append(
# wallet_a.generate_signed_transaction(
# 100,
# puzzle_hashes[i % 2],
# blocks_new[i - 8].get_coinbase(),
# )
# )
# height_with_transactions = len(blocks_new) + 1
# agg = SpendBundle.aggregate(spend_bundles)
# dic_h = {
# height_with_transactions: (
# best_solution_program(agg),
# agg.aggregated_signature,
# )
# }
# blocks_new = bt.get_consecutive_blocks(
# test_constants, 5, block_list_input=blocks_new, transaction_data_at_height=dic_h
# )
# for block in blocks_new:
# await full_node_1.full_node.respond_block(fnp.RespondBlock(block))
#
# # If no puzzle hashes requested, respond all coins and NO proof
# res = await full_node_1.request_additions(
# wallet_protocol.RequestAdditions(
# blocks_new[height_with_transactions].height,
# blocks_new[height_with_transactions].header_hash,
# None,
# )
# )
# assert isinstance(res.data, wallet_protocol.RespondAdditions)
# # One puzzle hash with change and fee (x3) = 9, minus two repeated ph = 7 + coinbase and fees = 9
# assert len(res.data.coins) == 9
# assert res.data.proofs is None
#
# additions_merkle_set = MerkleSet()
# for sb in spend_bundles:
# for coin in sb.additions():
# if coin is not None:
# additions_merkle_set.add_already_hashed(coin.name())
#
# # Ask for one coin and check both PoI
# ph_list = [puzzle_hashes[0]]
# res = await full_node_1.request_additions(
# wallet_protocol.RequestAdditions(
# blocks_new[height_with_transactions].height,
# blocks_new[height_with_transactions].header_hash,
# ph_list,
# )
# )
# assert isinstance(res.data, wallet_protocol.RespondAdditions)
# assert len(res.data.coins) == 1
# assert len(res.data.coins[0][1]) == 3
# assert res.data.proofs is not None
# assert len(res.data.proofs) == 1
# assert confirm_included_already_hashed(
# blocks_new[height_with_transactions].header.data.additions_root,
# ph_list[0],
# res.data.proofs[0][1],
# )
# coin_list_for_ph = [
# coin for coin in blocks_new[height_with_transactions].additions() if coin.puzzle_hash == ph_list[0]
# ]
# assert confirm_included_already_hashed(
# blocks_new[height_with_transactions].header.data.additions_root,
# hash_coin_list(coin_list_for_ph),
# res.data.proofs[0][2],
# )
#
# # Ask for one ph and check PoE
# ph_list = [token_bytes(32)]
# res = await full_node_1.request_additions(
# wallet_protocol.RequestAdditions(
# blocks_new[height_with_transactions].height,
# blocks_new[height_with_transactions].header_hash,
# ph_list,
# )
# )
# assert isinstance(res.data, wallet_protocol.RespondAdditions)
# assert len(res.data.coins) == 1
# assert len(res.data.coins[0][1]) == 0
# assert res.data.proofs is not None
# assert len(res.data.proofs) == 1
# assert confirm_not_included_already_hashed(
# blocks_new[height_with_transactions].header.data.additions_root,
# ph_list[0],
# res.data.proofs[0][1],
# )
# assert res.data.proofs[0][2] is None
#
# # Ask for two puzzle_hashes
# ph_list = [puzzle_hashes[0], token_bytes(32)]
# res = await full_node_1.request_additions(
# wallet_protocol.RequestAdditions(
# blocks_new[height_with_transactions].height,
# blocks_new[height_with_transactions].header_hash,
# ph_list,
# )
# )
# assert isinstance(res.data, wallet_protocol.RespondAdditions)
# assert len(res.data.coins) == 2
# assert len(res.data.coins[0][1]) == 3
# assert res.data.proofs is not None
# assert len(res.data.proofs) == 2
# assert confirm_included_already_hashed(
# blocks_new[height_with_transactions].header.data.additions_root,
# ph_list[0],
# res.data.proofs[0][1],
# )
# assert confirm_included_already_hashed(
# blocks_new[height_with_transactions].header.data.additions_root,
# hash_coin_list(coin_list_for_ph),
# res.data.proofs[0][2],
# )
# assert confirm_not_included_already_hashed(
# blocks_new[height_with_transactions].header.data.additions_root,
# ph_list[1],
# res.data.proofs[1][1],
# )
# assert res.data.proofs[1][2] is None

View File

@ -1,9 +1,11 @@
import asyncio
import logging
from time import time
from typing import Dict, List
import pytest
from chia.full_node.mempool import Mempool
from chia.protocols import full_node_protocol
from chia.types.announcement import Announcement
from chia.types.blockchain_format.coin import Coin
@ -26,6 +28,8 @@ BURN_PUZZLE_HASH_2 = b"1" * 32
WALLET_A = bt.get_pool_wallet_tool()
log = logging.getLogger(__name__)
def generate_test_spend_bundle(
coin: Coin,
@ -47,20 +51,21 @@ def event_loop():
yield loop
@pytest.fixture(scope="module")
async def two_nodes():
async_gen = setup_simulators_and_wallets(2, 1, {})
nodes, _ = await async_gen.__anext__()
full_node_1 = nodes[0]
full_node_2 = nodes[1]
server_1 = full_node_1.full_node.server
server_2 = full_node_2.full_node.server
yield full_node_1, full_node_2, server_1, server_2
async for _ in async_gen:
yield _
class TestMempool:
@pytest.fixture(scope="module")
async def two_nodes(self):
async_gen = setup_simulators_and_wallets(2, 1, {})
nodes, _ = await async_gen.__anext__()
full_node_1 = nodes[0]
full_node_2 = nodes[1]
server_1 = full_node_1.full_node.server
server_2 = full_node_2.full_node.server
yield full_node_1, full_node_2, server_1, server_2
async for _ in async_gen:
yield _
@pytest.mark.asyncio
async def test_basic_mempool(self, two_nodes):
reward_ph = WALLET_A.get_new_puzzlehash()
@ -70,18 +75,47 @@ class TestMempool:
farmer_reward_puzzle_hash=reward_ph,
pool_reward_puzzle_hash=reward_ph,
)
full_node_1, _, server_1, _ = two_nodes
for block in blocks:
await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
await time_out_assert(60, node_height_at_least, True, full_node_1, blocks[-1].height)
max_mempool_cost = 40000000 * 5
mempool = Mempool(max_mempool_cost)
assert mempool.get_min_fee_rate(104000) == 0
with pytest.raises(ValueError):
mempool.get_min_fee_rate(max_mempool_cost + 1)
spend_bundle = generate_test_spend_bundle(list(blocks[-1].get_included_reward_coins())[0])
assert spend_bundle is not None
class TestMempoolManager:
@pytest.mark.asyncio
async def test_basic_mempool_manager(self, two_nodes):
reward_ph = WALLET_A.get_new_puzzlehash()
blocks = bt.get_consecutive_blocks(
5,
guarantee_transaction_block=True,
farmer_reward_puzzle_hash=reward_ph,
pool_reward_puzzle_hash=reward_ph,
)
full_node_1, full_node_2, server_1, server_2 = two_nodes
peer = await connect_and_get_peer(server_1, server_2)
for block in blocks:
await full_node_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
await time_out_assert(60, node_height_at_least, True, full_node_2, 2)
await time_out_assert(60, node_height_at_least, True, full_node_2, blocks[-1].height)
spend_bundle = generate_test_spend_bundle(list(blocks[-1].get_included_reward_coins())[0])
assert spend_bundle is not None
tx: full_node_protocol.RespondTransaction = full_node_protocol.RespondTransaction(spend_bundle)
await full_node_1.respond_transaction(tx, peer)
res = await full_node_1.respond_transaction(tx, peer)
log.info(f"Res {res}")
await time_out_assert(
10,

View File

@ -1,6 +1,4 @@
# flake8: noqa: F811, F401
"""
Commenting out until clvm_rs is in.
import asyncio
import time
@ -36,7 +34,7 @@ def event_loop():
yield loop
class XTestMempoolPerformance:
class TestMempoolPerformance:
@pytest.fixture(scope="module")
async def wallet_nodes(self):
key_seed = bt.farmer_master_sk_entropy
@ -72,11 +70,10 @@ class XTestMempoolPerformance:
for con in cons:
await con.close()
# blocks = bt.get_consecutive_blocks(3, blocks)
# await full_node_api_1.full_node.respond_block(full_node_protocol.respondblock(blocks[-3]))
#
# for block in blocks[-2:]:
# start_t_2 = time.time()
# await full_node_api_1.full_node.respond_block(full_node_protocol.respondblock(block))
# assert time.time() - start_t_2 < 1
"""
blocks = bt.get_consecutive_blocks(3, blocks)
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(blocks[-3]))
for block in blocks[-2:]:
start_t_2 = time.time()
await full_node_api_1.full_node.respond_block(full_node_protocol.RespondBlock(block))
assert time.time() - start_t_2 < 1